Dataset schema (one record per source file; string columns annotated with min/max lengths):

  column      type    min length   max length
  repo_name   string  5            100
  ref         string  12           67
  path        string  4            244
  copies      string  1            8
  content     string  0            1.05M
repo_name: akash1808/tempest
ref: refs/heads/master
path: tempest/services/compute/json/migrations_client.py
copies: 6
content:
# Copyright 2014 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json

from six.moves.urllib import parse as urllib

from tempest.api_schema.response.compute.v2_1 import migrations as schema
from tempest.common import service_client


class MigrationsClient(service_client.ServiceClient):

    def list_migrations(self, params=None):
        """Lists all migrations."""
        url = 'os-migrations'
        if params:
            url += '?%s' % urllib.urlencode(params)

        resp, body = self.get(url)
        body = json.loads(body)
        self.validate_response(schema.list_migrations, resp, body)
        return service_client.ResponseBodyList(resp, body['migrations'])
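A minimal usage sketch of the client above. The constructor arguments follow tempest's ServiceClient convention (auth provider, service, region) and are assumptions, not shown in this file:

# Hypothetical wiring; 'auth_provider' must be a configured tempest
# auth provider object.
client = MigrationsClient(auth_provider, 'compute', 'RegionOne')
migrations = client.list_migrations(params={'status': 'finished'})
for migration in migrations:
    print(migration['id'], migration['status'])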
repo_name: Pistachitos/Sick-Beard
ref: refs/heads/Pistachitos
path: sickbeard/db.py
copies: 2
content:
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os.path
import re
import sqlite3
import time
import threading

import sickbeard

from sickbeard import encodingKludge as ek
from sickbeard import logger
from sickbeard.exceptions import ex

db_lock = threading.Lock()


def dbFilename(filename="sickbeard.db", suffix=None):
    """
    @param filename: The sqlite database filename to use. If not specified,
                     will be made to be sickbeard.db
    @param suffix: The suffix to append to the filename. A '.' will be added
                   automatically, i.e. suffix='v0' will make dbfile.db.v0
    @return: the correct location of the database file.
    """
    if suffix:
        filename = "%s.%s" % (filename, suffix)
    return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)


class DBConnection:
    def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):

        self.filename = filename
        self.connection = sqlite3.connect(dbFilename(filename), 20)
        if row_type == "dict":
            self.connection.row_factory = self._dict_factory
        else:
            self.connection.row_factory = sqlite3.Row

    def action(self, query, args=None):

        with db_lock:

            if query == None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args == None:
                        logger.log(self.filename+": "+query, logger.DB)
                        sqlResult = self.connection.execute(query)
                    else:
                        logger.log(self.filename+": "+query+" with args "+str(args), logger.DB)
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError, e:
                    if "unable to open database file" in e.message or "database is locked" in e.message:
                        logger.log(u"DB error: "+ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u"DB error: "+ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError, e:
                    logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                    raise

            return sqlResult

    def select(self, query, args=None):

        sqlResults = self.action(query, args).fetchall()

        if sqlResults == None:
            return []

        return sqlResults

    def upsert(self, tableName, valueDict, keyDict):

        changesBefore = self.connection.total_changes

        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]

        query = "UPDATE "+tableName+" SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))

        self.action(query, valueDict.values() + keyDict.values())

        if self.connection.total_changes == changesBefore:
            query = "INSERT INTO "+tableName+" (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \
                    " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
            self.action(query, valueDict.values() + keyDict.values())

    def tableInfo(self, tableName):
        # FIXME ? binding is not supported here, but I cannot find a way to
        # escape a string manually
        cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
        columns = {}
        for column in cursor:
            columns[column['name']] = {'type': column['type']}
        return columns

    # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
    def _dict_factory(self, cursor, row):
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d


def sanityCheckDatabase(connection, sanity_check):
    sanity_check(connection).check()


class DBSanityCheck(object):
    def __init__(self, connection):
        self.connection = connection

    def check(self):
        pass


# ===============
# = Upgrade API =
# ===============

def upgradeDatabase(connection, schema):
    logger.log(u"Checking database structure...", logger.MESSAGE)
    _processUpgrade(connection, schema)


def prettyName(str):
    return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", str)])


def _processUpgrade(connection, upgradeClass):
    instance = upgradeClass(connection)
    logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
    if not instance.test():
        logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.MESSAGE)
        try:
            instance.execute()
        except sqlite3.DatabaseError, e:
            print "Error in " + str(upgradeClass.__name__) + ": " + ex(e)
            raise
        logger.log(upgradeClass.__name__ + " upgrade completed", logger.DEBUG)
    else:
        logger.log(upgradeClass.__name__ + " upgrade not required", logger.DEBUG)

    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)


# Base migration class. All future DB changes should be subclassed from this class
class SchemaUpgrade(object):
    def __init__(self, connection):
        self.connection = connection

    def hasTable(self, tableName):
        return len(self.connection.action("SELECT 1 FROM sqlite_master WHERE name = ?;", (tableName, )).fetchall()) > 0

    def hasColumn(self, tableName, column):
        return column in self.connection.tableInfo(tableName)

    def addColumn(self, table, column, type="NUMERIC", default=0):
        self.connection.action("ALTER TABLE %s ADD %s %s" % (table, column, type))
        self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,))

    def checkDBVersion(self):
        result = self.connection.select("SELECT db_version FROM db_version")
        if result:
            return int(result[0]["db_version"])
        else:
            return 0

    def incDBVersion(self):
        curVersion = self.checkDBVersion()
        self.connection.action("UPDATE db_version SET db_version = ?", [curVersion+1])
        return curVersion+1
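A short sketch of the upsert pattern above: UPDATE first, then INSERT only when the UPDATE touched no rows. The table and columns are hypothetical, not part of Sick Beard's real schema (Python 2, matching the module):

# Hypothetical table; upsert() falls back to INSERT when the UPDATE
# matches no existing row.
db = DBConnection()
db.action("CREATE TABLE IF NOT EXISTS shows (show_id NUMERIC, name TEXT)")
db.upsert("shows", {"name": "Firefly"}, {"show_id": 1})          # inserts
db.upsert("shows", {"name": "Firefly (2002)"}, {"show_id": 1})   # updates in place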
repo_name: 2014c2g9/c2g9
ref: refs/heads/master
path: exts/w2/static/Brython2.0.0-20140209-164925/Lib/unittest/test/test_program.py
copies: 738
content:
import io
import os
import sys
import unittest


class Test_TestProgram(unittest.TestCase):

    def test_discovery_from_dotted_path(self):
        loader = unittest.TestLoader()

        tests = [self]
        expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__))

        self.wasRun = False
        def _find_tests(start_dir, pattern):
            self.wasRun = True
            self.assertEqual(start_dir, expectedPath)
            return tests
        loader._find_tests = _find_tests
        suite = loader.discover('unittest.test')
        self.assertTrue(self.wasRun)
        self.assertEqual(suite._tests, tests)

    # Horrible white box test
    def testNoExit(self):
        result = object()
        test = object()

        class FakeRunner(object):
            def run(self, test):
                self.test = test
                return result

        runner = FakeRunner()

        oldParseArgs = unittest.TestProgram.parseArgs
        def restoreParseArgs():
            unittest.TestProgram.parseArgs = oldParseArgs
        unittest.TestProgram.parseArgs = lambda *args: None
        self.addCleanup(restoreParseArgs)

        def removeTest():
            del unittest.TestProgram.test
        unittest.TestProgram.test = test
        self.addCleanup(removeTest)

        program = unittest.TestProgram(testRunner=runner, exit=False, verbosity=2)

        self.assertEqual(program.result, result)
        self.assertEqual(runner.test, test)
        self.assertEqual(program.verbosity, 2)

    class FooBar(unittest.TestCase):
        def testPass(self):
            assert True
        def testFail(self):
            assert False

    class FooBarLoader(unittest.TestLoader):
        """Test loader that returns a suite containing FooBar."""
        def loadTestsFromModule(self, module):
            return self.suiteClass(
                [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])

    def test_NonExit(self):
        program = unittest.main(exit=False,
                                argv=["foobar"],
                                testRunner=unittest.TextTestRunner(stream=io.StringIO()),
                                testLoader=self.FooBarLoader())
        self.assertTrue(hasattr(program, 'result'))

    def test_Exit(self):
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            exit=True,
            testLoader=self.FooBarLoader())

    def test_ExitAsDefault(self):
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            testLoader=self.FooBarLoader())


class InitialisableProgram(unittest.TestProgram):
    exit = False
    result = None
    verbosity = 1
    defaultTest = None
    testRunner = None
    testLoader = unittest.defaultTestLoader
    module = '__main__'
    progName = 'test'
    test = 'test'
    def __init__(self, *args):
        pass

RESULT = object()

class FakeRunner(object):
    initArgs = None
    test = None
    raiseError = False

    def __init__(self, **kwargs):
        FakeRunner.initArgs = kwargs
        if FakeRunner.raiseError:
            FakeRunner.raiseError = False
            raise TypeError

    def run(self, test):
        FakeRunner.test = test
        return RESULT

# True when signal-based break handling is available; referenced by
# testBufferCatchFailfast but not defined anywhere in this copy of the file.
hasInstallHandler = hasattr(unittest, 'installHandler')

class TestCommandLineArgs(unittest.TestCase):

    def setUp(self):
        self.program = InitialisableProgram()
        self.program.createTests = lambda: None
        FakeRunner.initArgs = None
        FakeRunner.test = None
        FakeRunner.raiseError = False

    def testVerbosity(self):
        program = self.program

        for opt in '-q', '--quiet':
            program.verbosity = 1
            program.parseArgs([None, opt])
            self.assertEqual(program.verbosity, 0)

        for opt in '-v', '--verbose':
            program.verbosity = 1
            program.parseArgs([None, opt])
            self.assertEqual(program.verbosity, 2)

    def testBufferCatchFailfast(self):
        program = self.program
        for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'),
                          ('catch', 'catchbreak')):
            if attr == 'catch' and not hasInstallHandler:
                continue

            short_opt = '-%s' % arg[0]
            long_opt = '--%s' % arg
            for opt in short_opt, long_opt:
                setattr(program, attr, None)
                program.parseArgs([None, opt])
                self.assertTrue(getattr(program, attr))

            for opt in short_opt, long_opt:
                not_none = object()
                setattr(program, attr, not_none)
                program.parseArgs([None, opt])
                self.assertEqual(getattr(program, attr), not_none)

    def testWarning(self):
        """Test the warnings argument"""
        # see #10535
        class FakeTP(unittest.TestProgram):
            def parseArgs(self, *args, **kw): pass
            def runTests(self, *args, **kw): pass
        warnoptions = sys.warnoptions[:]
        try:
            sys.warnoptions[:] = []
            # no warn options, no arg -> default
            self.assertEqual(FakeTP().warnings, 'default')
            # no warn options, w/ arg -> arg value
            self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
            sys.warnoptions[:] = ['somevalue']
            # warn options, no arg -> None
            # warn options, w/ arg -> arg value
            self.assertEqual(FakeTP().warnings, None)
            self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
        finally:
            sys.warnoptions[:] = warnoptions

    def testRunTestsRunnerClass(self):
        program = self.program

        program.testRunner = FakeRunner
        program.verbosity = 'verbosity'
        program.failfast = 'failfast'
        program.buffer = 'buffer'
        program.warnings = 'warnings'

        program.runTests()

        self.assertEqual(FakeRunner.initArgs, {'verbosity': 'verbosity',
                                               'failfast': 'failfast',
                                               'buffer': 'buffer',
                                               'warnings': 'warnings'})
        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testRunTestsRunnerInstance(self):
        program = self.program

        program.testRunner = FakeRunner()
        FakeRunner.initArgs = None

        program.runTests()

        # A new FakeRunner should not have been instantiated
        self.assertIsNone(FakeRunner.initArgs)

        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testRunTestsOldRunnerClass(self):
        program = self.program

        FakeRunner.raiseError = True
        program.testRunner = FakeRunner
        program.verbosity = 'verbosity'
        program.failfast = 'failfast'
        program.buffer = 'buffer'
        program.test = 'test'

        program.runTests()

        # If initialising raises a type error it should be retried
        # without the new keyword arguments
        self.assertEqual(FakeRunner.initArgs, {})
        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testCatchBreakInstallsHandler(self):
        module = sys.modules['unittest.main']
        original = module.installHandler
        def restore():
            module.installHandler = original
        self.addCleanup(restore)

        self.installed = False
        def fakeInstallHandler():
            self.installed = True

        module.installHandler = fakeInstallHandler

        program = self.program
        program.catchbreak = True

        program.testRunner = FakeRunner

        program.runTests()
        self.assertTrue(self.installed)

    def _patch_isfile(self, names, exists=True):
        def isfile(path):
            return path in names
        original = os.path.isfile
        os.path.isfile = isfile
        def restore():
            os.path.isfile = original
        self.addCleanup(restore)

    def testParseArgsFileNames(self):
        # running tests with filenames instead of module names
        program = self.program
        argv = ['progname', 'foo.py', 'bar.Py', 'baz.PY', 'wing.txt']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        # note that 'wing.txt' is not a Python file so the name should
        # *not* be converted to a module name
        expected = ['foo', 'bar', 'baz', 'wing.txt']
        self.assertEqual(program.testNames, expected)

    def testParseArgsFilePaths(self):
        program = self.program
        argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        expected = ['foo.bar.baz', 'green.red']
        self.assertEqual(program.testNames, expected)

    def testParseArgsNonExistentFiles(self):
        program = self.program
        argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
        self._patch_isfile([])

        program.createTests = lambda: None
        program.parseArgs(argv)

        self.assertEqual(program.testNames, argv[1:])

    def testParseArgsAbsolutePathsThatCanBeConverted(self):
        cur_dir = os.getcwd()
        program = self.program
        def _join(name):
            return os.path.join(cur_dir, name)
        argv = ['progname', _join('foo/bar/baz.py'), _join('green\\red.py')]
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        expected = ['foo.bar.baz', 'green.red']
        self.assertEqual(program.testNames, expected)

    def testParseArgsAbsolutePathsThatCannotBeConverted(self):
        program = self.program
        # even on Windows '/...' is considered absolute by os.path.abspath
        argv = ['progname', '/foo/bar/baz.py', '/green/red.py']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        self.assertEqual(program.testNames, argv[1:])

    # it may be better to use platform specific functions to normalise paths
    # rather than accepting '.PY' and '\' as file separator on Linux / Mac
    # it would also be better to check that a filename is a valid module
    # identifier (we have a regex for this in loader.py)
    # for invalid filenames should we raise a useful error rather than
    # leaving the current error message (import of filename fails) in place?


if __name__ == '__main__':
    unittest.main()
repo_name: wildtetris/python-social-auth
ref: refs/heads/master
path: social/backends/weibo.py
copies: 67
content:
# coding:utf8
# author:hepochen@gmail.com  https://github.com/hepochen
"""
Weibo OAuth2 backend, docs at:
    http://psa.matiasaguirre.net/docs/backends/weibo.html
"""
from social.backends.oauth import BaseOAuth2


class WeiboOAuth2(BaseOAuth2):
    """Weibo (of sina) OAuth authentication backend"""
    name = 'weibo'
    ID_KEY = 'uid'
    AUTHORIZATION_URL = 'https://api.weibo.com/oauth2/authorize'
    REQUEST_TOKEN_URL = 'https://api.weibo.com/oauth2/request_token'
    ACCESS_TOKEN_URL = 'https://api.weibo.com/oauth2/access_token'
    ACCESS_TOKEN_METHOD = 'POST'
    REDIRECT_STATE = False
    EXTRA_DATA = [
        ('id', 'id'),
        ('name', 'username'),
        ('profile_image_url', 'profile_image_url'),
        ('gender', 'gender')
    ]

    def get_user_details(self, response):
        """Return user details from Weibo. API URL is:
        https://api.weibo.com/2/users/show.json/?uid=<UID>&access_token=<TOKEN>
        """
        if self.setting('DOMAIN_AS_USERNAME'):
            username = response.get('domain', '')
        else:
            username = response.get('name', '')
        fullname, first_name, last_name = self.get_user_names(
            first_name=response.get('screen_name', '')
        )
        return {'username': username,
                'fullname': fullname,
                'first_name': first_name,
                'last_name': last_name}

    def get_uid(self, access_token):
        """Return uid by access_token"""
        data = self.get_json(
            'https://api.weibo.com/oauth2/get_token_info',
            method='POST',
            params={'access_token': access_token}
        )
        return data['uid']

    def user_data(self, access_token, response=None, *args, **kwargs):
        """Return user data"""
        # If user id was not retrieved in the response, then get it directly
        # from weibo get_token_info endpoint
        uid = response and response.get('uid') or self.get_uid(access_token)
        user_data = self.get_json(
            'https://api.weibo.com/2/users/show.json',
            params={'access_token': access_token, 'uid': uid}
        )
        user_data['uid'] = uid
        return user_data
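A sketch of how a Django project would enable this backend. The setting names follow python-social-auth's usual SOCIAL_AUTH_<BACKEND>_KEY/SECRET convention; the credential values are placeholders:

AUTHENTICATION_BACKENDS = (
    'social.backends.weibo.WeiboOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)
SOCIAL_AUTH_WEIBO_KEY = 'your-app-key'        # placeholder
SOCIAL_AUTH_WEIBO_SECRET = 'your-app-secret'  # placeholder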
repo_name: telefonicaid/murano
ref: refs/heads/master
path: murano/tests/functional/engine/__init__.py
copies: 12133432
content: (empty)
repo_name: WikipediaLibrary/TWLight
ref: refs/heads/master
path: TWLight/ezproxy/__init__.py
copies: 12133432
content: (empty)
repo_name: charleswhchan/huey
ref: refs/heads/master
path: huey/backends/__init__.py
copies: 12133432
content: (empty)
repo_name: petertodd/namecoin
ref: refs/heads/master
path: client/DNS/Type.py
copies: 40
content:
# -*- encoding: utf-8 -*-
"""
$Id: Type.py,v 1.6.2.2 2009/06/09 18:39:06 customdesigned Exp $

This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net

This code is covered by the standard Python License.

TYPE values (section 3.2.2)
"""

A = 1           # a host address
NS = 2          # an authoritative name server
MD = 3          # a mail destination (Obsolete - use MX)
MF = 4          # a mail forwarder (Obsolete - use MX)
CNAME = 5       # the canonical name for an alias
SOA = 6         # marks the start of a zone of authority
MB = 7          # a mailbox domain name (EXPERIMENTAL)
MG = 8          # a mail group member (EXPERIMENTAL)
MR = 9          # a mail rename domain name (EXPERIMENTAL)
NULL = 10       # a null RR (EXPERIMENTAL)
WKS = 11        # a well known service description
PTR = 12        # a domain name pointer
HINFO = 13      # host information
MINFO = 14      # mailbox or mail list information
MX = 15         # mail exchange
TXT = 16        # text strings
AAAA = 28       # IPv6 AAAA records (RFC 1886)
SRV = 33        # DNS RR for specifying the location of services (RFC 2782)
SPF = 99        # TXT RR for Sender Policy Framework

# Additional TYPE values from host.c source
UNAME = 110
MP = 240

# QTYPE values (section 3.2.3)
AXFR = 252      # A request for a transfer of an entire zone
MAILB = 253     # A request for mailbox-related records (MB, MG or MR)
MAILA = 254     # A request for mail agent RRs (Obsolete - see MX)
ANY = 255       # A request for all records

# Construct reverse mapping dictionary
_names = dir()
typemap = {}
for _name in _names:
    if _name[0] != '_':
        typemap[eval(_name)] = _name

def typestr(type):
    if typemap.has_key(type):
        return typemap[type]
    else:
        return `type`

#
# $Log: Type.py,v $
# Revision 1.6.2.2  2009/06/09 18:39:06  customdesigned
# Built-in SPF support
#
# Revision 1.6.2.1  2007/05/22 20:20:39  customdesigned
# Mark utf-8 encoding
#
# Revision 1.6  2002/03/19 12:41:33  anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.5  2002/03/19 12:26:13  anthonybaxter
# death to leading tabs.
#
# Revision 1.4  2001/08/09 09:08:55  anthonybaxter
# added identifying header to top of each file
#
# Revision 1.3  2001/07/19 07:38:28  anthony
# added type code for SRV. From Michael Ströder.
#
# Revision 1.2  2001/07/19 06:57:07  anthony
# cvs keywords added
#
#
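A quick sketch of the reverse mapping in use (Python 2, matching the module):

print(typestr(MX))   # -> 'MX'
print(typestr(15))   # same thing, looked up by numeric value
print(typestr(999))  # unknown codes fall back to their repr: '999'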
repo_name: ftomassetti/intellij-community
ref: refs/heads/master
path: python/testData/console/indent8.py
copies: 160
content:
def foo():
    print 'foo'
    #comment with indent
    print 'bar'
repo_name: ddurdle/GDrive-for-KODI
ref: refs/heads/master
path: decryptFolder.py
copies: 2
content:
from resources.lib import encryption
#from subprocess import call
import sys
import os

saltFile = str(sys.argv[1])
password = str(sys.argv[2])
source = str(sys.argv[3])
target = str(sys.argv[4])

encrypt = encryption.encryption(saltFile, password)

#encrypt.encryptString(file)
#print encrypt.decryptString(file)

def decrypt_dir(source, target):
    current, dirs, files = os.walk(source).next()
    for file in files:
        encrypt.decryptFile(source + '/' + file, target + '/' + encrypt.decryptString(file))

#print encrypt.generateSalt()

decrypt_dir(source, target)
repo_name: Monaden/Collaborastory
ref: refs/heads/master
path: src/main/models.py
copies: 1
content:
from django.db import models
from django.contrib.auth.models import User
from uuid import uuid1


class Story(models.Model):
    text = models.TextField()
    completed = models.BooleanField(default=False)
    date = models.DateTimeField(verbose_name=u"Time", auto_now=True)
    score = models.IntegerField(default=5)

    def __str__(self):
        return self.text


class Author(models.Model):
    user = models.ForeignKey(User)
    story = models.ForeignKey(Story)
    order = models.IntegerField(default=0)
    uuid = models.CharField(max_length=64, default='')

    def __str__(self):
        return self.user.__str__()


class Word(models.Model):
    text = models.CharField(max_length=200)
    author = models.ForeignKey(User, verbose_name=u"Author")
    story = models.ForeignKey(Story, verbose_name=u"Story ")
    time = models.DateTimeField(verbose_name=u"Time", auto_now=True)

    def __str__(self):
        return self.text
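A usage sketch for these models, assuming a configured Django project with migrations applied:

from django.contrib.auth.models import User

user = User.objects.create_user('alice')
story = Story.objects.create(text='Once upon a time')
Word.objects.create(text='Once', author=user, story=story)
print(story.word_set.count())  # 1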
repo_name: Russell-IO/ansible
ref: refs/heads/devel
path: lib/ansible/modules/network/aci/aci_tenant_span_src_group_to_dst_group.py
copies: 12
content:
#!/usr/bin/python
# -*- coding: utf-8 -*-

# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: aci_tenant_span_src_group_to_dst_group
short_description: Bind SPAN source groups to destination groups (span:SpanLbl)
description:
- Bind SPAN source groups to associated destination groups on Cisco ACI fabrics.
notes:
- The C(tenant), C(src_group), and C(dst_group) must exist before using this module in your playbook.
  The M(aci_tenant), M(aci_tenant_span_src_group), and M(aci_tenant_span_dst_group) modules can be used for this.
- More information about the internal APIC class B(span:SrcGrp) from
  L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Jacob McGill (@jmcgill298)
version_added: '2.4'
options:
  description:
    description:
    - The description for Span source group to destination group binding.
    aliases: [ descr ]
  dst_group:
    description:
    - The Span destination group to associate with the source group.
  src_group:
    description:
    - The name of the Span source group.
  state:
    description:
    - Use C(present) or C(absent) for adding or removing.
    - Use C(query) for listing an object or multiple objects.
    choices: [ absent, present, query ]
    default: present
  tenant:
    description:
    - The name of the Tenant.
    aliases: [ tenant_name ]
extends_documentation_fragment: aci
'''

EXAMPLES = r'''
- aci_tenant_span_src_group_to_dst_group:
    host: apic
    username: admin
    password: SomeSecretPassword
    tenant: production
    src_group: "{{ src_group }}"
    dst_group: "{{ dst_group }}"
    description: "{{ description }}"
'''

RETURN = r'''
current:
  description: The existing configuration from the APIC after the module has finished
  returned: success
  type: list
  sample: [{"fvTenant": {"attributes": {"descr": "Production environment",
            "dn": "uni/tn-production", "name": "production", "nameAlias": "",
            "ownerKey": "", "ownerTag": ""}}}]
error:
  description: The error information as returned from the APIC
  returned: failure
  type: dict
  sample: {"code": "122", "text": "unknown managed object class foo"}
raw:
  description: The raw output returned by the APIC REST API (xml or json)
  returned: parse error
  type: string
  sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
  description: The actual/minimal configuration pushed to the APIC
  returned: info
  type: list
  sample: {"fvTenant": {"attributes": {"descr": "Production environment"}}}
previous:
  description: The original configuration from the APIC before the module has started
  returned: info
  type: list
  sample: [{"fvTenant": {"attributes": {"descr": "Production",
            "dn": "uni/tn-production", "name": "production", "nameAlias": "",
            "ownerKey": "", "ownerTag": ""}}}]
proposed:
  description: The assembled configuration from the user-provided parameters
  returned: info
  type: dict
  sample: {"fvTenant": {"attributes": {"descr": "Production environment", "name": "production"}}}
filter_string:
  description: The filter string used for the request
  returned: failure or debug
  type: string
  sample: ?rsp-prop-include=config-only
method:
  description: The HTTP method used for the request to the APIC
  returned: failure or debug
  type: string
  sample: POST
response:
  description: The HTTP response from the APIC
  returned: failure or debug
  type: string
  sample: OK (30 bytes)
status:
  description: The HTTP status from the APIC
  returned: failure or debug
  type: int
  sample: 200
url:
  description: The HTTP url used for the request to the APIC
  returned: failure or debug
  type: string
  sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''

from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule


def main():
    argument_spec = aci_argument_spec()
    argument_spec.update(
        description=dict(type='str', aliases=['descr']),
        dst_group=dict(type='str'),  # Not required for querying all objects
        src_group=dict(type='str'),  # Not required for querying all objects
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        tenant=dict(type='str', aliases=['tenant_name']),  # Not required for querying all objects
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['dst_group', 'src_group', 'tenant']],
            ['state', 'present', ['dst_group', 'src_group', 'tenant']],
        ],
    )

    description = module.params['description']
    dst_group = module.params['dst_group']
    src_group = module.params['src_group']
    state = module.params['state']
    tenant = module.params['tenant']

    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            filter_target='eq(fvTenant.name, "{0}")'.format(tenant),
            module_object=tenant,
        ),
        subclass_1=dict(
            aci_class='spanSrcGrp',
            aci_rn='srcgrp-{0}'.format(src_group),
            filter_target='eq(spanSrcGrp.name, "{0}")'.format(src_group),
            module_object=src_group,
        ),
        subclass_2=dict(
            aci_class='spanSpanLbl',
            aci_rn='spanlbl-{0}'.format(dst_group),
            filter_target='eq(spanSpanLbl.name, "{0}")'.format(dst_group),
            module_object=dst_group,
        ),
    )

    aci.get_existing()

    if state == 'present':
        aci.payload(
            aci_class='spanSpanLbl',
            class_config=dict(
                descr=description,
                name=dst_group,
            ),
        )

        aci.get_diff(aci_class='spanSpanLbl')

        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()


if __name__ == "__main__":
    main()
repo_name: ge0rgi/cinder
ref: refs/heads/stable/ocata
path: cinder/tests/unit/api/v3/test_snapshot_manage.py
copies: 1
content:
# Copyright (c) 2016 Stratoscale, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode
import webob

from cinder.api.v3 import router as router_v3
from cinder import context
from cinder import objects
from cinder import test
from cinder.tests.unit.api.contrib import test_snapshot_manage as test_contrib
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_service

CONF = cfg.CONF


def app():
    # no auth, just let environ['cinder.context'] pass through
    api = router_v3.APIRouter()
    mapper = fakes.urlmap.URLMap()
    mapper['/v3'] = api
    return mapper


@ddt.ddt
@mock.patch('cinder.volume.api.API.get', test_contrib.volume_get)
class SnapshotManageTest(test.TestCase):
    """Test cases for cinder/api/v3/snapshot_manage.py"""

    def setUp(self):
        super(SnapshotManageTest, self).setUp()
        self._admin_ctxt = context.RequestContext(fake.USER_ID,
                                                  fake.PROJECT_ID,
                                                  True)

    def _get_resp_post(self, body, version="3.8"):
        """Helper to execute a POST manageable_snapshots API call."""
        req = webob.Request.blank('/v3/%s/manageable_snapshots' %
                                  fake.PROJECT_ID)
        req.method = 'POST'
        req.headers['Content-Type'] = 'application/json'
        req.headers['OpenStack-API-Version'] = 'volume ' + version
        req.environ['cinder.context'] = self._admin_ctxt
        req.body = jsonutils.dump_as_bytes(body)
        res = req.get_response(app())
        return res

    @mock.patch('cinder.volume.rpcapi.VolumeAPI.manage_existing_snapshot')
    @mock.patch('cinder.volume.api.API.create_snapshot_in_db')
    @mock.patch('cinder.objects.service.Service.get_by_id')
    def test_manage_snapshot_route(self, mock_service_get,
                                   mock_create_snapshot, mock_rpcapi):
        """Test call to manage snapshot.

        There is currently no change between the API in contrib and the API
        in v3, so here we simply check that the call is routed properly,
        rather than copying all the tests.
        """
        mock_service_get.return_value = fake_service.fake_service_obj(
            self._admin_ctxt,
            binary='cinder-volume')

        body = {'snapshot': {'volume_id': fake.VOLUME_ID, 'ref': 'fake_ref'}}
        res = self._get_resp_post(body)
        self.assertEqual(202, res.status_int, res)

    def test_manage_snapshot_previous_version(self):
        body = {'snapshot': {'volume_id': fake.VOLUME_ID, 'ref': 'fake_ref'}}
        res = self._get_resp_post(body, version="3.7")
        self.assertEqual(404, res.status_int, res)

    def _get_resp_get(self, host, detailed, paging, version="3.8", **kwargs):
        """Helper to execute a GET os-snapshot-manage API call."""
        params = {'host': host} if host else {}
        params.update(kwargs)
        if paging:
            params.update({'marker': '1234', 'limit': 10,
                           'offset': 4, 'sort': 'reference:asc'})
        query_string = "?%s" % urlencode(params)
        detail = ""
        if detailed:
            detail = "/detail"
        req = webob.Request.blank('/v3/%s/manageable_snapshots%s%s' %
                                  (fake.PROJECT_ID, detail, query_string))
        req.method = 'GET'
        req.headers['Content-Type'] = 'application/json'
        req.headers['OpenStack-API-Version'] = 'volume ' + version
        req.environ['cinder.context'] = self._admin_ctxt
        res = req.get_response(app())
        return res

    @mock.patch('cinder.volume.api.API.get_manageable_snapshots',
                wraps=test_contrib.api_get_manageable_snapshots)
    def test_get_manageable_snapshots_route(self, mock_api_manageable):
        """Test call to get manageable volumes.

        There is currently no change between the API in contrib and the API
        in v3, so here we simply check that the call is routed properly,
        rather than copying all the tests.
        """
        res = self._get_resp_get('fakehost', False, False)
        self.assertEqual(200, res.status_int)

    def test_get_manageable_snapshots_previous_version(self):
        res = self._get_resp_get('fakehost', False, False, version="3.7")
        self.assertEqual(404, res.status_int)

    @mock.patch('cinder.volume.api.API.get_manageable_snapshots',
                wraps=test_contrib.api_get_manageable_snapshots)
    def test_get_manageable_snapshots_detail_route(self, mock_api_manageable):
        """Test call to get manageable volumes (detailed).

        There is currently no change between the API in contrib and the API
        in v3, so here we simply check that the call is routed properly,
        rather than copying all the tests.
        """
        res = self._get_resp_get('fakehost', True, True)
        self.assertEqual(200, res.status_int)

    def test_get_manageable_snapshots_detail_previous_version(self):
        res = self._get_resp_get('fakehost', True, True, version="3.7")
        self.assertEqual(404, res.status_int)

    @ddt.data((True, True, 'detail_list'), (True, False, 'summary_list'),
              (False, True, 'detail_list'), (False, False, 'summary_list'))
    @ddt.unpack
    @mock.patch('cinder.objects.Service.is_up', True)
    @mock.patch('cinder.volume.rpcapi.VolumeAPI._get_cctxt')
    @mock.patch('cinder.objects.Service.get_by_id')
    def test_get_manageable_detail(self, clustered, is_detail, view_method,
                                   get_service_mock, get_cctxt_mock):
        if clustered:
            host = None
            cluster_name = 'mycluster'
            version = '3.17'
            kwargs = {'cluster': cluster_name}
        else:
            host = 'fakehost'
            cluster_name = None
            version = '3.8'
            kwargs = {}
        service = objects.Service(disabled=False, host='fakehost',
                                  cluster_name=cluster_name)
        get_service_mock.return_value = service
        snaps = [mock.sentinel.snap1, mock.sentinel.snap2]
        get_cctxt_mock.return_value.call.return_value = snaps

        view_data = {'manageable-snapshots':
                     [{'vol': 'mock.sentinel.snap1'},
                      {'vol': 'mock.sentinel.snap2'}]}
        view_path = ('cinder.api.views.manageable_snapshots.ViewBuilder.' +
                     view_method)
        with mock.patch(view_path, return_value=view_data) as detail_view_mock:
            res = self._get_resp_get(host, is_detail, False, version=version,
                                     **kwargs)

        self.assertEqual(200, res.status_int)
        get_cctxt_mock.assert_called_once_with(service.service_topic_queue,
                                               version=('3.10', '3.0'))
        get_cctxt_mock.return_value.call.assert_called_once_with(
            mock.ANY, 'get_manageable_snapshots', marker=None,
            limit=CONF.osapi_max_limit, offset=0, sort_keys=['reference'],
            sort_dirs=['desc'], want_objects=True)
        detail_view_mock.assert_called_once_with(mock.ANY, snaps, len(snaps))
        get_service_mock.assert_called_once_with(
            mock.ANY, None, host=host, binary='cinder-volume',
            cluster_name=cluster_name)

    @ddt.data('3.8', '3.17')
    def test_get_manageable_missing_host(self, version):
        res = self._get_resp_get(None, True, False, version=version)
        self.assertEqual(400, res.status_int)

    def test_get_manageable_both_host_cluster(self):
        res = self._get_resp_get('host', True, False, version='3.17',
                                 cluster='cluster')
        self.assertEqual(400, res.status_int)
repo_name: hujiajie/chromium-crosswalk
ref: refs/heads/master
path: tools/site_compare/operators/equals.py
copies: 189
content:
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Compare two images for equality."""

from PIL import Image
from PIL import ImageChops


def Compare(file1, file2, **kwargs):
  """Compares two images to see if they're identical.

  Args:
    file1: path to first image to compare
    file2: path to second image to compare
    kwargs: unused for this operator

  Returns:
    None if the images are identical
    A tuple of (errorstring, image) if they're not
  """
  kwargs = kwargs  # unused parameter

  im1 = Image.open(file1)
  im2 = Image.open(file2)

  if im1.size != im2.size:
    return ("The images are of different size (%s vs %s)" %
            (im1.size, im2.size), im1)

  diff = ImageChops.difference(im1, im2)

  if max(diff.getextrema()) != (0, 0):
    return ("The images differ", diff)
  else:
    return None
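A usage sketch of the operator (file names are placeholders):

result = Compare('before.png', 'after.png')
if result is None:
    print('images are identical')
else:
    message, image = result
    print(message)
    image.save('diff.png')  # inspect the difference image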
repo_name: mreq/dotfiles
ref: refs/heads/master
path: scripts/wm/x11winmatch.py
copies: 1
content:
#!/usr/bin/env python3
from Xlib import display, X, Xatom
import time
import os


class X11WinWatch:
    def __init__(self):
        self.display = display.Display()
        self.root = self.display.screen().root
        self.root.change_attributes(event_mask=(X.PropertyChangeMask))
        self.ACTIVE = self.display.intern_atom("_NET_ACTIVE_WINDOW")
        self.display.flush()
        self.activeWindow = self.root.get_full_property(self.ACTIVE, 0).value[0]
        self.doActiveWindow()
        self.run()

    def doActiveWindow(self):
        active = self.root.get_full_property(self.ACTIVE, 0).value[0]
        if active != self.activeWindow:
            # Window changed
            self.activeWindow = active
            os.system('~/scripts/wm/on_window_change.sh ' + str(active))

    def run(self):
        while 1:
            while self.display.pending_events():
                e = self.display.next_event()
                if e.type == X.PropertyNotify:
                    self.doActiveWindow()
            time.sleep(0.1)


X11WinWatch()
repo_name: supersven/intellij-community
ref: refs/heads/master
path: python/helpers/python-skeletons/itertools.py
copies: 62
content:
"""Skeleton for 'itertools' stdlib module.""" class islice(object): def __init__(self, iterable, start, stop=None, step=None): """ :type iterable: collections.Iterable[T] :type start: numbers.Integral :type stop: numbers.Integral | None :type step: numbers.Integral | None :rtype: itertools.islice[T] """ pass
repo_name: gchp/django
ref: refs/heads/master
path: django/contrib/sites/shortcuts.py
copies: 615
content:
from __future__ import unicode_literals

from django.apps import apps


def get_current_site(request):
    """
    Checks if contrib.sites is installed and returns either the current
    ``Site`` object or a ``RequestSite`` object based on the request.
    """
    # Imports are inside the function because its point is to avoid importing
    # the Site models when django.contrib.sites isn't installed.
    if apps.is_installed('django.contrib.sites'):
        from .models import Site
        return Site.objects.get_current(request)
    else:
        from .requests import RequestSite
        return RequestSite(request)
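A sketch of a view using this helper (the view function itself is hypothetical):

from django.http import HttpResponse
from django.contrib.sites.shortcuts import get_current_site

def homepage(request):
    site = get_current_site(request)
    return HttpResponse('Welcome to %s' % site.name)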
repo_name: GDGLima/contentbox
ref: refs/heads/master
path: third_party/django/contrib/gis/geos/polygon.py
copies: 219
content:
from ctypes import c_uint, byref
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import get_pointer_arr, GEOM_PTR
from django.contrib.gis.geos.linestring import LinearRing
from django.contrib.gis.geos import prototypes as capi
from django.utils import six
from django.utils.six.moves import xrange


class Polygon(GEOSGeometry):
    _minlength = 1

    def __init__(self, *args, **kwargs):
        """
        Initializes on an exterior ring and a sequence of holes (both
        instances may be either LinearRing instances, or a tuple/list that
        may be constructed into a LinearRing).

        Examples of initialization, where shell, hole1, and hole2 are
        valid LinearRing geometries:
        >>> poly = Polygon(shell, hole1, hole2)
        >>> poly = Polygon(shell, (hole1, hole2))

        Example where tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)),
        ...                ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
        """
        if not args:
            raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')

        # Getting the ext_ring and init_holes parameters from the argument list
        ext_ring = args[0]
        init_holes = args[1:]
        n_holes = len(init_holes)

        # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
        if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
            if len(init_holes[0]) == 0:
                init_holes = ()
                n_holes = 0
            elif isinstance(init_holes[0][0], LinearRing):
                init_holes = init_holes[0]
                n_holes = len(init_holes)

        polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
        super(Polygon, self).__init__(polygon, **kwargs)

    def __iter__(self):
        "Iterates over each ring in the polygon."
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of rings in this Polygon."
        return self.num_interior_rings + 1

    @classmethod
    def from_bbox(cls, bbox):
        "Constructs a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        for z in bbox:
            if not isinstance(z, six.integer_types + (float,)):
                return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
                                    (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
        return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))

    ### These routines are needed for list-like operation w/ListMixin ###
    def _create_polygon(self, length, items):
        # Instantiate LinearRing objects if necessary, but don't clone them yet
        # _construct_ring will throw a TypeError if a parameter isn't a valid ring
        # If we cloned the pointers here, we wouldn't be able to clean up
        # in case of error.
        rings = []
        for r in items:
            if isinstance(r, GEOM_PTR):
                rings.append(r)
            else:
                rings.append(self._construct_ring(r))

        shell = self._clone(rings.pop(0))

        n_holes = length - 1
        if n_holes:
            holes = get_pointer_arr(n_holes)
            for i, r in enumerate(rings):
                holes[i] = self._clone(r)
            holes_param = byref(holes)
        else:
            holes_param = None

        return capi.create_polygon(shell, holes_param, c_uint(n_holes))

    def _clone(self, g):
        if isinstance(g, GEOM_PTR):
            return capi.geom_clone(g)
        else:
            return capi.geom_clone(g.ptr)

    def _construct_ring(self, param, msg='Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings'):
        "Helper routine for trying to construct a ring from the given parameter."
        if isinstance(param, LinearRing):
            return param
        try:
            ring = LinearRing(param)
            return ring
        except TypeError:
            raise TypeError(msg)

    def _set_list(self, length, items):
        # Getting the current pointer, replacing with the newly constructed
        # geometry, and destroying the old geometry.
        prev_ptr = self.ptr
        srid = self.srid
        self.ptr = self._create_polygon(length, items)
        if srid:
            self.srid = srid
        capi.destroy_geom(prev_ptr)

    def _get_single_internal(self, index):
        """
        Returns the ring at the specified index. The first index, 0, will
        always return the exterior ring.  Indices > 0 will return the
        interior ring at the given index (e.g., poly[1] and poly[2] would
        return the first and second interior ring, respectively).

        CAREFUL: Internal/External are not the same as Interior/Exterior!
        _get_single_internal returns a pointer from the existing geometries
        for use internally by the object's methods.  _get_single_external
        returns a clone of the same geometry for use by external code.
        """
        if index == 0:
            return capi.get_extring(self.ptr)
        else:
            # Getting the interior ring, have to subtract 1 from the index.
            return capi.get_intring(self.ptr, index-1)

    def _get_single_external(self, index):
        return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

    _set_single = GEOSGeometry._set_single_rebuild
    _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild

    #### Polygon Properties ####
    @property
    def num_interior_rings(self):
        "Returns the number of interior rings."
        # Getting the number of rings
        return capi.get_nrings(self.ptr)

    def _get_ext_ring(self):
        "Gets the exterior ring of the Polygon."
        return self[0]

    def _set_ext_ring(self, ring):
        "Sets the exterior ring of the Polygon."
        self[0] = ring

    # Properties for the exterior ring/shell.
    exterior_ring = property(_get_ext_ring, _set_ext_ring)
    shell = exterior_ring

    @property
    def tuple(self):
        "Gets the tuple for each ring in this Polygon."
        return tuple([self[i].tuple for i in xrange(len(self))])
    coords = tuple

    @property
    def kml(self):
        "Returns the KML representation of this Polygon."
        inner_kml = ''.join(["<innerBoundaryIs>%s</innerBoundaryIs>" % self[i+1].kml
                             for i in xrange(self.num_interior_rings)])
        return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
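A sketch of constructing and inspecting a Polygon, assuming GEOS is installed and Django's GIS support is configured:

poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)))
print(len(poly))                 # 1 ring (the shell)
print(poly.num_interior_rings)   # 0

bbox_poly = Polygon.from_bbox((0, 0, 10, 10))
print(bbox_poly.shell.tuple[0])  # (0.0, 0.0)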
repo_name: yangxianbo/jym
ref: refs/heads/master
path: account/tests.py
copies: 24123
content:
from django.test import TestCase

# Create your tests here.
repo_name: ywcui1990/htmresearch
ref: refs/heads/master
path: htmresearch/frameworks/layers/laminar_network.py
copies: 3
content:
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

"""
Overall factory methods to create networks of multiple layers and for
experimenting with different laminar structures.

There are two main types of networks: L2L4 networks, and, L2456 networks.
Either type can be created as a single column or with multiple columns.

Each type has its own creation file (imported here) - see that file for
detailed descriptions.
"""

from nupic.engine import Network

from htmresearch.frameworks.layers.l2_l4_network_creation import (
  createL4L2Column, createMultipleL4L2Columns,
  createMultipleL4L2ColumnsWithTopology)
from htmresearch.frameworks.layers.l2456_network_creation import (
  createL2456Columns)
from htmresearch.frameworks.layers.combined_sequence_network_creation import (
  createL4L2TMColumn)
from htmresearch.support.register_regions import registerAllResearchRegions


def createNetwork(networkConfig):
  """
  Create and initialize the specified network instance.

  @param networkConfig: (dict) the configuration of this network.

  @return network: (Network) The actual network
  """

  registerAllResearchRegions()

  network = Network()

  if networkConfig["networkType"] == "L4L2Column":
    return createL4L2Column(network, networkConfig, "_0")
  elif networkConfig["networkType"] == "MultipleL4L2Columns":
    return createMultipleL4L2Columns(network, networkConfig)
  elif networkConfig["networkType"] == "MultipleL4L2ColumnsWithTopology":
    return createMultipleL4L2ColumnsWithTopology(network, networkConfig)
  elif networkConfig["networkType"] == "L2456Columns":
    return createL2456Columns(network, networkConfig)
  elif networkConfig["networkType"] == "L4L2TMColumn":
    return createL4L2TMColumn(network, networkConfig, "_0")


def printNetwork(network):
  """
  Given a network, print out regions sorted by phase
  """
  print "The network has", len(network.regions.values()), "regions"
  for p in range(network.getMaxPhase()):
    print "=== Phase", p
    for region in network.regions.values():
      if network.getPhases(region.name)[0] == p:
        print "   ", region.name
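A hypothetical call into the factory. Only the "networkType" key is certain from this file; the remaining layer parameters each creation function expects are elided:

networkConfig = {
  "networkType": "L4L2Column",
  # ... per-layer parameters expected by createL4L2Column ...
}
network = createNetwork(networkConfig)
printNetwork(network)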
repo_name: wfxiang08/django185
ref: refs/heads/master
path: tests/utils_tests/test_archive.py
copies: 372
content:
import os
import shutil
import tempfile
import unittest

from django.utils._os import upath
from django.utils.archive import Archive, extract

TEST_DIR = os.path.join(os.path.dirname(upath(__file__)), 'archives')


class ArchiveTester(object):
    archive = None

    def setUp(self):
        """
        Create temporary directory for testing extraction.
        """
        self.old_cwd = os.getcwd()
        self.tmpdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmpdir)
        self.archive_path = os.path.join(TEST_DIR, self.archive)
        self.archive_lead_path = os.path.join(TEST_DIR, "leadpath_%s" % self.archive)
        # Always start off in TEST_DIR.
        os.chdir(TEST_DIR)

    def tearDown(self):
        os.chdir(self.old_cwd)

    def test_extract_method(self):
        with Archive(self.archive) as archive:
            archive.extract(self.tmpdir)
        self.check_files(self.tmpdir)

    def test_extract_method_no_to_path(self):
        os.chdir(self.tmpdir)
        with Archive(self.archive_path) as archive:
            archive.extract()
        self.check_files(self.tmpdir)

    def test_extract_function(self):
        extract(self.archive_path, self.tmpdir)
        self.check_files(self.tmpdir)

    def test_extract_function_with_leadpath(self):
        extract(self.archive_lead_path, self.tmpdir)
        self.check_files(self.tmpdir)

    def test_extract_function_no_to_path(self):
        os.chdir(self.tmpdir)
        extract(self.archive_path)
        self.check_files(self.tmpdir)

    def check_files(self, tmpdir):
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, '1')))
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, '2')))
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, 'foo', '1')))
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, 'foo', '2')))
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, 'foo', 'bar', '1')))
        self.assertTrue(os.path.isfile(os.path.join(self.tmpdir, 'foo', 'bar', '2')))


class TestZip(ArchiveTester, unittest.TestCase):
    archive = 'foobar.zip'


class TestTar(ArchiveTester, unittest.TestCase):
    archive = 'foobar.tar'


class TestGzipTar(ArchiveTester, unittest.TestCase):
    archive = 'foobar.tar.gz'


class TestBzip2Tar(ArchiveTester, unittest.TestCase):
    archive = 'foobar.tar.bz2'
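The module under test in one line, a sketch with placeholder paths:

from django.utils.archive import extract

extract('/tmp/foobar.tar.gz', '/tmp/unpacked')  # format inferred from the extension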
repo_name: 4talesa/rethinkdb
ref: refs/heads/next
path: external/v8_3.30.33.16/tools/nacl-run.py
copies: 77
content:
#!/usr/bin/env python
#
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# This script executes the passed command line using the Native Client
# 'sel_ldr' container. It is derived from android-run.py.

import os
from os.path import join, dirname, abspath
import re
import subprocess
import sys
import tempfile

def Check(output, errors):
  failed = any([s.startswith('/system/bin/sh:') or s.startswith('ANDROID')
                for s in output.split('\n')])
  return 1 if failed else 0

def Execute(cmdline):
  (fd_out, outname) = tempfile.mkstemp()
  (fd_err, errname) = tempfile.mkstemp()
  process = subprocess.Popen(
    args=cmdline,
    shell=True,
    stdout=fd_out,
    stderr=fd_err,
  )
  exit_code = process.wait()
  os.close(fd_out)
  os.close(fd_err)
  output = file(outname).read()
  errors = file(errname).read()
  os.unlink(outname)
  os.unlink(errname)
  sys.stdout.write(output)
  sys.stderr.write(errors)
  return exit_code or Check(output, errors)

def Escape(arg):
  def ShouldEscape():
    for x in arg:
      if not x.isalnum() and x != '-' and x != '_':
        return True
    return False

  return arg if not ShouldEscape() else '"%s"' % (arg.replace('"', '\\"'))

def WriteToTemporaryFile(data):
  (fd, fname) = tempfile.mkstemp()
  os.close(fd)
  tmp_file = open(fname, "w")
  tmp_file.write(data)
  tmp_file.close()
  return fname

def GetNaClArchFromNexe(nexe):
  try:
    p = subprocess.Popen(['file', nexe], stdout=subprocess.PIPE)
    out, err = p.communicate()
    lines = [re.sub("\s+", " ", line) for line in out.split('\n')]
    if lines[0].find(": ELF 32-bit LSB executable, Intel 80386") > 0:
      return "x86_32"
    if lines[0].find(": ELF 64-bit LSB executable, x86-64") > 0:
      return "x86_64"
  except:
    print 'file ' + sys.argv[1] + ' failed'
  return None

def GetNaClResources(nexe):
  nacl_sdk_dir = os.environ["NACL_SDK_ROOT"]
  nacl_arch = GetNaClArchFromNexe(nexe)
  if sys.platform.startswith("linux"):
    platform = "linux"
  elif sys.platform == "darwin":
    platform = "mac"
  else:
    print("NaCl V8 testing is supported on Linux and MacOS only.")
    sys.exit(1)

  # Compare with '==', not 'is': identity checks on strings only work by
  # accident of interning.
  if nacl_arch == "x86_64":
    toolchain = platform + "_x86_glibc"
    sel_ldr = "sel_ldr_x86_64"
    irt = "irt_core_x86_64.nexe"
    libdir = "lib64"
  elif nacl_arch == "x86_32":
    toolchain = platform + "_x86_glibc"
    sel_ldr = "sel_ldr_x86_32"
    irt = "irt_core_x86_32.nexe"
    libdir = "lib32"
  elif nacl_arch == "arm":
    print("NaCl V8 ARM support is not ready yet.")
    sys.exit(1)
  else:
    print("Invalid nexe %s with NaCl arch %s" % (nexe, nacl_arch))
    sys.exit(1)

  nacl_sel_ldr = os.path.join(nacl_sdk_dir, "tools", sel_ldr)
  nacl_irt = os.path.join(nacl_sdk_dir, "tools", irt)

  return (nacl_sdk_dir, nacl_sel_ldr, nacl_irt)

def Main():
  if (len(sys.argv) == 1):
    print("Usage: %s <command-to-run-on-device>" % sys.argv[0])
    return 1

  args = [Escape(arg) for arg in sys.argv[1:]]

  (nacl_sdk_dir, nacl_sel_ldr, nacl_irt) = GetNaClResources(sys.argv[1])

  # sel_ldr Options:
  # -c -c: disable validation (for performance)
  # -a: allow file access
  # -B <irt>: load the IRT
  command = ' '.join([nacl_sel_ldr, '-c', '-c', '-a', '-B', nacl_irt, '--'] +
                     args)
  error_code = Execute(command)
  return error_code

if __name__ == '__main__':
  sys.exit(Main())
repo_name: MaximeGir/StarTrekCorpora
ref: refs/heads/master
path: utils/email_utils.py
copies: 1
content:
from email_validator import validate_email

from flask_app.flask_app import config


class EmailValidator(object):

    def __init__(self):
        self.blacklisted_domains = config.MAIL['BLACKLISTED_DOMAINS']
        self.is_valid = False

    def is_blacklisted(self, domain):
        return domain in self.blacklisted_domains

    def validate(self, email):
        ascii_mail = validate_email(email, check_deliverability=True)
        if not self.is_blacklisted(ascii_mail['domain']):
            self.is_valid = True
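A usage sketch; it assumes config.MAIL['BLACKLISTED_DOMAINS'] is a list of domain strings, and the address is a placeholder (check_deliverability=True performs a real DNS lookup):

validator = EmailValidator()
validator.validate('picard@enterprise.example')  # placeholder address
print(validator.is_valid)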
repo_name: rahul-c1/scikit-learn
ref: refs/heads/master
path: benchmarks/bench_isotonic.py
copies: 268
content:
""" Benchmarks of isotonic regression performance. We generate a synthetic dataset of size 10^n, for n in [min, max], and examine the time taken to run isotonic regression over the dataset. The timings are then output to stdout, or visualized on a log-log scale with matplotlib. This alows the scaling of the algorithm with the problem size to be visualized and understood. """ from __future__ import print_function import numpy as np import gc from datetime import datetime from sklearn.isotonic import isotonic_regression from sklearn.utils.bench import total_seconds import matplotlib.pyplot as plt import argparse def generate_perturbed_logarithm_dataset(size): return np.random.randint(-50, 50, size=n) \ + 50. * np.log(1 + np.arange(n)) def generate_logistic_dataset(size): X = np.sort(np.random.normal(size=size)) return np.random.random(size=size) < 1.0 / (1.0 + np.exp(-X)) DATASET_GENERATORS = { 'perturbed_logarithm': generate_perturbed_logarithm_dataset, 'logistic': generate_logistic_dataset } def bench_isotonic_regression(Y): """ Runs a single iteration of isotonic regression on the input data, and reports the total time taken (in seconds). """ gc.collect() tstart = datetime.now() isotonic_regression(Y) delta = datetime.now() - tstart return total_seconds(delta) if __name__ == '__main__': parser = argparse.ArgumentParser( description="Isotonic Regression benchmark tool") parser.add_argument('--iterations', type=int, required=True, help="Number of iterations to average timings over " "for each problem size") parser.add_argument('--log_min_problem_size', type=int, required=True, help="Base 10 logarithm of the minimum problem size") parser.add_argument('--log_max_problem_size', type=int, required=True, help="Base 10 logarithm of the maximum problem size") parser.add_argument('--show_plot', action='store_true', help="Plot timing output with matplotlib") parser.add_argument('--dataset', choices=DATASET_GENERATORS.keys(), required=True) args = parser.parse_args() timings = [] for exponent in range(args.log_min_problem_size, args.log_max_problem_size): n = 10 ** exponent Y = DATASET_GENERATORS[args.dataset](n) time_per_iteration = \ [bench_isotonic_regression(Y) for i in range(args.iterations)] timing = (n, np.mean(time_per_iteration)) timings.append(timing) # If we're not plotting, dump the timing to stdout if not args.show_plot: print(n, np.mean(time_per_iteration)) if args.show_plot: plt.plot(*zip(*timings)) plt.title("Average time taken running isotonic regression") plt.xlabel('Number of observations') plt.ylabel('Time (s)') plt.axis('tight') plt.loglog() plt.show()
repo_name: tuenti/Diamond
ref: refs/heads/master
path: src/diamond/handler/cloudwatch.py
copies: 27
content:
# coding=utf-8 """ Output the collected values to AWS CloudWatch Automatically adds the InstanceID Dimension #### Dependencies * [boto](http://boto.readthedocs.org/en/latest/index.html) #### Configuration Enable this handler * handers = diamond.handler.cloudwatch.cloudwatchHandler Example Config: [[cloudwatchHandler]] region = us-east-1 [[[LoadAvg01]]] collector = loadavg metric = 01 namespace = MachineLoad name = Avg01 unit = None [[[LoadAvg05]]] collector = loadavg metric = 05 namespace = MachineLoad name = Avg05 unit = None """ import sys import datetime from Handler import Handler from configobj import Section try: import boto import boto.ec2.cloudwatch import boto.utils except ImportError: boto = None class cloudwatchHandler(Handler): """ Implements the abstract Handler class Sending data to a AWS CloudWatch """ def __init__(self, config=None): """ Create a new instance of cloudwatchHandler class """ # Initialize Handler Handler.__init__(self, config) if not boto: self.log.error( "CloudWatch: Boto is not installed, please install boto.") return # Initialize Data self.connection = None # Initialize Options self.region = self.config['region'] instances = boto.utils.get_instance_metadata() if 'instance-id' not in instances: self.log.error('CloudWatch: Failed to load instance metadata') return self.instance_id = instances['instance-id'] self.log.debug("Setting InstanceID: " + self.instance_id) self.valid_config = ('region', 'collector', 'metric', 'namespace', 'name', 'unit') self.rules = [] for key_name, section in self.config.items(): if section.__class__ is Section: keys = section.keys() rules = {} for key in keys: if key not in self.valid_config: self.log.warning("invalid key %s in section %s", key, section.name) else: rules[key] = section[key] self.rules.append(rules) # Create CloudWatch Connection self._bind() def get_default_config_help(self): """ Returns the help text for the configuration options for this handler """ config = super(cloudwatchHandler, self).get_default_config_help() config.update({ 'region': '', 'metric': '', 'namespace': '', 'name': '', 'unit': '', 'collector': '', }) return config def get_default_config(self): """ Return the default config for the handler """ config = super(cloudwatchHandler, self).get_default_config() config.update({ 'region': 'us-east-1', 'collector': 'loadavg', 'metric': '01', 'namespace': 'MachineLoad', 'name': 'Avg01', 'unit': 'None', }) return config def _bind(self): """ Create CloudWatch Connection """ self.log.debug( "CloudWatch: Attempting to connect to CloudWatch at Region: %s", self.region) try: self.connection = boto.ec2.cloudwatch.connect_to_region(self.region) self.log.debug( "CloudWatch: Succesfully Connected to CloudWatch at Region: %s", self.region) except boto.exception.EC2ResponseError: self.log.error('CloudWatch: CloudWatch Exception Handler: ') def __del__(self): """ Destroy instance of the cloudWatchHandler class """ try: self.connection = None except AttributeError: pass def process(self, metric): """ Process a metric and send it to CloudWatch """ if not boto: return collector = str(metric.getCollectorPath()) metricname = str(metric.getMetricPath()) timestamp = datetime.datetime.fromtimestamp(metric.timestamp) # Send the data as ...... 
for rule in self.rules: self.log.debug( "Comparing Collector: [%s] with (%s) " "and Metric: [%s] with (%s)", str(rule['collector']), collector, str(rule['metric']), metricname ) if (str(rule['collector']) == collector and str(rule['metric']) == metricname): self.log.debug( "CloudWatch: Attempting to publish metric: %s to %s " "with value (%s) @%s", rule['name'], rule['namespace'], str(metric.value), str(metric.timestamp) ) try: self.connection.put_metric_data( str(rule['namespace']), str(rule['name']), str(metric.value), timestamp, str(rule['unit']), {'InstanceID': self.instance_id}) self.log.debug( "CloudWatch: Successfully published metric: %s to" " %s with value (%s)", rule['name'], rule['namespace'], str(metric.value) ) except AttributeError, e: self.log.error( "CloudWatch: Failed publishing - %s ", str(e)) except Exception: # Rough connection re-try logic. self.log.error( "CloudWatch: Failed publishing - %s ", str(sys.exc_info()[0])) self._bind()
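# Illustrative addition (not part of the original handler): the shape of a
# parsed rule and the equality check process() performs; all values here
# are hypothetical.
def _example_rule_match(collector='loadavg', metricname='01'):
    rule = {'collector': 'loadavg', 'metric': '01',
            'namespace': 'MachineLoad', 'name': 'Avg01', 'unit': 'None'}
    return (str(rule['collector']) == collector and
            str(rule['metric']) == metricname)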
avtoritet/scrapy
refs/heads/master
scrapy/contrib/statsmailer.py
144
import warnings

from scrapy.exceptions import ScrapyDeprecationWarning

warnings.warn("Module `scrapy.contrib.statsmailer` is deprecated, "
              "use `scrapy.extensions.statsmailer` instead",
              ScrapyDeprecationWarning, stacklevel=2)

from scrapy.extensions.statsmailer import *
dsaraujo/circulante
refs/heads/master
django/contrib/sessions/middleware.py
323
import time

from django.conf import settings
from django.utils.cache import patch_vary_headers
from django.utils.http import cookie_date
from django.utils.importlib import import_module


class SessionMiddleware(object):
    def process_request(self, request):
        engine = import_module(settings.SESSION_ENGINE)
        session_key = request.COOKIES.get(settings.SESSION_COOKIE_NAME, None)
        request.session = engine.SessionStore(session_key)

    def process_response(self, request, response):
        """
        If request.session was modified, or if the configuration is to save
        the session every time, save the changes and set a session cookie.
        """
        try:
            accessed = request.session.accessed
            modified = request.session.modified
        except AttributeError:
            pass
        else:
            if accessed:
                patch_vary_headers(response, ('Cookie',))
            if modified or settings.SESSION_SAVE_EVERY_REQUEST:
                if request.session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    max_age = request.session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = cookie_date(expires_time)
                # Save the session data and refresh the client cookie.
                request.session.save()
                response.set_cookie(settings.SESSION_COOKIE_NAME,
                        request.session.session_key, max_age=max_age,
                        expires=expires,
                        domain=settings.SESSION_COOKIE_DOMAIN,
                        path=settings.SESSION_COOKIE_PATH,
                        secure=settings.SESSION_COOKIE_SECURE or None,
                        httponly=settings.SESSION_COOKIE_HTTPONLY or None)
        return response
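# Illustrative addition (not part of the original module): how the cookie
# expiry set above is computed for a fixed-age session. cookie_date is the
# real Django helper used above; the two-week max_age is an example value.
def _example_expiry(max_age=1209600):
    expires_time = time.time() + max_age
    return cookie_date(expires_time)  # e.g. 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'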
xujun10110/golismero
refs/heads/master
thirdparty_libs/requests/packages/charade/jisfreq.py
3130
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # # Japanese frequency table, applied to both S-JIS and EUC-JP # They are sorted in order. # 128 --> 0.77094 # 256 --> 0.85710 # 512 --> 0.92635 # 1024 --> 0.97130 # 2048 --> 0.99431 # # Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 # Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 # # Typical Distribution Ratio, 25% of IDR JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 # Char to FreqOrder table , JIS_TABLE_SIZE = 4368 JISCharToFreqOrder = ( 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 
55,1079, 312, # 352 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 3936,1516,1274, 
555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 1570,2245, 722,1747,2217, 
905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, 
# 2944 1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 1582, 783, 212, 
553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 
616,1432,1595,1018, 172,4360, # 4240 2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 #Everything below is of no interest for detection purpose 2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384 6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400 6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416 6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432 6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448 4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464 4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480 3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496 3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512 4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528 3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544 6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560 4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576 6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592 6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608 6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624 6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640 6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656 6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672 3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688 3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704 6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720 2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736 4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752 4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768 4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784 6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800 3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816 4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832 4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848 6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864 
4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880 6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896 3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912 2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928 4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944 2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960 6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976 4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992 6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008 6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024 6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040 4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056 6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072 2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088 6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104 4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120 6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136 4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152 4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168 6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184 6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200 6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216 3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232 1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248 3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264 3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280 4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296 6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312 3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328 6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344 3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360 3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376 2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392 6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408 6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424 3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440 6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456 3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472 6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488 6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504 
6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520 4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536 6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552 4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568 3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584 3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600 6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616 6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632 4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648 6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664 6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680 6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696 6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712 6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728 6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744 4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760 4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776 3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792 6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808 4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824 2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840 6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856 6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872 4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888 2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904 4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920 2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936 4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952 4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968 4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984 6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000 3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016 6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032 3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048 6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064 2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080 3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096 7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112 2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128 3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144 
3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160 3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176 3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192 7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208 7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224 7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240 7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256 7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272 4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288 3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304 3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320 4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336 3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352 3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368 7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384 4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400 7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416 7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432 7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448 7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464 7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480 4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496 4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512 7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528 3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544 4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560 7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576 7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592 4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608 3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624 3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640 7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656 4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672 4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688 4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704 4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720 4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736 4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752 7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768 7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784 
7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800 7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816 7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832 2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848 3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864 7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880 7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896 3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912 4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928 3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944 3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960 2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976 7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992 7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008 4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024 3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040 3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056 7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072 7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088 7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104 4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120 7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136 2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152 3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168 4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184 7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200 4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216 4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232 7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248 7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264 5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280 7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296 7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312 7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328 7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344 7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360 5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376 5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392 7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408 3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424 
7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440 7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456 3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472 7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488 7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504 1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520 3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536 4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552 2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568 3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584 2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600 5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616 4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632 4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648 5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664 7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680 7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696 7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712 7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728 3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744 7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760 3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776 7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792 4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808 7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824 7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840 7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856 7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872 7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888 7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904 7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920 7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936 7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952 7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968 7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984 7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000 8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016 8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032 8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048 8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064 
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080 8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096 8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112 8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128 8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144 8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160 8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176 8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192 8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208 8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224 8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240 8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256 8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272 # flake8: noqa
2013Commons/HUE-SHARK
refs/heads/master
desktop/core/ext-py/Django-1.2.3/build/lib.linux-i686-2.7/django/conf/locale/bg/formats.py
36
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd.m.Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
# NUMBER_GROUPING =
smsisko/Voodoo-Mock
refs/heads/master
voodoo/voodoodbiterator.py
2
import shelve
import iterateapi
import atexit
import filelock

class VoodooDBIterator( iterateapi.IterateAPI ):
    def __init__( self, perFileSettingsNotUsed, dbFilename ):
        self._dbFilename = dbFilename
        iterateapi.IterateAPI.__init__( self )
        self._db = {}
        atexit.register( self._atExit )

    def _atExit( self ):
        if not self._dbFilename:
            return
        with filelock.FileLock( self._dbFilename, timeout = 2 ) as lock:
            db = shelve.open( self._dbFilename, "c" )
            db.update( self._db )
            db.close()

    def structForwardDeclaration( self, ** kwargs ): pass
    def enterStruct( self, ** kwargs ): pass
    def leaveStruct( self, ** kwargs ): pass
    def enterClass( self, ** kwargs ): pass
    def leaveClass( self, ** kwargs ): pass
    def variableDeclaration( self, ** kwargs ): pass
    def typedef( self, ** kwargs ): pass
    def enum( self, ** kwargs ): pass
    def fieldDeclaration( self, ** kwargs ): pass
    def enterNamespace( self, ** kwargs ): pass
    def leaveNamespace( self, ** kwargs ): pass
    def accessSpec( self, ** kwargs ): pass
    def using( self, ** kwargs ): pass

    def functionForwardDeclaration( self, decomposition ):
        self._db[ self._fullIdentifier( decomposition.name ) ] = decomposition

    def functionDefinition( self, decomposition ):
        self._db[ self._fullIdentifier( decomposition.name ) ] = decomposition

    def constructorDefinition( self, decomposition ):
        self._db[ self._fullIdentifier( decomposition.name ) ] = decomposition

    def method( self, decomposition ):
        self._db[ self._fullIdentifier( decomposition.name ) ] = decomposition

    def _fullIdentifier( self, identifier ):
        return "::".join( [ identifier ] )
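# Illustrative addition ( not part of the original file ): reading back the
# declarations that _atExit persisted above. shelve.open is the same standard
# library call used there; the filename is hypothetical.
def _exampleReadDb( dbFilename = "voodoo.db" ):
    db = shelve.open( dbFilename, "r" )
    try:
        return dict( db )
    finally:
        db.close()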
jscissr/djangae
refs/heads/master
djangae/noseplugin.py
21
from nose.plugins import Plugin

from djangae.test_runner import init_testbed


class DjangaePlugin(Plugin):
    enabled = True

    def configure(self, options, conf):
        pass

    def startTest(self, test):
        self.bed = init_testbed()

    def stopTest(self, test):
        self.bed.deactivate()
westinedu/newertrends
refs/heads/master
django/forms/util.py
311
from django.utils.html import conditional_escape
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe

# Import ValidationError so that it can be imported from this
# module to maintain backwards compatibility.
from django.core.exceptions import ValidationError


def flatatt(attrs):
    """
    Convert a dictionary of attributes to a single string.
    The returned string will contain a leading space followed by key="value",
    XML-style pairs. It is assumed that the keys do not need to be
    XML-escaped. If the passed dictionary is empty, then return an empty
    string.
    """
    return u''.join([u' %s="%s"' % (k, conditional_escape(v))
                     for k, v in attrs.items()])


class ErrorDict(dict, StrAndUnicode):
    """
    A collection of errors that knows how to display itself in various
    formats.

    The dictionary keys are the field names, and the values are the errors.
    """
    def __unicode__(self):
        return self.as_ul()

    def as_ul(self):
        if not self:
            return u''
        return mark_safe(u'<ul class="errorlist">%s</ul>'
                         % ''.join([u'<li>%s%s</li>' % (k, force_unicode(v))
                                    for k, v in self.items()]))

    def as_text(self):
        return u'\n'.join([u'* %s\n%s' % (k, u'\n'.join(
            [u' * %s' % force_unicode(i) for i in v]))
            for k, v in self.items()])


class ErrorList(list, StrAndUnicode):
    """
    A collection of errors that knows how to display itself in various
    formats.
    """
    def __unicode__(self):
        return self.as_ul()

    def as_ul(self):
        if not self:
            return u''
        return mark_safe(u'<ul class="errorlist">%s</ul>'
                         % ''.join([u'<li>%s</li>'
                                    % conditional_escape(force_unicode(e))
                                    for e in self]))

    def as_text(self):
        if not self:
            return u''
        return u'\n'.join([u'* %s' % force_unicode(e) for e in self])

    def __repr__(self):
        return repr([force_unicode(e) for e in self])
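# Illustrative addition (not in the original module): what flatatt() above
# produces for a small attribute dict; the values are made up.
def _example_flatatt():
    return flatatt({'id': 'name'})  # -> u' id="name"'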
monuszko/django-nyancat
refs/heads/master
nyancat/migrations/0002_person_email.py
1
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('nyancat', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='person',
            name='email',
            field=models.EmailField(max_length=254, blank=True),
        ),
    ]
joerocklin/gem5
refs/heads/master
src/mem/ruby/network/BasicRouter.py
53
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Brad Beckmann

from m5.params import *
from ClockedObject import ClockedObject

class BasicRouter(ClockedObject):
    type = 'BasicRouter'
    cxx_header = "mem/ruby/network/BasicRouter.hh"

    router_id = Param.Int("ID in relation to other routers")
patcon/open-cabinet
refs/heads/master
venv/lib/python2.7/site-packages/django/conf/locale/mn/formats.py
619
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'g:i A'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
# MONTH_DAY_FORMAT =
SHORT_DATE_FORMAT = 'j M Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
# DECIMAL_SEPARATOR =
# THOUSAND_SEPARATOR =
# NUMBER_GROUPING =
s0lst1c3/eaphammer
refs/heads/master
core/redirect_server.py
1
import time
import sys
import random
import string

from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
from multiprocessing import Process

upper_alnum = string.ascii_uppercase + string.digits

bind_addr = None
bind_port = None


class RedirectHandler(BaseHTTPRequestHandler):

    def do_HEAD(s):
        # Redirect every request to a randomized file:// share path.
        s.send_response(302)
        share_path = ''.join(random.choice(upper_alnum) for _ in range(8))
        new_location = 'file://%s:%s/%s' % (bind_addr, bind_port, share_path)
        s.send_header('Location', new_location)
        s.end_headers()

    def do_GET(s):
        s.do_HEAD()

    def do_POST(s):
        s.do_HEAD()

    def do_OPTIONS(s):
        s.do_HEAD()

    def do_PUT(s):
        s.do_HEAD()


class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    ''' yay '''


class RedirectServer(object):

    instance = None

    @staticmethod
    def get_instance():
        # Cache the singleton on the class so repeated calls return the same
        # server. (The original assigned to a local `instance`, which created
        # a fresh, unconfigured object on every call.)
        if RedirectServer.instance is None:
            RedirectServer.instance = RedirectServer()
        return RedirectServer.instance

    def configure(self, lbind_addr, lbind_port=80):
        global bind_addr
        global bind_port

        self.bind_addr = lbind_addr
        self.bind_port = lbind_port

        bind_addr = lbind_addr
        bind_port = lbind_port

    @staticmethod
    def _start(bind_addr, bind_port):
        server_class = ThreadedHTTPServer
        httpd = server_class((bind_addr, bind_port), RedirectHandler)
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            pass
        httpd.server_close()

    def start(self):
        args = (self.bind_addr, self.bind_port,)
        self.proc = Process(target=self._start, args=args)
        self.proc.daemon = True
        self.proc.start()
        time.sleep(4)

    def stop(self):
        self.proc.terminate()
        self.proc.join()
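A minimal driver sketch for the server above; the bind address and port are placeholders:

server = RedirectServer.get_instance()
server.configure('10.0.0.1', 8080)   # placeholder bind address/port
server.start()                       # forks the HTTP process, then waits ~4s
# ... serve redirects for as long as needed ...
server.stop()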
caldwell/servo
refs/heads/master
tests/wpt/css-tests/tools/html5lib/html5lib/serializer/__init__.py
1731
from __future__ import absolute_import, division, unicode_literals

from .. import treewalkers

from .htmlserializer import HTMLSerializer


def serialize(input, tree="etree", format="html", encoding=None,
              **serializer_opts):
    # XXX: Should we cache this?
    walker = treewalkers.getTreeWalker(tree)
    if format == "html":
        s = HTMLSerializer(**serializer_opts)
    else:
        raise ValueError("type must be html")
    return s.render(walker(input), encoding)
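A usage sketch for serialize(), assuming html5lib is importable as an installed package:

import html5lib
from html5lib.serializer import serialize

doc = html5lib.parse('<p>Hello <b>world</b>')
print(serialize(doc, tree='etree', omit_optional_tags=False))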
zengenti/ansible
refs/heads/devel
lib/ansible/modules/storage/netapp/__init__.py
12133432
adelton/django
refs/heads/master
django/contrib/admin/decorators.py
558
def register(*models, **kwargs):
    """
    Registers the given model(s) classes and wrapped ModelAdmin class with
    admin site:

        @register(Author)
        class AuthorAdmin(admin.ModelAdmin):
            pass

    A kwarg of `site` can be passed as the admin site, otherwise the default
    admin site will be used.
    """
    from django.contrib.admin import ModelAdmin
    from django.contrib.admin.sites import site, AdminSite

    def _model_admin_wrapper(admin_class):
        admin_site = kwargs.pop('site', site)

        if not isinstance(admin_site, AdminSite):
            raise ValueError('site must subclass AdminSite')

        if not issubclass(admin_class, ModelAdmin):
            raise ValueError('Wrapped class must subclass ModelAdmin.')

        admin_site.register(models, admin_class=admin_class)

        return admin_class
    return _model_admin_wrapper
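A sketch of the decorator in use; the app and model names are placeholders:

from django.contrib import admin
from myapp.models import Author  # hypothetical model

@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
    list_display = ('name',)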
bspink/django
refs/heads/master
django/conf/project_template/project_name/urls.py
244
"""{{ project_name }} URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/{{ docs_version }}/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ]
lulandco/SickRage
refs/heads/develop
lib/hachoir_parser/misc/chm.py
74
""" InfoTech Storage Format (ITSF) parser, used by Microsoft's HTML Help (.chm) Document: - Microsoft's HTML Help (.chm) format http://www.wotsit.org (search "chm") - chmlib library http://www.jedrea.com/chmlib/ - Unofficial CHM Spec http://savannah.nongnu.org/projects/chmspec - Microsoft's HTML Help (.chm) format http://www.speakeasy.org/~russotto/chm/chmformat.html Author: Victor Stinner Creation date: 2007-03-04 """ from hachoir_core.field import (Field, FieldSet, ParserError, RootSeekableFieldSet, Int32, UInt16, UInt32, UInt64, RawBytes, PaddingBytes, Enum, String) from hachoir_core.endian import LITTLE_ENDIAN from hachoir_parser import HachoirParser from hachoir_parser.common.win32 import GUID from hachoir_parser.common.win32_lang_id import LANGUAGE_ID from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler class CWord(Field): """ Compressed double-word """ def __init__(self, parent, name, description=None): Field.__init__(self, parent, name, 8, description) endian = self._parent.endian stream = self._parent.stream addr = self.absolute_address value = 0 byte = stream.readBits(addr, 8, endian) while byte & 0x80: value <<= 7 value += (byte & 0x7f) self._size += 8 if 64 < self._size: raise ParserError("CHM: CWord is limited to 64 bits") addr += 8 byte = stream.readBits(addr, 8, endian) value <<= 7 value += byte self.createValue = lambda: value class Filesize_Header(FieldSet): def createFields(self): yield textHandler(UInt32(self, "unknown[]", "0x01FE"), hexadecimal) yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal) yield filesizeHandler(UInt64(self, "file_size")) yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal) yield textHandler(UInt32(self, "unknown[]", "0x0"), hexadecimal) class ITSP(FieldSet): def __init__(self, *args): FieldSet.__init__(self, *args) self._size = self["size"].value * 8 def createFields(self): yield String(self, "magic", 4, "ITSP", charset="ASCII") yield UInt32(self, "version", "Version (=1)") yield filesizeHandler(UInt32(self, "size", "Length (in bytes) of the directory header (84)")) yield UInt32(self, "unknown[]", "(=10)") yield filesizeHandler(UInt32(self, "block_size", "Directory block size")) yield UInt32(self, "density", "Density of quickref section, usually 2") yield UInt32(self, "index_depth", "Depth of the index tree") yield Int32(self, "nb_dir", "Chunk number of root index chunk") yield UInt32(self, "first_pmgl", "Chunk number of first PMGL (listing) chunk") yield UInt32(self, "last_pmgl", "Chunk number of last PMGL (listing) chunk") yield Int32(self, "unknown[]", "-1") yield UInt32(self, "nb_dir_chunk", "Number of directory chunks (total)") yield Enum(UInt32(self, "lang_id", "Windows language ID"), LANGUAGE_ID) yield GUID(self, "system_uuid", "{5D02926A-212E-11D0-9DF9-00A0C922E6EC}") yield filesizeHandler(UInt32(self, "size2", "Same value than size")) yield Int32(self, "unknown[]", "-1") yield Int32(self, "unknown[]", "-1") yield Int32(self, "unknown[]", "-1") class ITSF(FieldSet): def createFields(self): yield String(self, "magic", 4, "ITSF", charset="ASCII") yield UInt32(self, "version") yield UInt32(self, "header_size", "Total header length (in bytes)") yield UInt32(self, "one") yield UInt32(self, "last_modified", "Lower 32 bits of the time expressed in units of 0.1 us") yield Enum(UInt32(self, "lang_id", "Windows Language ID"), LANGUAGE_ID) yield GUID(self, "dir_uuid", "{7C01FD10-7BAA-11D0-9E0C-00A0-C922-E6EC}") yield GUID(self, "stream_uuid", "{7C01FD11-7BAA-11D0-9E0C-00A0-C922-E6EC}") yield 
UInt64(self, "filesize_offset") yield filesizeHandler(UInt64(self, "filesize_len")) yield UInt64(self, "dir_offset") yield filesizeHandler(UInt64(self, "dir_len")) if 3 <= self["version"].value: yield UInt64(self, "data_offset") class PMGL_Entry(FieldSet): def createFields(self): yield CWord(self, "name_len") yield String(self, "name", self["name_len"].value, charset="UTF-8") yield CWord(self, "section", "Section number that the entry data is in.") yield CWord(self, "start", "Start offset of the data") yield filesizeHandler(CWord(self, "length", "Length of the data")) def createDescription(self): return "%s (%s)" % (self["name"].value, self["length"].display) class PMGL(FieldSet): def createFields(self): # Header yield String(self, "magic", 4, "PMGL", charset="ASCII") yield filesizeHandler(Int32(self, "free_space", "Length of free space and/or quickref area at end of directory chunk")) yield Int32(self, "unknown") yield Int32(self, "previous", "Chunk number of previous listing chunk") yield Int32(self, "next", "Chunk number of previous listing chunk") # Entries stop = self.size - self["free_space"].value * 8 entry_count = 0 while self.current_size < stop: yield PMGL_Entry(self, "entry[]") entry_count+=1 # Padding quickref_frequency = 1 + (1 << self["/dir/itsp/density"].value) num_quickref = (entry_count // quickref_frequency) if entry_count % quickref_frequency == 0: num_quickref -= 1 print self.current_size//8, quickref_frequency, num_quickref padding = (self["free_space"].value - (num_quickref*2+2)) if padding: yield PaddingBytes(self, "padding", padding) for i in range(num_quickref*quickref_frequency, 0, -quickref_frequency): yield UInt16(self, "quickref[%i]"%i) yield UInt16(self, "entry_count") class PMGI_Entry(FieldSet): def createFields(self): yield CWord(self, "name_len") yield String(self, "name", self["name_len"].value, charset="UTF-8") yield CWord(self, "page") def createDescription(self): return "%s (page #%u)" % (self["name"].value, self["page"].value) class PMGI(FieldSet): def createFields(self): yield String(self, "magic", 4, "PMGI", charset="ASCII") yield filesizeHandler(UInt32(self, "free_space", "Length of free space and/or quickref area at end of directory chunk")) stop = self.size - self["free_space"].value * 8 while self.current_size < stop: yield PMGI_Entry(self, "entry[]") padding = (self.size - self.current_size) // 8 if padding: yield PaddingBytes(self, "padding", padding) class Directory(FieldSet): def createFields(self): yield ITSP(self, "itsp") block_size = self["itsp/block_size"].value * 8 nb_dir = self["itsp/nb_dir"].value if nb_dir < 0: nb_dir = 1 for index in xrange(nb_dir): yield PMGL(self, "pmgl[]", size=block_size) if self.current_size < self.size: yield PMGI(self, "pmgi", size=block_size) class NameList(FieldSet): def createFields(self): yield UInt16(self, "length", "Length of name list in 2-byte blocks") yield UInt16(self, "count", "Number of entries in name list") for index in range(self["count"].value): length=UInt16(self, "name_len[]", "Length of name in 2-byte blocks, excluding terminating null") yield length yield String(self, "name[]", length.value*2+2, charset="UTF-16-LE") class ControlData(FieldSet): def createFields(self): yield UInt32(self, "count", "Number of DWORDS in this struct") yield String(self, "type", 4, "Type of compression") if self["type"].value!='LZXC': return yield UInt32(self, "version", "Compression version") version=self["version"].value if version==1: block='bytes' else: block='32KB blocks' yield UInt32(self, "reset_interval", 
"LZX: Reset interval in %s"%block) yield UInt32(self, "window_size", "LZX: Window size in %s"%block) yield UInt32(self, "cache_size", "LZX: Cache size in %s"%block) yield UInt32(self, "unknown[]") class ResetTable(FieldSet): def createFields(self): yield UInt32(self, "unknown[]", "Version number?") yield UInt32(self, "count", "Number of entries") yield UInt32(self, "entry_size", "Size of each entry") yield UInt32(self, "header_size", "Size of this header") yield UInt64(self, "uncompressed_size") yield UInt64(self, "compressed_size") yield UInt64(self, "block_size", "Block size in bytes") for i in xrange(self["count"].value): yield UInt64(self, "block_location[]", "location in compressed data of 1st block boundary in uncompressed data") class SystemEntry(FieldSet): ENTRY_TYPE={0:"HHP: [OPTIONS]: Contents File", 1:"HHP: [OPTIONS]: Index File", 2:"HHP: [OPTIONS]: Default Topic", 3:"HHP: [OPTIONS]: Title", 4:"File Metadata", 5:"HHP: [OPTIONS]: Default Window", 6:"HHP: [OPTIONS]: Compiled file", # 7 present only in files with Binary Index; unknown function # 8 unknown function 9: "Version", 10: "Timestamp", # 11 only in Binary TOC files 12: "Number of Info Types", 13: "#IDXHDR file", # 14 unknown function # 15 checksum?? 16:"HHP: [OPTIONS]: Default Font", } def createFields(self): yield Enum(UInt16(self, "type", "Type of entry"),self.ENTRY_TYPE) yield UInt16(self, "length", "Length of entry") yield RawBytes(self, "data", self["length"].value) def createDescription(self): return '#SYSTEM Entry, Type %s'%self["type"].display class SystemFile(FieldSet): def createFields(self): yield UInt32(self, "version", "Either 2 or 3") while self.current_size < self.size: yield SystemEntry(self, "entry[]") class ChmFile(HachoirParser, RootSeekableFieldSet): MAGIC = "ITSF\3\0\0\0" PARSER_TAGS = { "id": "chm", "category": "misc", "file_ext": ("chm",), "min_size": 4*8, "magic": ((MAGIC, 0),), "description": "Microsoft's HTML Help (.chm)", } endian = LITTLE_ENDIAN def __init__(self, stream, **args): RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self)) HachoirParser.__init__(self, stream, **args) def validate(self): if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC: return "Invalid magic" return True def createFields(self): yield ITSF(self, "itsf") yield Filesize_Header(self, "file_size", size=self["itsf/filesize_len"].value*8) self.seekByte(self["itsf/dir_offset"].value) directory=Directory(self, "dir", size=self["itsf/dir_len"].value*8) yield directory otherentries = {} for pmgl in directory.array("pmgl"): for entry in pmgl.array("entry"): if entry["section"].value != 0: otherentries.setdefault(entry["section"].value,[]).append(entry) continue if entry["length"].value == 0: continue self.seekByte(self["itsf/data_offset"].value+entry["start"].value) name = entry["name"].value if name == "::DataSpace/NameList": yield NameList(self, "name_list") elif name.startswith('::DataSpace/Storage/'): sectname = str(name.split('/')[2]) if name.endswith('/SpanInfo'): yield UInt64(self, "%s_spaninfo"%sectname, "Size of uncompressed data in the %s section"%sectname) elif name.endswith('/ControlData'): yield ControlData(self, "%s_controldata"%sectname, "Data about the compression scheme", size=entry["length"].value*8) elif name.endswith('/Transform/List'): yield String(self, "%s_transform_list"%sectname, 38, description="Transform/List element", charset="UTF-16-LE") elif name.endswith('/Transform/{7FC28940-9D31-11D0-9B27-00A0C91E9C7C}/InstanceData/ResetTable'): yield ResetTable(self, 
"%s_reset_table"%sectname, "LZX Reset Table", size=entry["length"].value*8) elif name.endswith('/Content'): # eventually, a LZX wrapper will appear here, we hope! yield RawBytes(self, "%s_content"%sectname, entry["length"].value, "Content for the %s section"%sectname) else: yield RawBytes(self, "entry_data[]", entry["length"].value, name) elif name=="/#SYSTEM": yield SystemFile(self, "system_file", size=entry["length"].value*8) else: yield RawBytes(self, "entry_data[]", entry["length"].value, name) def getFile(self, filename): page=0 if 'pmgi' in self['/dir']: for entry in self['/dir/pmgi'].array('entry'): if entry['name'].value <= filename: page=entry['page'].value pmgl=self['/dir/pmgl[%i]'%page] for entry in pmgl.array('entry'): if entry['name'].value == filename: return entry raise ParserError("File '%s' not found!"%filename) def createContentSize(self): return self["file_size/file_size"].value * 8
AntonPalich/sublime-evernote
refs/heads/master
lib/pygments/lexers/__init__.py
9
# -*- coding: utf-8 -*- """ pygments.lexers ~~~~~~~~~~~~~~~ Pygments lexers. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys import types import fnmatch import re from os.path import basename from pygments.lexers._mapping import LEXERS from pygments.modeline import get_filetype_from_buffer from pygments.plugin import find_plugin_lexers from pygments.util import ClassNotFound, itervalues __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', 'guess_lexer'] + list(LEXERS) _lexer_cache = {} _pattern_cache = {} def _fn_matches(fn, glob): """ Return whether the supplied file name fn matches pattern filename """ if glob not in _pattern_cache: pattern = re.compile(fnmatch.translate(glob)) _pattern_cache[glob] = pattern else: pattern = _pattern_cache[glob] return pattern.match(fn) def _load_lexers(module_name): """ Load a lexer (and all others in the module too). """ mod = __import__(module_name, None, None, ['__all__']) for lexer_name in mod.__all__: cls = getattr(mod, lexer_name) _lexer_cache[cls.name] = cls def get_all_lexers(): """ Return a generator of tuples in the form ``(name, aliases, filenames, mimetypes)`` of all know lexers. """ for item in itervalues(LEXERS): yield item[1:] for lexer in find_plugin_lexers(): yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes def find_lexer_class(name): """ Lookup a lexer class by name. Return None if not found. """ if name in _lexer_cache: return _lexer_cache[name] # lookup builtin lexers for module_name, lname, aliases, _, _ in itervalues(LEXERS): if name == lname: _load_lexers(module_name) return _lexer_cache[name] # continue with lexers from setuptools entrypoints for cls in find_plugin_lexers(): if cls.name == name: return cls def get_lexer_by_name(_alias, **options): """ Get a lexer by an alias. """ # lookup builtin lexers for module_name, name, aliases, _, _ in itervalues(LEXERS): if _alias.lower() in aliases: if name not in _lexer_cache: _load_lexers(module_name) return _lexer_cache[name](**options) # continue with lexers from setuptools entrypoints for cls in find_plugin_lexers(): if _alias in cls.aliases: return cls(**options) raise ClassNotFound('no lexer for alias %r found' % _alias) def get_lexer_for_filename(_fn, code=None, **options): """ Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyze_text()`` to figure out which one is more appropriate. """ matches = [] fn = basename(_fn) for modname, name, _, filenames, _ in itervalues(LEXERS): for filename in filenames: if _fn_matches(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append((_lexer_cache[name], filename)) for cls in find_plugin_lexers(): for filename in cls.filenames: if _fn_matches(fn, filename): matches.append((cls, filename)) if sys.version_info > (3,) and isinstance(code, bytes): # decode it, since all analyse_text functions expect unicode code = code.decode('latin1') def get_rating(info): cls, filename = info # explicit patterns get a bonus bonus = '*' not in filename and 0.5 or 0 # The class _always_ defines analyse_text because it's included in # the Lexer class. The default implementation returns None which # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py # to find lexers which need it overridden. 
if code: return cls.analyse_text(code) + bonus return cls.priority + bonus if matches: matches.sort(key=get_rating) #print "Possible lexers, after sort:", matches return matches[-1][0](**options) raise ClassNotFound('no lexer for filename %r found' % _fn) def get_lexer_for_mimetype(_mime, **options): """ Get a lexer for a mimetype. """ for modname, name, _, _, mimetypes in itervalues(LEXERS): if _mime in mimetypes: if name not in _lexer_cache: _load_lexers(modname) return _lexer_cache[name](**options) for cls in find_plugin_lexers(): if _mime in cls.mimetypes: return cls(**options) raise ClassNotFound('no lexer for mimetype %r found' % _mime) def _iter_lexerclasses(): """ Return an iterator over all lexer classes. """ for key in sorted(LEXERS): module_name, name = LEXERS[key][:2] if name not in _lexer_cache: _load_lexers(module_name) yield _lexer_cache[name] for lexer in find_plugin_lexers(): yield lexer def guess_lexer_for_filename(_fn, _text, **options): """ Lookup all lexers that handle those filenames primary (``filenames``) or secondary (``alias_filenames``). Then run a text analysis for those lexers and choose the best result. usage:: >>> from pygments.lexers import guess_lexer_for_filename >>> guess_lexer_for_filename('hello.html', '<%= @foo %>') <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c> >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>') <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac> >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }') <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c> """ fn = basename(_fn) primary = None matching_lexers = set() for lexer in _iter_lexerclasses(): for filename in lexer.filenames: if _fn_matches(fn, filename): matching_lexers.add(lexer) primary = lexer for filename in lexer.alias_filenames: if _fn_matches(fn, filename): matching_lexers.add(lexer) if not matching_lexers: raise ClassNotFound('no lexer for filename %r found' % fn) if len(matching_lexers) == 1: return matching_lexers.pop()(**options) result = [] for lexer in matching_lexers: rv = lexer.analyse_text(_text) if rv == 1.0: return lexer(**options) result.append((rv, lexer)) # since py3 can no longer sort by class name by default, here is the # sorting function that works in both def type_sort(type_): return (type_[0], type_[1].__name__) result.sort(key=type_sort) if not result[-1][0] and primary is not None: return primary(**options) return result[-1][1](**options) def guess_lexer(_text, **options): """ Guess a lexer by strong distinctions in the text (eg, shebang). """ # try to get a vim modeline first ft = get_filetype_from_buffer(_text) if ft is not None: try: return get_lexer_by_name(ft, **options) except ClassNotFound: pass best_lexer = [0.0, None] for lexer in _iter_lexerclasses(): rv = lexer.analyse_text(_text) if rv == 1.0: return lexer(**options) if rv > best_lexer[0]: best_lexer[:] = (rv, lexer) if not best_lexer[0] or best_lexer[1] is None: raise ClassNotFound('no lexer matching the text found') return best_lexer[1](**options) class _automodule(types.ModuleType): """Automatically import lexers.""" def __getattr__(self, name): info = LEXERS.get(name) if info: _load_lexers(info[0]) cls = _lexer_cache[info[1]] setattr(self, name, cls) return cls raise AttributeError(name) oldmod = sys.modules['pygments.lexers'] newmod = _automodule('pygments.lexers') newmod.__dict__.update(oldmod.__dict__) sys.modules['pygments.lexers'] = newmod del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
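A brief sketch of the lookup helpers defined above:

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, guess_lexer

print(get_lexer_by_name('python').name)             # 'Python'
print(get_lexer_for_filename('setup.py').name)      # 'Python'
print(guess_lexer('#!/usr/bin/env python\n').name)  # shebang-based guess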
cypod/arsenalsuite
refs/heads/master
cpp/apps/bach/plugins/gen_thumbs.py
10
import initbach
import bachutil
from Bach import *
from PyQt4.QtCore import *
import os


def listMissingThumbs(width):
    assets = BachAsset.select("exclude=false")
    for asset in assets:
        path = asset.path()
        cachePath = "%s/%s_%sx%s.png" % ("/drd/reference/.thumbnails", path, width, width)
        if os.path.exists(cachePath):
            continue
        if not QFile.exists(QFileInfo(cachePath).absolutePath()):
            QDir().mkpath(QFileInfo(cachePath).absolutePath())
        print bachutil.thumbCommand(path, width)


listMissingThumbs(256)
listMissingThumbs(512)
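The thumbnail cache-path convention used above, sketched for a single asset; the asset path is a placeholder:

# Sketch: /drd/reference/.thumbnails/<asset path>_<w>x<w>.png
path = "/shots/sq010/plate.0001.exr"  # placeholder asset path
print "%s/%s_%sx%s.png" % ("/drd/reference/.thumbnails", path, 256, 256)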
twister/twister.github.io
refs/heads/master
binaries/PacketSnifferPlugin/PacketSniffer/PacketSnifferClasses.py
4
#!/usr/bin/env python # version: 2.002 # # -*- coding: utf-8 -*- # # File: PacketSniffer.py ; This file is part of Twister. # # Copyright (C) 2012 , Luxoft # # Authors: # Adrian Toader <adtoader@luxoft.com> # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from scapy.all import (Packet, PacketField, ByteField, XByteField, X3BytesField, ByteEnumField, ShortField, XShortField, ShortEnumField, IntField, XIntField, LongField, XLongField, StrField, StrLenField, StrFixedLenField, MACField, SourceMACField, DestMACField, IPField, SourceIPField, ConditionalField) # class _PacketField(StrField): """ Custom Packet Field """ holds_packets = 0 def __init__(self, name, default, cls, length=None): StrField.__init__(self, name, default) self.cls = cls if length is not None: self.length_from = lambda pkt,length=length: length def i2m(self, pkt, i): return str(i) def m2i(self, pkt, m): return self.cls(m) def getfield(self, pkt, s): l = self.length_from(pkt) return s[l:], self.m2i(pkt,s[:l]) # # # # OpenFlow v1.0 # # # # ofp_v1_0_message_type = { 0: 'Hello', 1: 'Error', 2: 'Echo Request', 3: 'Echo Reply', 4: 'Vendor', 5: 'Features Request', 6: 'Features Reply', 7: 'Get Config Request', 8: 'Get Config Reply', 9: 'Set Config', 10: 'Packet Input Notification', 11: 'Flow Removed Notification', 12: 'Port Status Notification', 13: 'Packet Output', 14: 'Flow Modification', 15: 'Port Modification', 16: 'Stats Request', 17: 'Stats Reply', 18: 'Barrier Request', 19: 'Barrier Reply', } ofp_v1_0_action_descriptor_type = { 0: 'Output Action Descriptor', 1: 'VLAN VID Action Descriptor', 2: 'VLAN PCP Action Descriptor', 3: 'Strip VLAN tag Action Descriptor', 4: 'Ethernet Address Action Descriptor (Ethernet source address)', 5: 'Ethernet Address Action Descriptor (Ethernet destination address)', 6: 'IPv4 Address Action Descriptor (IPv4 source address)', 7: 'IPv4 Address Action Descriptor (IPv4 destination address)', 8: 'IPv4 DSCP Action Descriptor', 9: 'TCP/UDP Port Action Descriptor (TCP/UDP source port)', 10: 'TCP/UDP Port Action Descriptor (TCP/UDP destination port)', 65535: 'Vendor Action Descriptor', } ofp_v1_0_stats_requestreply_type = { 0: 'Description of this OpenFlow switch', 1: 'Individual flow statistical information', 2: 'Aggregate flow statistical information', 3: 'Flow table statistical information', 4: 'Port statistical information', } class PortDescriptorField_v1_0(Packet): """ OpenFlow Port Descriptor Field """ name = 'PortDescriptorField_v1_0' fields_desc = [ ShortField('<PortNumber>', None), MACField('EthernetAddress', None), StrFixedLenField('PortDescription', None, 16), XIntField('<PortConfigurationFlags>', None), XIntField('<PortStatusFlags>', None), XIntField('Current<PortFeatureFlags>', None), XIntField('Advertising<PortFeatureFlags>', None), XIntField('Supported<PortFeatureFlags>', None), XIntField('LinkLayerNeighborAdvertising<PortFeatureFlags>', None), ] class FlowMatchDescriptorField_v1_0(Packet): """ OpenFlow Flow Match Descriptor Field """ name = 'FlowMatchDescriptorField_v1_0' fields_desc = [ 
IntField('<FlowWildcard>', None), ShortField('Ingresss<PortNumber>', None), SourceMACField(MACField('EthernetSourceAddress', None)), DestMACField(MACField('EthernetDestinationAddress', None)), XShortField('<VD+802.1QVID>', None), XByteField('PCP', None), XByteField('Reserved', None), ShortField('EthernetType/Length', None), ByteField('IPv4Protocol', None), X3BytesField('_Reserved', None), SourceIPField('IPv4SourceAddress', None), IPField('IPv4DestinationAddress', None), ShortField('SourcePort', None), ShortField('DestinationPort', None), ] class ActionDescriptorField_v1_0(Packet): """ OpenFlow Action Descriptor Field """ name = 'ActionDescriptorField_v1_0' fields_desc = [ ByteEnumField('Type', None, ofp_v1_0_action_descriptor_type), ShortField('Length', None), # Output Action Descriptor ConditionalField(ShortField('Egress<PortNumber>', None), lambda pkt: pkt.Type == 0), ConditionalField(ShortField('MaxLength', None), lambda pkt: pkt.Type == 0), # VLAN VID Action Descriptor ConditionalField(XShortField('CU+802.1QVID', None), lambda pkt: pkt.Type == 1), ConditionalField(XShortField('VLANVIDReserved', None), lambda pkt: pkt.Type == 1), # VLAN PCP Action Descriptor ConditionalField(XByteField('CU+PCP', None), lambda pkt: pkt.Type == 2), ConditionalField(X3BytesField('VLANPCPReserved', None), lambda pkt: pkt.Type == 2), # Strip VLAN tag Action Descriptor ConditionalField(XIntField('StripVLANTagReserved', None), lambda pkt: pkt.Type == 3), # Ethernet Address Action Descriptor (Ethernet source address) / # Ethernet Address Action Descriptor (Ethernet destination address) ConditionalField(SourceMACField(MACField('EthernetSourceAddress', None)), lambda pkt: pkt.Type == 4), ConditionalField(DestMACField(MACField('EthernetDestinationAddress', None)), lambda pkt: pkt.Type == 5), ConditionalField(StrFixedLenField('EthernetAddressReserved', None, 48), lambda pkt: pkt.Type in [4, 5]), # IPv4 Address Action Descriptor (IPv4 source address) / # IPv4 Address Action Descriptor (IPv4 destination address) ConditionalField(SourceIPField('IPv4SourceAddress', None), lambda pkt: pkt.Type == 6), ConditionalField(IPField('IPv4DestinationAddress', None), lambda pkt: pkt.Type == 7), # IPv4 DSCP Action Descriptor ConditionalField(ByteField('IPv4DSCP+CU', None), lambda pkt: pkt.Type == 8), ConditionalField(X3BytesField('IPv4DSCPReserved', None), lambda pkt: pkt.Type == 8), # TCP/UDP Port Action Descriptor (TCP/UDP source port) / # TCP/UDP Port Action Descriptor (TCP/UDP destination port) ConditionalField(ShortField('SourcePort', None), lambda pkt: pkt.Type == 9), ConditionalField(ShortField('DestinationPort', None), lambda pkt: pkt.Type == 10), ConditionalField(ShortField('TCP/UDPReserved', None), lambda pkt: pkt.Type in [9, 10]), # Vendor Action Descriptor ConditionalField(IntField('VendorID', None), lambda pkt: pkt.Type == 65535), ] class StatsRequestBody_v1_0(Packet): """ OpenFlow Stats Request Body """ name = 'StatsRequestBody_v1_0' fields_desc = [ ShortEnumField('Type', None, ofp_v1_0_stats_requestreply_type), ShortField('Flags', None), _PacketField('<FlowMatchDescriptor>', None, FlowMatchDescriptorField_v1_0, 40), ByteField('TableID', None), XByteField('Reserved', None), ShortField('EgressPortNumber', None) ] class StatsReplyBody_v1_0(Packet): """ OpenFlow Stats Reply Body """ name = 'StatsReplyBody_v1_0' fields_desc = [ ShortEnumField('Type', None, ofp_v1_0_stats_requestreply_type), ShortField('Flags', None), # Description of this OpenFlow switch ConditionalField(StrFixedLenField('ManufacturerDescription', None, 
256), lambda pkt: pkt.Type == 0), ConditionalField(StrFixedLenField('HardwareDescription', None, 256), lambda pkt: pkt.Type == 0), ConditionalField(StrFixedLenField('SoftwareDescription', None, 256), lambda pkt: pkt.Type == 0), ConditionalField(IntField('SerialNumberDescription', None), lambda pkt: pkt.Type == 0), ## ?? # Individual flow statistical information / # Aggregate flow statistical information ConditionalField(ShortField('Length', None), lambda pkt: pkt.Type == 1), ConditionalField(ByteField('TableID', None), lambda pkt: pkt.Type == 1), ConditionalField(XByteField('Reserved', None), lambda pkt: pkt.Type == 1), ConditionalField(_PacketField('<FlowMatchDescriptor>', None, FlowMatchDescriptorField_v1_0, 40), lambda pkt: pkt.Type == 1), ConditionalField(IntField('LifetimeDuration', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('Priority', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('SoftLifetime', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('HardLifetime', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('Reserved', None), lambda pkt: pkt.Type == 1), ConditionalField(LongField('NumberOfPacketsTransferred', None), lambda pkt: pkt.Type in [1, 2]), ConditionalField(LongField('NumberOfOctetsTransferred', None), lambda pkt: pkt.Type in [1, 2]), ConditionalField(_PacketField('<ActionDescriptors>', None, ActionDescriptorField_v1_0), ## ???? bytes lambda pkt: pkt.Type == 1), ConditionalField(IntField('NumberOfFlows', None), lambda pkt: pkt.Type == 2), ConditionalField(IntField('Reserved', None), lambda pkt: pkt.Type == 2), # Flow table statistical information ConditionalField(ByteField('Table_ID', None), lambda pkt: pkt.Type == 1), ConditionalField(X3BytesField('Reserved', None), lambda pkt: pkt.Type == 3), ConditionalField(StrFixedLenField('TableDescriptionString', None, 16), lambda pkt: pkt.Type == 3), ConditionalField(IntField('<FlowWildcard>', None), lambda pkt: pkt.Type == 3), ConditionalField(IntField('MaximumNumberOfFlowsSupported', None), lambda pkt: pkt.Type == 3), ConditionalField(IntField('NumberOfFlowsInstalled', None), lambda pkt: pkt.Type == 3), ConditionalField(IntField('NumberOfPacketsLookedUp', None), lambda pkt: pkt.Type == 3), ConditionalField(IntField('NumberOfPacketsMatched', None), lambda pkt: pkt.Type == 3), # Port statistical information ConditionalField(LongField('NumberOfPakctesReceived', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfPakctesTransmitted', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfOctetsReceived', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfOctetsTransmitted', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfPacketsDroppedInReception', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfPacketsDroppedInTransmittion', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfErrorsInReception', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfErrorsInTransmittion', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfAlignmentErrors', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfOverrunErrors', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfCRCErrors', None), lambda pkt: pkt.Type == 4), ConditionalField(LongField('NumberOfCollisionErrors', None), lambda pkt: pkt.Type == 4), ] class OpenFlowBody_v1_0(Packet): """ OpenFlow Packet v1.0 """ name = 'OpenFlowBody_v1_0' 
fields_desc = [ # Header ByteEnumField('Type', None, ofp_v1_0_message_type), ShortField('Length', None), IntField('ID', None), # Error message body ConditionalField(ShortField('ErrorType', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('ErrorCode', None), lambda pkt: pkt.Type == 1), # Echo Request / Echo Reply message body ConditionalField(StrField('Data', None), lambda pkt: pkt.Type in [2, 3]), # Vendor message body ConditionalField(IntField('VendorID', None), lambda pkt: pkt.Type == 4), # Features Reply message body ConditionalField(XLongField('DatapathID', None), lambda pkt: pkt.Type == 6), ConditionalField(IntField('AvailableNumberOfPacketsCanBeHeld', None), lambda pkt: pkt.Type == 6), ConditionalField(ByteField('NumberOfFlowTabs', None), lambda pkt: pkt.Type == 6), ConditionalField(X3BytesField('FeaturesReplyReserved', None), lambda pkt: pkt.Type == 6), ConditionalField(XIntField('<SwitchCapabilityFlags>', None), lambda pkt: pkt.Type == 6), ConditionalField(XIntField('ActionCapabilityFlags', None), lambda pkt: pkt.Type == 6), ConditionalField(_PacketField('FeaturesReply<PortDescriptors>', None, PortDescriptorField_v1_0, 48), lambda pkt: pkt.Type == 6), # Get Config Reply / Set Config message body ConditionalField(XShortField('SwitchConfigurationFlags', None), lambda pkt: pkt.Type in [8, 9]), ConditionalField(ShortField('MissSendLength', None), lambda pkt: pkt.Type in [8, 9]), # Packet Input Notification message body ConditionalField(IntField('PacketInputNotificationPacketBufferID', None), lambda pkt: pkt.Type == 10), ConditionalField(ShortField('EthernetFrameLength', None), lambda pkt: pkt.Type == 10), ConditionalField(ShortField('PacketInputNotificationIngresss<PortNumber>', None), lambda pkt: pkt.Type == 10), ConditionalField(ByteField('PacketInputNotificationReason', None), lambda pkt: pkt.Type == 10), ConditionalField(XByteField('PacketInputNotificationReserved', None), lambda pkt: pkt.Type == 10), ConditionalField(StrLenField('EthenretFrame', None, length_from=lambda pkt:pkt.EthernetFrameLength), lambda pkt: pkt.Type == 10), # Flow Removed Notification message body ConditionalField(_PacketField('FlowRemovedNotification<FlowMatchDescriptor>', None, FlowMatchDescriptorField_v1_0, 40), lambda pkt: pkt.Type == 11), ConditionalField(ShortField('FlowRemovedNotificationPriority', None), lambda pkt: pkt.Type == 11), ConditionalField(ByteField('FlowRemovedNotificationReason', None), lambda pkt: pkt.Type == 11), ConditionalField(XByteField('FlowRemovedNotificationReserved', None), lambda pkt: pkt.Type == 11), ConditionalField(IntField('LifetimeDuration', None), lambda pkt: pkt.Type == 11), ConditionalField(ShortField('FlowRemovedNotificationSoftLifetime', None), lambda pkt: pkt.Type == 11), ConditionalField(StrFixedLenField('FlowRemovedNotification_Reserved', None, 48), lambda pkt: pkt.Type == 11), ConditionalField(LongField('NumberOfPacketsTransferred', None), lambda pkt: pkt.Type == 11), ConditionalField(LongField('NumberOfOctetsTransferred', None), lambda pkt: pkt.Type == 11), # Port Status Notification message body ConditionalField(ByteField('PortStatusNotificationReason', None), lambda pkt: pkt.Type == 12), ConditionalField(StrFixedLenField('PortStatusNotificationReserved', None, 64), lambda pkt: pkt.Type == 12), ConditionalField(_PacketField('PortStatusNotification<PortDescriptors>', None, PortDescriptorField_v1_0, 48), lambda pkt: pkt.Type == 12), # Packet Output message body ConditionalField(IntField('PacketOutputPacketBufferID', None), lambda pkt: pkt.Type == 13), 
ConditionalField(ShortField('PacketOutputIngress<PortNumber>', None), lambda pkt: pkt.Type == 13), ConditionalField(ShortField('LengthOfActionDescriptors', None), lambda pkt: pkt.Type == 13), ConditionalField(_PacketField('PacketOutput<ActionDescriptors>', None, ActionDescriptorField_v1_0), ## ???? bytes ## lambda pkt: pkt.Type == 13), ConditionalField(StrField('<PacketData>', None), lambda pkt: pkt.Type == 13), # Flow Modification message body ConditionalField(_PacketField('FlowModification<FlowMatchDescriptor>', None, FlowMatchDescriptorField_v1_0, 40), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('Command', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationSoftLifetime', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('HardLifetime', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationPriority', None), lambda pkt: pkt.Type == 14), ConditionalField(IntField('FlowModificationPacketBufferID', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationEgress<PortNumber>', None), lambda pkt: pkt.Type == 14), ConditionalField(XShortField('FlowModificationCU', None), lambda pkt: pkt.Type == 14), ConditionalField(XIntField('FlowModificationReserved', None), lambda pkt: pkt.Type == 14), ConditionalField(_PacketField('FlowModification<ActionDescriptors>', None, ActionDescriptorField_v1_0), ## ???? bytes lambda pkt: pkt.Type == 14), # Port Modification message body ConditionalField(ShortField('PortModification<PortNumber>', None), lambda pkt: pkt.Type == 15), ConditionalField(MACField('Ethernet Address', None), lambda pkt: pkt.Type == 15), ConditionalField(XIntField('<PortConfigurationFlags>', None), lambda pkt: pkt.Type == 15), ConditionalField(XIntField('<PortConfigurationFlags>Mask', None), lambda pkt: pkt.Type == 15), ConditionalField(XIntField('<PortFeatureFlags>', None), lambda pkt: pkt.Type == 15), # Stats Request message body ConditionalField(_PacketField('StatsRequest', None, StatsRequestBody_v1_0, 48), lambda pkt: pkt.Type == 16), # Stats Reply message body ConditionalField(_PacketField('StatsReply', None, StatsReplyBody_v1_0), ## ???? 
bytes lambda pkt: pkt.Type == 17), StrField('Payload', None), ] # # # # || # # # # # # # # OpenFlow v1.3 # # # # ofp_v1_3_message_type = { 0: 'Hello', 1: 'Error', 2: 'Echo Request', 3: 'Echo Reply', 4: 'Experimenter', 5: 'Features Request', 6: 'Features Reply', 7: 'Get Config Request', 8: 'Get Config Reply', 9: 'Set Config', 10: 'Packet Input Notification', 11: 'Flow Removed Notification', 12: 'Port Status Notification', 13: 'Packet Output', 14: 'Flow Modification', 15: 'Group Modification', 16: 'Port Modification', 17: 'Table Modification', 18: 'Multipart Request', 19: 'Multipart Reply', 20: 'Barrier Request', 21: 'Barrier Reply', 22: 'Queue Get Config Request', 23: 'Queue Get Config Reply', 24: 'Role Request', 25: 'Role Reply', 26: 'Get Async Request', 27: 'Get Async Reply', 28: 'Set Async', 29: 'Meter Modification', } class PortDescriptorField_v1_3(Packet): """ OpenFlow Port Descriptor Field """ name = 'PortDescriptorField_v1_3' fields_desc = [ ShortField('<PortNumber>', None), XByteField('PortDescriptorFieldPad', None), MACField('EthernetAddress', None), XByteField('PortDescriptorFieldPad2', None), StrFixedLenField('Name', None, 16), XIntField('<PortConfigurationFlags>', None), XIntField('<PortStatusFlags>', None), XIntField('Current<PortFeatureFlags>', None), XIntField('Advertising<PortFeatureFlags>', None), XIntField('Supported<PortFeatureFlags>', None), XIntField('PeerAdvertising<PortFeatureFlags>', None), IntField('CurrentSpeed', None), IntField('MaxSpeed', None), ] class ActionDescriptorField_v1_3(Packet): """ OpenFlow Action Descriptor Field """ name = 'ActionDescriptorField_v1_3' fields_desc = [ ShortField('Type', None), ShortField('Length', None), XByteField('Pad', None), ] class OpenFlowBucket_v1_3(Packet): """ OpenFlow Bucket Field """ name = 'OpenFlowBucket_v1_3' fields_desc = [ ShortField('Length', None), ShortField('Weight', None), IntField('WatchPort', None), IntField('WatchGroup', None), XByteField('Pad', None), _PacketField('PacketOutput<ActionDescriptors>', None, ActionDescriptorField_v1_3, 5), ] class OpenFlowQueueProprieties_v1_3(Packet): """ OpenFlow Queue Properties Field """ name = 'OpenFlowBucket_v1_3' fields_desc = [ ShortField('Property', None), ShortField('Length', None), XByteField('Pad', None), ] class OpenFlowPacketQueue_v1_3(Packet): """ OpenFlow Bucket Field """ name = 'OpenFlowBucket_v1_3' fields_desc = [ IntField('QueueID', None), IntField('Port', None), ShortField('Length', None), XByteField('Pad', None), _PacketField('QueueProprieties', None, OpenFlowQueueProprieties_v1_3, 5), ] class OpenFlowMeterBand_v1_3(Packet): """ OpenFlow Meter Band Field """ name = 'OpenFlowMeterBand_v1_3' fields_desc = [ ShortField('Type', None), ShortField('Length', None), IntField('Rate', None), IntField('BurstSize', None), ] class OpenFlowBody_v1_3(Packet): """ OpenFlow Packet v1.3 """ name = 'OpenFlowBody_v1_3' fields_desc = [ # Header ByteEnumField('Type', None, ofp_v1_3_message_type), ShortField('Length', None), IntField('ID', None), # Error message body ConditionalField(ShortField('ErrorType', None), lambda pkt: pkt.Type == 1), ConditionalField(ShortField('ErrorCode', None), lambda pkt: pkt.Type == 1), # Echo Request / Echo Reply message body ConditionalField(StrField('Data', None), lambda pkt: pkt.Type in [1, 2, 3]), # Vendor message body #ConditionalField(IntField('VendorID', None), lambda pkt: pkt.Type == 4), # type ???? 
# Experimenter ConditionalField(XIntField('ExperimenterID', None), lambda pkt: pkt.Type == 4), ConditionalField(IntField('ExperimenterType', None), lambda pkt: pkt.Type == 4), # Features Reply message body ConditionalField(XLongField('DatapathID', None), lambda pkt: pkt.Type == 6), ConditionalField(IntField('MaxBuffers', None), lambda pkt: pkt.Type == 6), ConditionalField(ByteField('MaxTables', None), lambda pkt: pkt.Type == 6), ConditionalField(ByteField('AuxiliaryID', None), lambda pkt: pkt.Type == 6), ConditionalField(XByteField('FeaturesReplyPad', None), lambda pkt: pkt.Type == 6), ConditionalField(XIntField('Capabilities', None), lambda pkt: pkt.Type == 6), ConditionalField(IntField('FeaturesReplyReserved', None), lambda pkt: pkt.Type == 6), # Set Config message body ConditionalField(XShortField('SwitchConfigurationFlags', None), lambda pkt: pkt.Type == 9), ConditionalField(ShortField('MissSendLength', None), lambda pkt: pkt.Type == 9), # Packet Input Notification message body ConditionalField(IntField('PacketInputNotificationPacketBufferID', None), lambda pkt: pkt.Type == 10), ConditionalField(ShortField('EthernetFrameLength', None), lambda pkt: pkt.Type == 10), ConditionalField(ByteField('PacketInputNotificationReason', None), lambda pkt: pkt.Type == 10), ConditionalField(ByteField('PacketInputNotificationTableID', None), lambda pkt: pkt.Type == 10), ConditionalField(XLongField('PacketInputNotificationCookie', None), lambda pkt: pkt.Type == 10), # Flow Removed Notification message body ConditionalField(XLongField('FlowRemovedNotificationCookie', None), lambda pkt: pkt.Type == 11), ConditionalField(ShortField('FlowRemovedNotificationPriority', None), lambda pkt: pkt.Type == 11), ConditionalField(ByteField('FlowRemovedNotificationReason', None), lambda pkt: pkt.Type == 11), ConditionalField(ByteField('FlowRemovedNotificationTableID', None), lambda pkt: pkt.Type == 11), ConditionalField(IntField('Duration_sec', None), lambda pkt: pkt.Type == 11), ConditionalField(IntField('Duration_nsec', None), lambda pkt: pkt.Type == 11), ConditionalField(ShortField('FlowRemovedNotificationIdleTimeout', None), lambda pkt: pkt.Type == 11), ConditionalField(ShortField('HardTimeout', None), lambda pkt: pkt.Type == 11), ConditionalField(LongField('NumberOfPacketsTransferred', None), lambda pkt: pkt.Type == 11), ConditionalField(LongField('NumberOfOctetsTransferred', None), lambda pkt: pkt.Type == 11), # Port Status Notification message body ConditionalField(ByteField('PortStatusNotificationReason', None), lambda pkt: pkt.Type == 12), ConditionalField(XByteField('PortStatusNotificationPad', None), lambda pkt: pkt.Type == 12), ConditionalField(_PacketField('PortStatusNotification<PortDescriptors>', None, PortDescriptorField_v1_3, 39), lambda pkt: pkt.Type == 12), # Packet Output message body ConditionalField(IntField('PacketOutputPacketBufferID', None), lambda pkt: pkt.Type == 13), ConditionalField(IntField('PacketOutputIngress<PortNumber>', None), lambda pkt: pkt.Type == 13), ConditionalField(ShortField('LengthOfActionDescriptors', None), lambda pkt: pkt.Type == 13), ConditionalField(XByteField('PacketOutputPad', None), lambda pkt: pkt.Type == 13), ConditionalField(_PacketField('PacketOutput<ActionDescriptors>', None, ActionDescriptorField_v1_3, 5), lambda pkt: pkt.Type == 13), # Flow Modification message body ConditionalField(LongField('FlowModificationCookie', None), lambda pkt: pkt.Type == 14), ConditionalField(XLongField('FlowModificationCookieMask', None), lambda pkt: pkt.Type == 14), 
ConditionalField(ByteField('FlowModificationTableID', None), lambda pkt: pkt.Type == 14), ConditionalField(ByteField('FlowModificationCommand', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationIdleTimeout', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationHardTimeout', None), lambda pkt: pkt.Type == 14), ConditionalField(ShortField('FlowModificationPriority', None), lambda pkt: pkt.Type == 14), ConditionalField(IntField('FlowModificationPacketBufferID', None), lambda pkt: pkt.Type == 14), ConditionalField(IntField('FlowModificationOutputPort', None), lambda pkt: pkt.Type == 14), ConditionalField(IntField('FlowModificationOutputGroup', None), lambda pkt: pkt.Type == 14), ConditionalField(XShortField('FlowModificationFlags', None), lambda pkt: pkt.Type == 14), ConditionalField(XByteField('FlowModificationPad', None), lambda pkt: pkt.Type == 14), # Group Modification message body ConditionalField(ShortField('GroupModificationCommand', None), lambda pkt: pkt.Type == 15), ConditionalField(ByteField('GroupModificationType', None), lambda pkt: pkt.Type == 15), ConditionalField(ByteField('GroupModificationPad', None), lambda pkt: pkt.Type == 15), ConditionalField(IntField('GroupModificationGroupID', None), lambda pkt: pkt.Type == 15), ConditionalField(_PacketField('GroupModificationBucket', None, OpenFlowBucket_v1_3, 18), lambda pkt: pkt.Type == 15), # Port Modification message body ConditionalField(ShortField('PortModification<PortNumber>', None), lambda pkt: pkt.Type == 16), ConditionalField(XByteField('PortModificationPad', None), lambda pkt: pkt.Type == 16), ConditionalField(MACField('PortModificationEthernetAddress', None), lambda pkt: pkt.Type == 16), ConditionalField(XByteField('PortModificationPad2', None), lambda pkt: pkt.Type == 16), ConditionalField(XIntField('<PortConfigurationFlags>', None), lambda pkt: pkt.Type == 16), ConditionalField(XIntField('<PortConfigurationFlags>Mask', None), lambda pkt: pkt.Type == 16), ConditionalField(XIntField('<PortConfigurationFlags>Advertise', None), lambda pkt: pkt.Type == 16), ConditionalField(XByteField('PortModificationPad3', None), lambda pkt: pkt.Type == 16), # Table Modification message body ConditionalField(ByteField('TableModificationTableID', None), lambda pkt: pkt.Type == 17), ConditionalField(XByteField('TableModificationPad', None), lambda pkt: pkt.Type == 17), ConditionalField(XIntField('TableModificationConfiguration', None), lambda pkt: pkt.Type == 17), # Multipart Request / Multipart Reply message body ConditionalField(ShortField('MultipartType', None), lambda pkt: pkt.Type in [18, 19]), ConditionalField(XShortField('MultipartFlags', None), lambda pkt: pkt.Type in [18, 19]), ConditionalField(XByteField('MultipartPad', None), lambda pkt: pkt.Type in [18, 19]), ConditionalField(ByteField('MultipartBody', None), lambda pkt: pkt.Type in [18, 19]), # Barrier Reply message body ConditionalField(IntField('BarrierReplyID', None), lambda pkt: pkt.Type == 21), # Queue Get Config Request / Queue Get Config Reply message body ConditionalField(IntField('QueueGetConfigRequestPort', None), lambda pkt: pkt.Type in [22, 23]), ConditionalField(ByteField('QueueGetConfigRequestPad', None), lambda pkt: pkt.Type in [22, 23]), ConditionalField(_PacketField('OpenFlowPacketQueue', None, OpenFlowPacketQueue_v1_3, 16), lambda pkt: pkt.Type == 23), # Role Request / Role Reply message body ConditionalField(IntField('RoleRequestRole', None), lambda pkt: pkt.Type in [24, 25]), 
ConditionalField(ByteField('RoleRequestPad', None), lambda pkt: pkt.Type in [24, 25]), ConditionalField(LongField('RoleRequestGenerationID', None), lambda pkt: pkt.Type in [24, 25]), # Get Async Reply / Set Async messages body ConditionalField(XIntField('GetAsyncReplyPacketInMask', None), lambda pkt: pkt.Type in [27, 28]), ConditionalField(XIntField('GetAsyncReplyPortStatusMask', None), lambda pkt: pkt.Type in [27, 28]), ConditionalField(XIntField('GetAsyncReplyFlowRemovedMask', None), lambda pkt: pkt.Type in [27, 28]), # Meter Modification message body ConditionalField(ShortField('MeterModificationCommand', None), lambda pkt: pkt.Type == 29), ConditionalField(XShortField('MeterModificationFlags', None), lambda pkt: pkt.Type == 29), ConditionalField(IntField('MeterModificationMeterID', None), lambda pkt: pkt.Type == 29), ConditionalField(_PacketField('MeterModificationMeterBand', None, OpenFlowMeterBand_v1_3, 12), lambda pkt: pkt.Type == 29), StrField('Payload', None), ] class OpenFlow(Packet): """ OpenFlow Packet """ name = 'OFP' fields_desc = [ # Header ByteField('Version', None), # Body ConditionalField(PacketField('OpenFlowBody_v1_0', None, OpenFlowBody_v1_0), lambda pkt: pkt.Version in [1, 2]), ConditionalField(PacketField('OpenFlowBody_v1_3', None, OpenFlowBody_v1_3), lambda pkt: pkt.Version in [3, 4]), ] class CentralEngineObject: """ Packet Sniffer Central Engine Objects """ def __init__(self, proxy): self.proxy = proxy self.pluginStatus = None self.PAUSED = False
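A rough dissection sketch for the OpenFlow class above, assuming a Python 2 scapy environment; the bytes encode a minimal OpenFlow v1.0 Hello message:

raw = '\x01\x00\x00\x08\x00\x00\x00\x01'  # version 1, type Hello, length 8, xid 1
pkt = OpenFlow(raw)
pkt.show()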
joyxu/autotest
refs/heads/master
database_legacy/migrations/020_add_host_dirty_and_job_reboots.py
6
UP_SQL = """ ALTER TABLE hosts ADD COLUMN `dirty` bool NOT NULL; ALTER TABLE jobs ADD COLUMN `reboot_before` smallint NOT NULL; ALTER TABLE jobs ADD COLUMN `reboot_after` smallint NOT NULL; """ DOWN_SQL = """ ALTER TABLE hosts DROP COLUMN `dirty`; ALTER TABLE jobs DROP COLUMN `reboot_before`; ALTER TABLE jobs DROP COLUMN `reboot_after`; """ def migrate_up(manager): manager.execute_script(UP_SQL) def migrate_down(manager): manager.execute_script(DOWN_SQL)
ar45/django
refs/heads/master
django/contrib/gis/gdal/feature.py
439
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException, OGRIndexError
from django.contrib.gis.gdal.field import Field
from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType
from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api
from django.utils import six
from django.utils.encoding import force_bytes, force_text
from django.utils.six.moves import range

# For more information, see the OGR C API source code:
#  http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_F_* routines are relevant here.


class Feature(GDALBase):
    """
    This class wraps an OGR Feature and needs to be instantiated from a
    Layer object.
    """

    def __init__(self, feat, layer):
        """
        Initializes Feature from a pointer and its Layer object.
        """
        if not feat:
            raise GDALException('Cannot create OGR Feature, invalid pointer given.')
        self.ptr = feat
        self._layer = layer

    def __del__(self):
        "Releases a reference to this object."
        if self._ptr and capi:
            capi.destroy_feature(self._ptr)

    def __getitem__(self, index):
        """
        Gets the Field object at the specified index, which may be either
        an integer or the Field's string label.  Note that the Field object
        is not the field's _value_ -- use the `get` method instead to
        retrieve the value (e.g. an integer) instead of a Field instance.
        """
        if isinstance(index, six.string_types):
            i = self.index(index)
        else:
            if index < 0 or index > self.num_fields:
                raise OGRIndexError('index out of range')
            i = index
        return Field(self, i)

    def __iter__(self):
        "Iterates over each field in the Feature."
        for i in range(self.num_fields):
            yield self[i]

    def __len__(self):
        "Returns the count of fields in this feature."
        return self.num_fields

    def __str__(self):
        "The string name of the feature."
        return 'Feature FID %d in Layer<%s>' % (self.fid, self.layer_name)

    def __eq__(self, other):
        "Does equivalence testing on the features."
        return bool(capi.feature_equal(self.ptr, other._ptr))

    # #### Feature Properties ####
    @property
    def encoding(self):
        return self._layer._ds.encoding

    @property
    def fid(self):
        "Returns the feature identifier."
        return capi.get_fid(self.ptr)

    @property
    def layer_name(self):
        "Returns the name of the layer for the feature."
        name = capi.get_feat_name(self._layer._ldefn)
        return force_text(name, self.encoding, strings_only=True)

    @property
    def num_fields(self):
        "Returns the number of fields in the Feature."
        return capi.get_feat_field_count(self.ptr)

    @property
    def fields(self):
        "Returns a list of fields in the Feature."
        return [capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i))
                for i in range(self.num_fields)]

    @property
    def geom(self):
        "Returns the OGR Geometry for this Feature."
        # Retrieving the geometry pointer for the feature.
        geom_ptr = capi.get_feat_geom_ref(self.ptr)
        return OGRGeometry(geom_api.clone_geom(geom_ptr))

    @property
    def geom_type(self):
        "Returns the OGR Geometry Type for this Feature."
        return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn))

    # #### Feature Methods ####
    def get(self, field):
        """
        Returns the value of the field, instead of an instance of the Field
        object.  May take a string of the field name or a Field object as
        parameters.
        """
        field_name = getattr(field, 'name', field)
        return self[field_name].value

    def index(self, field_name):
        "Returns the index of the given field name."
        i = capi.get_field_index(self.ptr, force_bytes(field_name))
        if i < 0:
            raise OGRIndexError('invalid OFT field name given: "%s"' % field_name)
        return i
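A usage sketch for the Feature wrapper via DataSource; the shapefile path and the 'Name' field are placeholders:

from django.contrib.gis.gdal import DataSource

ds = DataSource('/data/cities.shp')  # placeholder path
layer = ds[0]
for feat in layer:
    print(feat.fid, feat.get('Name'), feat.geom.geom_type)  # 'Name' is hypothetical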
NLeSC/cptm
refs/heads/master
cptm/experiment_cptcorpus_count_words.py
1
"""Count the number of topic and opinion words in the corpus. Usage: python experiment_cptcorpus_count_words.py <experiment.json> """ import logging import argparse import sys import tarfile import os from utils.experiment import load_config, get_corpus, get_sampler, \ thetaFileName, topicFileName, opinionFileName, tarFileName logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.INFO) logger = logging.getLogger(__name__) parser = argparse.ArgumentParser() parser.add_argument('json', help='json file containing experiment ' 'configuration.') args = parser.parse_args() config = load_config(args.json) corpus = get_corpus(config) num_topic_words = 0 num_opinion_words = 0 for d, persp, d_p, doc in corpus: for w_id, i in corpus.words_in_document(doc, 'topic'): num_topic_words += i for w_id, i in corpus.words_in_document(doc, 'opinion'): num_opinion_words += i print 'Number of topic words in corpus', num_topic_words print 'Number of opinion words in corpus', num_opinion_words
gannetson/django
refs/heads/master
tests/gis_tests/geoapp/feeds.py
367
from __future__ import unicode_literals

from django.contrib.gis import feeds

from .models import City


class TestGeoRSS1(feeds.Feed):
    link = '/city/'
    title = 'Test GeoDjango Cities'

    def items(self):
        return City.objects.all()

    def item_link(self, item):
        return '/city/%s/' % item.pk

    def item_geometry(self, item):
        return item.point


class TestGeoRSS2(TestGeoRSS1):
    def geometry(self, obj):
        # This should attach a <georss:box> element for the extent of
        # the cities in the database. This tuple came from
        # calling `City.objects.extent()` -- we can't do that call here
        # because `extent` is not implemented for MySQL/Oracle.
        return (-123.30, -41.32, 174.78, 48.46)

    def item_geometry(self, item):
        # Returning a simple tuple for the geometry.
        return item.point.x, item.point.y


class TestGeoAtom1(TestGeoRSS1):
    feed_type = feeds.GeoAtom1Feed


class TestGeoAtom2(TestGeoRSS2):
    feed_type = feeds.GeoAtom1Feed

    def geometry(self, obj):
        # This time we'll use a 2-tuple of coordinates for the box.
        return ((-123.30, -41.32), (174.78, 48.46))


class TestW3CGeo1(TestGeoRSS1):
    feed_type = feeds.W3CGeoFeed

# The following feeds are invalid, and will raise exceptions.


class TestW3CGeo2(TestGeoRSS2):
    feed_type = feeds.W3CGeoFeed


class TestW3CGeo3(TestGeoRSS1):
    feed_type = feeds.W3CGeoFeed

    def item_geometry(self, item):
        from django.contrib.gis.geos import Polygon
        return Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))

# The feed dictionary to use for URLs.
feed_dict = {
    'rss1': TestGeoRSS1,
    'rss2': TestGeoRSS2,
    'atom1': TestGeoAtom1,
    'atom2': TestGeoAtom2,
    'w3cgeo1': TestW3CGeo1,
    'w3cgeo2': TestW3CGeo2,
    'w3cgeo3': TestW3CGeo3,
}
nagyistoce/euca2ools
refs/heads/master
euca2ools/commands/iam/updateaccesskey.py
5
# Copyright 2009-2015 Eucalyptus Systems, Inc. # # Redistribution and use of this software in source and binary forms, # with or without modification, are permitted provided that the following # conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from requestbuilder import Arg from euca2ools.commands.iam import IAMRequest, AS_ACCOUNT, arg_key_id class UpdateAccessKey(IAMRequest): DESCRIPTION = ('Change the status of an access key from Active to ' 'Inactive, or vice versa') ARGS = [arg_key_id(help='ID of the access key to update (required)'), Arg('-s', '--status', dest='Status', required=True, choices=('Active', 'Inactive'), help='status to assign to the access key (required)'), Arg('-u', '--user-name', dest='UserName', metavar='USER', help='''user owning the access key to update (default: current user)'''), AS_ACCOUNT]
wliu2016/sending_email
refs/heads/master
emaildata/__init__.py
1349
# -*- coding: utf-8 -*-
Just-D/chromium-1
refs/heads/master
tools/perf/metrics/speedindex.py
14
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry.util import image_util
from telemetry.util import rgba_color
from telemetry.value import scalar

from metrics import Metric


class SpeedIndexMetric(Metric):
  """The speed index metric is one way of measuring page load speed.

  It is meant to approximate user perception of page load speed, and it is
  based on the amount of time that it takes to paint to the visual portion of
  the screen. It includes paint events that occur after the onload event, and
  it doesn't include time loading things off-screen.

  This speed index metric is based on WebPageTest.org (WPT).
  For more info see: http://goo.gl/e7AH5l
  """
  def __init__(self):
    super(SpeedIndexMetric, self).__init__()
    self._impl = None

  @classmethod
  def CustomizeBrowserOptions(cls, options):
    options.AppendExtraBrowserArgs('--disable-infobars')

  def Start(self, _, tab):
    """Start recording events.

    This method should be called in the WillNavigateToPage method of a
    PageTest, so that all the events can be captured. If it's called in
    DidNavigateToPage, that will be too late.
    """
    if not tab.video_capture_supported:
      return
    self._impl = VideoSpeedIndexImpl()
    self._impl.Start(tab)

  def Stop(self, _, tab):
    """Stop recording."""
    if not tab.video_capture_supported:
      return
    assert self._impl, 'Must call Start() before Stop()'
    assert self.IsFinished(tab), 'Must wait for IsFinished() before Stop()'
    self._impl.Stop(tab)

  # Optional argument chart_name is not in base class Metric.
  # pylint: disable=W0221
  def AddResults(self, tab, results, chart_name=None):
    """Calculate the speed index and add it to the results."""
    try:
      if tab.video_capture_supported:
        index = self._impl.CalculateSpeedIndex(tab)
        none_value_reason = None
      else:
        index = None
        none_value_reason = 'Video capture is not supported.'
    finally:
      self._impl = None  # Release the tab so that it can be disconnected.

    results.AddValue(scalar.ScalarValue(
        results.current_page, '%s_speed_index' % chart_name, 'ms', index,
        description='Speed Index. This focuses on the time when the visible '
                    'parts of the page are displayed, i.e. when the first '
                    'look of the page is "almost" composed. If the test page '
                    'consists of only static resources, load time can be '
                    'measured more accurately and the speed index will be '
                    'smaller than the load time. On the other hand, if the '
                    'page is composed of many XHR requests with a small main '
                    'resource and JavaScript, the speed index captures the '
                    'performance characteristics more accurately than load '
                    'time, because load time only measures when the static '
                    'resources finish loading. For more detail, please refer '
                    'to http://goo.gl/Rw3d5d. Currently there are two '
                    'implementations: for Android and for Desktop. The '
                    'Android version uses video capture; the Desktop one '
                    'uses paint events and has extra overhead to catch them.',
        none_value_reason=none_value_reason))

  def IsFinished(self, tab):
    """Decide whether the recording should be stopped.

    A page may repeatedly request resources in an infinite loop; a timeout
    should be placed in any measurement that uses this metric, e.g.:
      def IsDone():
        return self._speedindex.IsFinished(tab)
      util.WaitFor(IsDone, 60)

    Returns:
      True if 2 seconds have passed since the last resource was received,
      False otherwise.
    """
    return tab.HasReachedQuiescence()


class SpeedIndexImpl(object):

  def Start(self, tab):
    raise NotImplementedError()

  def Stop(self, tab):
    raise NotImplementedError()

  def GetTimeCompletenessList(self, tab):
    """Returns a list of time to visual completeness tuples.

    In the WPT PHP implementation, this is also called 'visual progress'.
    """
    raise NotImplementedError()

  def CalculateSpeedIndex(self, tab):
    """Calculate the speed index.

    The speed index number conceptually represents the number of milliseconds
    that the page was "visually incomplete". If the page were 0% complete for
    1000 ms, then the score would be 1000; if it were 0% complete for 100 ms
    then 90% complete (ie 10% incomplete) for 900 ms, then the score would be
    1.0*100 + 0.1*900 = 190.

    Returns:
      A single number, milliseconds of visual incompleteness.
    """
    time_completeness_list = self.GetTimeCompletenessList(tab)
    prev_completeness = 0.0
    speed_index = 0.0
    prev_time = time_completeness_list[0][0]
    for time, completeness in time_completeness_list:
      # Add the incremental value for the interval just before this event.
      elapsed_time = time - prev_time
      incompleteness = (1.0 - prev_completeness)
      speed_index += elapsed_time * incompleteness

      # Update variables for next iteration.
      prev_completeness = completeness
      prev_time = time
    return int(speed_index)


class VideoSpeedIndexImpl(SpeedIndexImpl):

  def __init__(self, image_util_module=image_util):
    # Allow image_util to be passed in so we can fake it out for testing.
    super(VideoSpeedIndexImpl, self).__init__()
    self._time_completeness_list = None
    self._image_util_module = image_util_module

  def Start(self, tab):
    assert tab.video_capture_supported
    # Blank out the current page so it doesn't count towards the new page's
    # completeness.
    tab.Highlight(rgba_color.WHITE)
    # TODO(tonyg): Bitrate is arbitrary here. Experiment with screen capture
    # overhead vs. speed index accuracy and set the bitrate appropriately.
    tab.StartVideoCapture(min_bitrate_mbps=4)

  def Stop(self, tab):
    # Ignore white because Chrome may blank out the page during load and we
    # want that to count as 0% complete. Relying on this fact, we also blank
    # out the previous page to white. The tolerance of 8 experimentally does
    # well with video capture at 4mbps. We should keep this as low as
    # possible with supported video compression settings.
    video_capture = tab.StopVideoCapture()
    histograms = [(time, self._image_util_module.GetColorHistogram(
        image, ignore_color=rgba_color.WHITE, tolerance=8))
                  for time, image in video_capture.GetVideoFrameIter()]

    start_histogram = histograms[0][1]
    final_histogram = histograms[-1][1]
    total_distance = start_histogram.Distance(final_histogram)

    def FrameProgress(histogram):
      if total_distance == 0:
        if histogram.Distance(final_histogram) == 0:
          return 1.0
        else:
          return 0.0
      return 1 - histogram.Distance(final_histogram) / total_distance

    self._time_completeness_list = [(time, FrameProgress(hist))
                                    for time, hist in histograms]

  def GetTimeCompletenessList(self, tab):
    assert self._time_completeness_list, 'Must call Stop() first.'
    return self._time_completeness_list
Web5design/node-gyp
refs/heads/master
gyp/pylib/gyp/MSVSSettings.py
437
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Code to validate and convert settings of the Microsoft build tools. This file contains code to validate and convert settings of the Microsoft build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), and ValidateMSBuildSettings() are the entry points. This file was created by comparing the projects created by Visual Studio 2008 and Visual Studio 2010 for all available settings through the user interface. The MSBuild schemas were also considered. They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ import sys import re # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} _msbuild_validators = {} # A dictionary of settings converters. The key is the tool name, the value is # a dictionary mapping setting names to conversion functions. _msvs_to_msbuild_converters = {} # Tool name mapping from MSVS to MSBuild. _msbuild_name_of_tool = {} class _Tool(object): """Represents a tool used by MSVS or MSBuild. Attributes: msvs_name: The name of the tool in MSVS. msbuild_name: The name of the tool in MSBuild. """ def __init__(self, msvs_name, msbuild_name): self.msvs_name = msvs_name self.msbuild_name = msbuild_name def _AddTool(tool): """Adds a tool to the four dictionaries used to process settings. This only defines the tool. Each setting also needs to be added. Args: tool: The _Tool object to be added. """ _msvs_validators[tool.msvs_name] = {} _msbuild_validators[tool.msbuild_name] = {} _msvs_to_msbuild_converters[tool.msvs_name] = {} _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name def _GetMSBuildToolSettings(msbuild_settings, tool): """Returns an MSBuild tool dictionary. Creates it if needed.""" return msbuild_settings.setdefault(tool.msbuild_name, {}) class _Type(object): """Type of settings (Base class).""" def ValidateMSVS(self, value): """Verifies that the value is legal for MSVS. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSVS. """ def ValidateMSBuild(self, value): """Verifies that the value is legal for MSBuild. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSBuild. """ def ConvertToMSBuild(self, value): """Returns the MSBuild equivalent of the MSVS value given. Args: value: the MSVS value to convert. Returns: the MSBuild equivalent. Raises: ValueError if value is not valid. 
""" return value class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros return ConvertVCMacrosToMSBuild(value) class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros if isinstance(value, list): return [ConvertVCMacrosToMSBuild(i) for i in value] else: return ConvertVCMacrosToMSBuild(value) class _Boolean(_Type): """Boolean settings, can have the values 'false' or 'true'.""" def _Validate(self, value): if value != 'true' and value != 'false': raise ValueError('expected bool; got %r' % value) def ValidateMSVS(self, value): self._Validate(value) def ValidateMSBuild(self, value): self._Validate(value) def ConvertToMSBuild(self, value): self._Validate(value) return value class _Integer(_Type): """Integer settings.""" def __init__(self, msbuild_base=10): _Type.__init__(self) self._msbuild_base = msbuild_base def ValidateMSVS(self, value): # Try to convert, this will raise ValueError if invalid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): # Try to convert, this will raise ValueError if invalid. int(value, self._msbuild_base) def ConvertToMSBuild(self, value): msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' return msbuild_format % int(value) class _Enumeration(_Type): """Type of settings that is an enumeration. In MSVS, the values are indexes like '0', '1', and '2'. MSBuild uses text labels that are more representative, like 'Win32'. Constructor args: label_list: an array of MSBuild labels that correspond to the MSVS index. In the rare cases where MSVS has skipped an index value, None is used in the array to indicate the unused spot. new: an array of labels that are new to MSBuild. """ def __init__(self, label_list, new=None): _Type.__init__(self) self._label_list = label_list self._msbuild_values = set(value for value in label_list if value is not None) if new is not None: self._msbuild_values.update(new) def ValidateMSVS(self, value): # Try to convert. It will raise an exception if not valid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): if value not in self._msbuild_values: raise ValueError('unrecognized enumerated value %s' % value) def ConvertToMSBuild(self, value): index = int(value) if index < 0 or index >= len(self._label_list): raise ValueError('index value (%d) not in expected range [0, %d)' % (index, len(self._label_list))) label = self._label_list[index] if label is None: raise ValueError('converted value for %s not specified.' % value) return label # Instantiate the various generic types. _boolean = _Boolean() _integer = _Integer() # For now, we don't do any special validation on these types: _string = _String() _file_name = _String() _folder_name = _String() _file_list = _StringList() _folder_list = _StringList() _string_list = _StringList() # Some boolean settings went from numerical values to boolean. The # mapping is 0: default, 1: false, 2: true. 
_newly_boolean = _Enumeration(['', 'false', 'true']) def _Same(tool, name, setting_type): """Defines a setting that has the same name in MSVS and MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ _Renamed(tool, name, name, setting_type) def _Renamed(tool, msvs_name, msbuild_name, setting_type): """Defines a setting for which the name has changed. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_name: the name of the MSVS setting. msbuild_name: the name of the MSBuild setting. setting_type: the type of this setting. """ def _Translate(value, msbuild_settings): msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS _msbuild_validators[tool.msbuild_name][msbuild_name] = ( setting_type.ValidateMSBuild) _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate def _Moved(tool, settings_name, msbuild_tool_name, setting_type): _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name, setting_type) def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type): """Defines a setting that may have moved to a new section. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_settings_name: the MSVS name of the setting. msbuild_tool_name: the name of the MSBuild tool to place the setting under. msbuild_settings_name: the MSBuild name of the setting. setting_type: the type of this setting. """ def _Translate(value, msbuild_settings): tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) _msvs_validators[tool.msvs_name][msvs_settings_name] = ( setting_type.ValidateMSVS) validator = setting_type.ValidateMSBuild _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate def _MSVSOnly(tool, name, setting_type): """Defines a setting that is only found in MSVS. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ def _Translate(unused_value, unused_msbuild_settings): # Since this is for MSVS only settings, no translation will happen. pass _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate def _MSBuildOnly(tool, name, setting_type): """Defines a setting that is only found in MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. name: the name of the setting. setting_type: the type of this setting. """ _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild def _ConvertedToAdditionalOption(tool, msvs_name, flag): """Defines a setting that's handled via a command line option in MSBuild. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. 
msvs_name: the name of the MSVS setting that if 'true' becomes a flag flag: the flag to insert at the end of the AdditionalOptions """ def _Translate(value, msbuild_settings): if value == 'true': tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) if 'AdditionalOptions' in tool_settings: new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag) else: new_flags = flag tool_settings['AdditionalOptions'] = new_flags _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate def _CustomGeneratePreprocessedFile(tool, msvs_name): def _Translate(value, msbuild_settings): tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) if value == '0': tool_settings['PreprocessToFile'] = 'false' tool_settings['PreprocessSuppressLineNumbers'] = 'false' elif value == '1': # /P tool_settings['PreprocessToFile'] = 'true' tool_settings['PreprocessSuppressLineNumbers'] = 'false' elif value == '2': # /EP /P tool_settings['PreprocessToFile'] = 'true' tool_settings['PreprocessSuppressLineNumbers'] = 'true' else: raise ValueError('value must be one of [0, 1, 2]; got %s' % value) # Create a bogus validator that looks for '0', '1', or '2' msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator msbuild_validator = _boolean.ValidateMSBuild msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] msbuild_tool_validators['PreprocessToFile'] = msbuild_validator msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir') fix_vc_macro_slashes_regex = re.compile( r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list) ) def FixVCMacroSlashes(s): """Replace macros which have excessive following slashes. These macros are known to have a built-in trailing slash. Furthermore, many scripts hiccup on processing paths with extra slashes in the middle. This list is probably not exhaustive. Add as needed. """ if '$' in s: s = fix_vc_macro_slashes_regex.sub(r'\1', s) return s def ConvertVCMacrosToMSBuild(s): """Convert the the MSVS macros found in the string to the MSBuild equivalent. This list is probably not exhaustive. Add as needed. """ if '$' in s: replace_map = { '$(ConfigurationName)': '$(Configuration)', '$(InputDir)': '%(RootDir)%(Directory)', '$(InputExt)': '%(Extension)', '$(InputFileName)': '%(Filename)%(Extension)', '$(InputName)': '%(Filename)', '$(InputPath)': '%(FullPath)', '$(ParentName)': '$(ProjectFileName)', '$(PlatformName)': '$(Platform)', '$(SafeInputName)': '%(Filename)', } for old, new in replace_map.iteritems(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). Args: msvs_settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. Returns: A dictionary of MSBuild settings. The key is either the MSBuild tool name or the empty string (for the global settings). The values are themselves dictionaries of settings and their values. 
""" msbuild_settings = {} for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): if msvs_tool_name in _msvs_to_msbuild_converters: msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): if msvs_setting in msvs_tool: # Invoke the translation function. try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) except ValueError, e: print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' '%s' % (msvs_tool_name, msvs_setting, e)) else: # We don't know this setting. Give a warning. print >> stderr, ('Warning: unrecognized setting %s/%s ' 'while converting to MSBuild.' % (msvs_tool_name, msvs_setting)) else: print >> stderr, ('Warning: unrecognized tool %s while converting to ' 'MSBuild.' % msvs_tool_name) return msbuild_settings def ValidateMSVSSettings(settings, stderr=sys.stderr): """Validates that the names of the settings are valid for MSVS. Args: settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ _ValidateSettings(_msvs_validators, settings, stderr) def ValidateMSBuildSettings(settings, stderr=sys.stderr): """Validates that the names of the settings are valid for MSBuild. Args: settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ _ValidateSettings(_msbuild_validators, settings, stderr) def _ValidateSettings(validators, settings, stderr): """Validates that the settings are valid for MSBuild or MSVS. We currently only validate the names of the settings, not their values. Args: validators: A dictionary of tools and their validators. settings: A dictionary. The key is the tool name. The values are themselves dictionaries of settings and their values. stderr: The stream receiving the error messages. """ for tool_name in settings: if tool_name in validators: tool_validators = validators[tool_name] for setting, value in settings[tool_name].iteritems(): if setting in tool_validators: try: tool_validators[setting](value) except ValueError, e: print >> stderr, ('Warning: for %s/%s, %s' % (tool_name, setting, e)) else: print >> stderr, ('Warning: unrecognized setting %s/%s' % (tool_name, setting)) else: print >> stderr, ('Warning: unrecognized tool %s' % tool_name) # MSVS and MBuild names of the tools. _compile = _Tool('VCCLCompilerTool', 'ClCompile') _link = _Tool('VCLinkerTool', 'Link') _midl = _Tool('VCMIDLTool', 'Midl') _rc = _Tool('VCResourceCompilerTool', 'ResourceCompile') _lib = _Tool('VCLibrarianTool', 'Lib') _manifest = _Tool('VCManifestTool', 'Manifest') _AddTool(_compile) _AddTool(_link) _AddTool(_midl) _AddTool(_rc) _AddTool(_lib) _AddTool(_manifest) # Add sections only found in the MSBuild settings. _msbuild_validators[''] = {} _msbuild_validators['ProjectReference'] = {} _msbuild_validators['ManifestResourceCompile'] = {} # Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and # ClCompile in MSBuild. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for # the schema of the MSBuild ClCompile settings. 
# Options that have the same name in MSVS and MSBuild _Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_compile, 'AdditionalOptions', _string_list) _Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI _Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa _Same(_compile, 'BrowseInformationFile', _file_name) _Same(_compile, 'BufferSecurityCheck', _boolean) # /GS _Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za _Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd _Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT _Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false' _Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx _Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except _Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope _Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI _Same(_compile, 'ForcedUsingFiles', _file_list) # /FU _Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc _Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X _Same(_compile, 'MinimalRebuild', _boolean) # /Gm _Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl _Same(_compile, 'OmitFramePointers', _boolean) # /Oy _Same(_compile, 'PreprocessorDefinitions', _string_list) # /D _Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd _Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR _Same(_compile, 'ShowIncludes', _boolean) # /showIncludes _Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc _Same(_compile, 'StringPooling', _boolean) # /GF _Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo _Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t _Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u _Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U _Same(_compile, 'UseFullPaths', _boolean) # /FC _Same(_compile, 'WholeProgramOptimization', _boolean) # /GL _Same(_compile, 'XMLDocumentationFileName', _file_name) _Same(_compile, 'AssemblerOutput', _Enumeration(['NoListing', 'AssemblyCode', # /FA 'All', # /FAcs 'AssemblyAndMachineCode', # /FAc 'AssemblyAndSourceCode'])) # /FAs _Same(_compile, 'BasicRuntimeChecks', _Enumeration(['Default', 'StackFrameRuntimeCheck', # /RTCs 'UninitializedLocalUsageCheck', # /RTCu 'EnableFastChecks'])) # /RTC1 _Same(_compile, 'BrowseInformation', _Enumeration(['false', 'true', # /FR 'true'])) # /Fr _Same(_compile, 'CallingConvention', _Enumeration(['Cdecl', # /Gd 'FastCall', # /Gr 'StdCall'])) # /Gz _Same(_compile, 'CompileAs', _Enumeration(['Default', 'CompileAsC', # /TC 'CompileAsCpp'])) # /TP _Same(_compile, 'DebugInformationFormat', _Enumeration(['', # Disabled 'OldStyle', # /Z7 None, 'ProgramDatabase', # /Zi 'EditAndContinue'])) # /ZI _Same(_compile, 'EnableEnhancedInstructionSet', _Enumeration(['NotSet', 'StreamingSIMDExtensions', # /arch:SSE 'StreamingSIMDExtensions2'])) # /arch:SSE2 _Same(_compile, 'ErrorReporting', _Enumeration(['None', # /errorReport:none 'Prompt', # /errorReport:prompt 'Queue'], # /errorReport:queue new=['Send'])) # /errorReport:send" _Same(_compile, 'ExceptionHandling', _Enumeration(['false', 'Sync', # /EHsc 'Async'], # /EHa new=['SyncCThrow'])) # /EHs _Same(_compile, 'FavorSizeOrSpeed', _Enumeration(['Neither', 'Speed', # /Ot 'Size'])) # /Os _Same(_compile, 'FloatingPointModel', _Enumeration(['Precise', # /fp:precise 'Strict', # /fp:strict 'Fast'])) # /fp:fast _Same(_compile, 'InlineFunctionExpansion', _Enumeration(['Default', 'OnlyExplicitInline', # 
/Ob1 'AnySuitable'], # /Ob2 new=['Disabled'])) # /Ob0 _Same(_compile, 'Optimization', _Enumeration(['Disabled', # /Od 'MinSpace', # /O1 'MaxSpeed', # /O2 'Full'])) # /Ox _Same(_compile, 'RuntimeLibrary', _Enumeration(['MultiThreaded', # /MT 'MultiThreadedDebug', # /MTd 'MultiThreadedDLL', # /MD 'MultiThreadedDebugDLL'])) # /MDd _Same(_compile, 'StructMemberAlignment', _Enumeration(['Default', '1Byte', # /Zp1 '2Bytes', # /Zp2 '4Bytes', # /Zp4 '8Bytes', # /Zp8 '16Bytes'])) # /Zp16 _Same(_compile, 'WarningLevel', _Enumeration(['TurnOffAllWarnings', # /W0 'Level1', # /W1 'Level2', # /W2 'Level3', # /W3 'Level4'], # /W4 new=['EnableAllWarnings'])) # /Wall # Options found in MSVS that have been renamed in MSBuild. _Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', _boolean) # /Gy _Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', _boolean) # /Oi _Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C _Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo _Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp _Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', _file_name) # Used with /Yc and /Yu _Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', _file_name) # /Fp _Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', _Enumeration(['NotUsing', # VS recognized '' for this value too. 'Create', # /Yc 'Use'])) # /Yu _Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX _ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') # MSVS options not found in MSBuild. _MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) _MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. _MSBuildOnly(_compile, 'BuildingInIDE', _boolean) _MSBuildOnly(_compile, 'CompileAsManaged', _Enumeration([], new=['false', 'true', # /clr 'Pure', # /clr:pure 'Safe', # /clr:safe 'OldSyntax'])) # /clr:oldSyntax _MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch _MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP _MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi _MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors _MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we _MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu # Defines a setting that needs very customized processing _CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') # Directives for converting MSVS VCLinkerTool to MSBuild Link. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for # the schema of the MSBuild Link settings. 
# Options that have the same name in MSVS and MSBuild _Same(_link, 'AdditionalDependencies', _file_list) _Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH # /MANIFESTDEPENDENCY: _Same(_link, 'AdditionalManifestDependencies', _file_list) _Same(_link, 'AdditionalOptions', _string_list) _Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE _Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION _Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE _Same(_link, 'BaseAddress', _string) # /BASE _Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK _Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD _Same(_link, 'DelaySign', _boolean) # /DELAYSIGN _Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE _Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC _Same(_link, 'EntryPointSymbol', _string) # /ENTRY _Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE _Same(_link, 'FunctionOrder', _file_name) # /ORDER _Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG _Same(_link, 'GenerateMapFile', _boolean) # /MAP _Same(_link, 'HeapCommitSize', _string) _Same(_link, 'HeapReserveSize', _string) # /HEAP _Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB _Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL _Same(_link, 'ImportLibrary', _file_name) # /IMPLIB _Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER _Same(_link, 'KeyFile', _file_name) # /KEYFILE _Same(_link, 'ManifestFile', _file_name) # /ManifestFile _Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS _Same(_link, 'MapFileName', _file_name) _Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT _Same(_link, 'MergeSections', _string) # /MERGE _Same(_link, 'MidlCommandFile', _file_name) # /MIDL _Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF _Same(_link, 'OutputFile', _file_name) # /OUT _Same(_link, 'PerUserRedirection', _boolean) _Same(_link, 'Profile', _boolean) # /PROFILE _Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD _Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB _Same(_link, 'RegisterOutput', _boolean) _Same(_link, 'SetChecksum', _boolean) # /RELEASE _Same(_link, 'StackCommitSize', _string) _Same(_link, 'StackReserveSize', _string) # /STACK _Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED _Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD _Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO _Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD _Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY _Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT _Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID _Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true' _Same(_link, 'Version', _string) # /VERSION _Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF _Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED _Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE _Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF _Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE _Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE _subsystem_enumeration = _Enumeration( ['NotSet', 'Console', # /SUBSYSTEM:CONSOLE 'Windows', # /SUBSYSTEM:WINDOWS 'Native', # /SUBSYSTEM:NATIVE 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER 'EFI ROM', # /SUBSYSTEM:EFI_ROM 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER 'WindowsCE'], 
# /SUBSYSTEM:WINDOWSCE new=['POSIX']) # /SUBSYSTEM:POSIX _target_machine_enumeration = _Enumeration( ['NotSet', 'MachineX86', # /MACHINE:X86 None, 'MachineARM', # /MACHINE:ARM 'MachineEBC', # /MACHINE:EBC 'MachineIA64', # /MACHINE:IA64 None, 'MachineMIPS', # /MACHINE:MIPS 'MachineMIPS16', # /MACHINE:MIPS16 'MachineMIPSFPU', # /MACHINE:MIPSFPU 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16 None, None, None, 'MachineSH4', # /MACHINE:SH4 None, 'MachineTHUMB', # /MACHINE:THUMB 'MachineX64']) # /MACHINE:X64 _Same(_link, 'AssemblyDebug', _Enumeration(['', 'true', # /ASSEMBLYDEBUG 'false'])) # /ASSEMBLYDEBUG:DISABLE _Same(_link, 'CLRImageType', _Enumeration(['Default', 'ForceIJWImage', # /CLRIMAGETYPE:IJW 'ForcePureILImage', # /Switch="CLRIMAGETYPE:PURE 'ForceSafeILImage'])) # /Switch="CLRIMAGETYPE:SAFE _Same(_link, 'CLRThreadAttribute', _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA _Same(_link, 'DataExecutionPrevention', _Enumeration(['', 'false', # /NXCOMPAT:NO 'true'])) # /NXCOMPAT _Same(_link, 'Driver', _Enumeration(['NotSet', 'Driver', # /Driver 'UpOnly', # /DRIVER:UPONLY 'WDM'])) # /DRIVER:WDM _Same(_link, 'LinkTimeCodeGeneration', _Enumeration(['Default', 'UseLinkTimeCodeGeneration', # /LTCG 'PGInstrument', # /LTCG:PGInstrument 'PGOptimization', # /LTCG:PGOptimize 'PGUpdate'])) # /LTCG:PGUpdate _Same(_link, 'ShowProgress', _Enumeration(['NotSet', 'LinkVerbose', # /VERBOSE 'LinkVerboseLib'], # /VERBOSE:Lib new=['LinkVerboseICF', # /VERBOSE:ICF 'LinkVerboseREF', # /VERBOSE:REF 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH 'LinkVerboseCLR'])) # /VERBOSE:CLR _Same(_link, 'SubSystem', _subsystem_enumeration) _Same(_link, 'TargetMachine', _target_machine_enumeration) _Same(_link, 'UACExecutionLevel', _Enumeration(['AsInvoker', # /level='asInvoker' 'HighestAvailable', # /level='highestAvailable' 'RequireAdministrator'])) # /level='requireAdministrator' _Same(_link, 'MinimumRequiredVersion', _string) _Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX # Options found in MSVS that have been renamed in MSBuild. _Renamed(_link, 'ErrorReporting', 'LinkErrorReporting', _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE 'PromptImmediately', # /ERRORREPORT:PROMPT 'QueueForNextLogin'], # /ERRORREPORT:QUEUE new=['SendErrorReport'])) # /ERRORREPORT:SEND _Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB _Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY _Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET _Moved(_link, 'GenerateManifest', '', _boolean) _Moved(_link, 'IgnoreImportLibrary', '', _boolean) _Moved(_link, 'LinkIncremental', '', _newly_boolean) _Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean) _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean) # MSVS options not found in MSBuild. _MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean) _MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean) # These settings generate correctly in the MSVS output files when using # e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from # configuration entries, but result in spurious artifacts which can be # safely ignored here. 
See crbug.com/246570 _MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list) _MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list) _MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list) # MSBuild options not found in MSVS. _MSBuildOnly(_link, 'BuildingInIDE', _boolean) _MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH _MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false' _MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS _MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND _MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND _MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false' _MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN _MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION _MSBuildOnly(_link, 'ForceFileOutput', _Enumeration([], new=['Enabled', # /FORCE # /FORCE:MULTIPLE 'MultiplyDefinedSymbolOnly', 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED _MSBuildOnly(_link, 'CreateHotPatchableImage', _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN 'X86Image', # /FUNCTIONPADMIN:5 'X64Image', # /FUNCTIONPADMIN:6 'ItaniumImage'])) # /FUNCTIONPADMIN:16 _MSBuildOnly(_link, 'CLRSupportLastError', _Enumeration([], new=['Enabled', # /CLRSupportLastError 'Disabled', # /CLRSupportLastError:NO # /CLRSupportLastError:SYSTEMDLL 'SystemDlls'])) # Directives for converting VCResourceCompilerTool to ResourceCompile. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for # the schema of the MSBuild ResourceCompile settings. _Same(_rc, 'AdditionalOptions', _string_list) _Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_rc, 'Culture', _Integer(msbuild_base=16)) _Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X _Same(_rc, 'PreprocessorDefinitions', _string_list) # /D _Same(_rc, 'ResourceOutputFileName', _string) # /fo _Same(_rc, 'ShowProgress', _boolean) # /v # There is no UI in VisualStudio 2008 to set the following properties. # However they are found in CL and other tools. Include them here for # completeness, as they are very likely to have the same usage pattern. _Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo _Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u # MSBuild options not found in MSVS. _MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n _MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name) # Directives for converting VCMIDLTool to Midl. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for # the schema of the MSBuild Midl settings. 
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I _Same(_midl, 'AdditionalOptions', _string_list) _Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt _Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation _Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check _Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum _Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref _Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data _Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf _Same(_midl, 'GenerateTypeLibrary', _boolean) _Same(_midl, 'HeaderFileName', _file_name) # /h _Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir _Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid _Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203 _Same(_midl, 'OutputDirectory', _string) # /out _Same(_midl, 'PreprocessorDefinitions', _string_list) # /D _Same(_midl, 'ProxyFileName', _file_name) # /proxy _Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o _Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo _Same(_midl, 'TypeLibraryName', _file_name) # /tlb _Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U _Same(_midl, 'WarnAsError', _boolean) # /WX _Same(_midl, 'DefaultCharType', _Enumeration(['Unsigned', # /char unsigned 'Signed', # /char signed 'Ascii'])) # /char ascii7 _Same(_midl, 'TargetEnvironment', _Enumeration(['NotSet', 'Win32', # /env win32 'Itanium', # /env ia64 'X64'])) # /env x64 _Same(_midl, 'EnableErrorChecks', _Enumeration(['EnableCustom', 'None', # /error none 'All'])) # /error all _Same(_midl, 'StructMemberAlignment', _Enumeration(['NotSet', '1', # Zp1 '2', # Zp2 '4', # Zp4 '8'])) # Zp8 _Same(_midl, 'WarningLevel', _Enumeration(['0', # /W0 '1', # /W1 '2', # /W2 '3', # /W3 '4'])) # /W4 _Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata _Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters', _boolean) # /robust # MSBuild options not found in MSVS. _MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config _MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub _MSBuildOnly(_midl, 'GenerateClientFiles', _Enumeration([], new=['Stub', # /client stub 'None'])) # /client none _MSBuildOnly(_midl, 'GenerateServerFiles', _Enumeration([], new=['Stub', # /client stub 'None'])) # /client none _MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL _MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub _MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn _MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_midl, 'TypeLibFormat', _Enumeration([], new=['NewFormat', # /newtlb 'OldFormat'])) # /oldtlb # Directives for converting VCLibrarianTool to Lib. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for # the schema of the MSBuild Lib settings. 
_Same(_lib, 'AdditionalDependencies', _file_list) _Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH _Same(_lib, 'AdditionalOptions', _string_list) _Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT _Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE _Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB _Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB _Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF _Same(_lib, 'OutputFile', _file_name) # /OUT _Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO _Same(_lib, 'UseUnicodeResponseFiles', _boolean) _Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG _Same(_lib, 'TargetMachine', _target_machine_enumeration) # TODO(jeanluc) _link defines the same value that gets moved to # ProjectReference. We may want to validate that they are consistent. _Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean) # TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. _MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list) _MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false' _MSBuildOnly(_lib, 'ErrorReporting', _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT 'QueueForNextLogin', # /ERRORREPORT:QUEUE 'SendErrorReport', # /ERRORREPORT:SEND 'NoErrorReport'])) # /ERRORREPORT:NONE _MSBuildOnly(_lib, 'MinimumRequiredVersion', _string) _MSBuildOnly(_lib, 'Name', _file_name) # /NAME _MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE _MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration) _MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX _MSBuildOnly(_lib, 'Verbose', _boolean) # Directives for converting VCManifestTool to Mt. # See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for # the schema of the MSBuild Lib settings. # Options that have the same name in MSVS and MSBuild _Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest _Same(_manifest, 'AdditionalOptions', _string_list) _Same(_manifest, 'AssemblyIdentity', _string) # /identity: _Same(_manifest, 'ComponentFileName', _file_name) # /dll _Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs _Same(_manifest, 'InputResourceManifests', _string) # /inputresource _Same(_manifest, 'OutputManifestFile', _file_name) # /out _Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs _Same(_manifest, 'ReplacementsFile', _file_name) # /replacements _Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo _Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb: _Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate _Same(_manifest, 'UpdateFileHashesSearchPath', _file_name) _Same(_manifest, 'VerboseOutput', _boolean) # /verbose # Options that have moved location. _MovedAndRenamed(_manifest, 'ManifestResourceFile', 'ManifestResourceCompile', 'ResourceOutputFileName', _file_name) _Moved(_manifest, 'EmbedManifest', '', _boolean) # MSVS options not found in MSBuild. _MSVSOnly(_manifest, 'DependencyInformationFile', _file_name) _MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean) _MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. 
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean) _MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category _MSBuildOnly(_manifest, 'ManifestFromManagedAssembly', _file_name) # /managedassemblyname _MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource _MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency _MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
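A small usage sketch of ConvertToMSBuildSettings from the module above; the settings dict is invented. Per the tables above, 'WarningLevel' keeps its name but maps index '3' to the MSBuild label 'Level3', while 'WarnAsError' is renamed.

# A sketch with an invented settings dict.
msvs = {'VCCLCompilerTool': {'WarningLevel': '3', 'WarnAsError': 'true'}}
msbuild = ConvertToMSBuildSettings(msvs)
# -> {'ClCompile': {'WarningLevel': 'Level3', 'TreatWarningAsError': 'true'}}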
stephenmcd/ratemyflight
refs/heads/master
ratemyflight/forms.py
1
from hashlib import md5

from django import forms
from django.utils.translation import ugettext_lazy as _

from ratemyflight.models import Rating
from ratemyflight.settings import GRAVATAR_SIZE


class RatingForm(forms.ModelForm):

    email = forms.EmailField(label=_("Email"), required=False,
        help_text="Only used to retrieve your gravatar icon.")
    value = forms.FloatField(label=_("Rating"), help_text="From 0 to 10",
        max_value=10, min_value=0)

    class Meta:
        model = Rating
        exclude = ("avatar_url", "time", "tweet_id", "tweet_text")

    def __init__(self, *args, **kwargs):
        """
        Set fields as required.
        """
        super(RatingForm, self).__init__(*args, **kwargs)
        for (name, field) in self.fields.items():
            if name != "email":
                field.required = True

    def save(self):
        """
        Set the avatar's URL using gravatar and the given email address.
        """
        rating = super(RatingForm, self).save()
        email = self.cleaned_data.get("email")
        if email:
            rating.avatar_url = "http://www.gravatar.com/avatar/%s?s=%s" % \
                (md5(email).hexdigest(), GRAVATAR_SIZE)
            rating.save()
        # ModelForm.save() conventionally returns the saved instance.
        return rating
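For illustration, a minimal sketch of the gravatar URL scheme RatingForm.save() relies on; the address is invented. Note the Python 2 module above hashes the raw string, while Python 3's md5() needs bytes.

# A sketch with an invented address and size.
from hashlib import md5

email = "user@example.com"
url = "http://www.gravatar.com/avatar/%s?s=%s" % (
    md5(email.encode("utf-8")).hexdigest(), 80)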
neilh10/micropython
refs/heads/master
tests/basics/slots_bool_len.py
118
class A: def __bool__(self): print('__bool__') return True def __len__(self): print('__len__') return 1 class B: def __len__(self): print('__len__') return 0 print(bool(A())) print(len(A())) print(bool(B())) print(len(B()))
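For contrast, a quick illustration of the lookup order the test above exercises: bool() prefers __bool__ and falls back to __len__; with neither defined, instances are simply truthy.

# A minimal contrast case.
class C:
    pass

print(bool(C()))  # True, without calling any special method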
sangit/heekscnc
refs/heads/master
pycnc/Tools.py
24
from Object import Object
from Tool import Tool
from consts import *
import HeeksCNC

class Tools(Object):
    def __init__(self):
        Object.__init__(self)

    def TypeName(self):
        return "Tools"

    def icon(self):
        # the name of the PNG file in the HeeksCNC icons folder
        return "tools"

    def CanBeDeleted(self):
        return False

    def AddToPopupMenu(self, menu):
        menu.AddItem("save as default tools", self.OnSaveDefault)

    def OnSaveDefault(self):
        self.save_default()

    def save_default(self):
        import cPickle
        f = open(HeeksCNC.heekscnc_path + "/default_tools.txt", "w")
        for tool in self.children:
            cPickle.dump(tool, f)
        f.close()

    def load_default(self):
        self.ClearChildren()
        try:
            f = open(HeeksCNC.heekscnc_path + "/default_tools.txt")
        except:
            # no default file found, add 2 tools
            self.Add(Tool(diameter = 3.0, type = TOOL_TYPE_SLOTCUTTER, tool_number = 1))
            self.Add(Tool(diameter = 6.0, type = TOOL_TYPE_SLOTCUTTER, tool_number = 2))
            return
        import cPickle
        # Bump the counter on the Object module itself; rebinding a name
        # imported with "from Object import next_object_index" would only
        # change a local and leave the shared counter stale.
        import Object
        while True:
            try:
                tool = cPickle.load(f)
            except:
                break # end of file
            tool.index = Object.next_object_index
            Object.next_object_index += 1
            self.Add(tool)
        f.close()

    def FindAllTools(self):
        tools = []
        tools.append( (0, "No tool") )
        for child in self.children:
            tools.append( (child.tool_number, child.name()) )
        return tools

    def FindFirstTool(self, type):
        for child in self.children:
            if child.type == type:
                return child
        return None

    def FindTool(self, tool_number):
        for child in self.children:
            if child.tool_number == tool_number:
                return child
        return None
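For illustration, a minimal sketch of the "pickle until EOF" pattern used by load_default() above, written for Python 3; catching EOFError is the idiomatic replacement for the bare except in the original.

# A sketch only; the path is supplied by the caller.
import pickle

def load_all(path):
    items = []
    with open(path, "rb") as f:
        while True:
            try:
                items.append(pickle.load(f))
            except EOFError:  # pickle.load raises this at end of file
                break
    return items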
Wafflespeanut/servo
refs/heads/master
tests/wpt/update_css.py
116
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import sys

here = os.path.split(__file__)[0]


def wpt_path(*args):
    return os.path.join(here, *args)

# Imports
sys.path.append(wpt_path("harness"))
from wptrunner import wptcommandline


def update_tests(**kwargs):
    from wptrunner import update

    set_defaults(kwargs)
    logger = update.setup_logging(kwargs, {"mach": sys.stdout})

    rv = update.run_update(logger, **kwargs)
    return 0 if rv is update.update.exit_clean else 1


def set_defaults(kwargs):
    if kwargs["product"] is None:
        kwargs["product"] = "servo"

    if kwargs["config"] is None:
        kwargs["config"] = wpt_path('config_css.ini')

    wptcommandline.set_from_config(kwargs)


def main():
    parser = wptcommandline.create_parser_update()
    kwargs = vars(parser.parse_args())
    return update_tests(**kwargs)


if __name__ == "__main__":
    # main() already returns a shell-style exit code (0 on success), so pass
    # it straight through; re-mapping it with "0 if main() else 1" inverted
    # the exit status.
    sys.exit(main())
brianhouse/housepy
refs/heads/master
lib/xbee/__init__.py
1
""" XBee package initalization file By Paul Malmsten, 2010 pmalmsten@gmail.com """ from .ieee import XBee from .zigbee import ZigBee
Mego/DataBot
refs/heads/master
SE-Chatbot/templates/ModuleTemplate.py
1
# The commands listed in this file can be read and loaded as a Module into a MetaModule by the load_module() function # Add necessary import to this file, including: # from Module import Command # import SaveIO # For if you want to save and load objects for this module. # save_subdir = '<subdir_name>' # Define a save subdirectory for this Module, must be unique in the project. If this is not set, saves and loads will fail. # SaveIO.save(<object>, save_subdir, <filename>) # Saves an object, filename does not need an extension. # SaveIO.load(save_subdir, <filename>) # Loads and returns an object, filename does not need an extension. # def on_bot_load(bot): # This will get called when the bot loads (after your module has been loaded in), use to perform additional setup for this module. # pass # def on_bot_stop(bot): # This will get called when the bot is stopping. # pass # def on_event(event, client, bot): # This will get called on any event (messages, new user entering the room, etc.) # pass # Logic for the commands goes here. # # def <command exec name>(cmd, bot, args, msg, event): # cmd refers to the Command you assign this function to # return "I'm in test1" # # def <command exec name>(cmd, bot, args, msg, event): # cmd refers to the Command you assign this function to # return "I'm in test1" # # ... commands = [ # A list of all Commands in this Module. # Command( '<command name>', <command exec name>, '<help text>' (optional), <needs privilege> (= False), <owner only> (= False), <special arg parsing method>(*) (= None), <aliases> (= None), <allowed chars> (= string.printable), <disallowed chars> (= None) (**) ), # Command( '<command name>', <command exec name>, '<help text>' (optional), <needs privilege> (= False), <owner only> (= False), <special arg parsing method>(*) (= None), <aliases> (= None), <allowed chars> (= string.printable), <disallowed chars> (= None) (**) ), # ... ] # (*) <special arg parsing method> = Some commands require a non-default argument parsing method. # Pass it there when necessary. It must return the array of arguments. # (**) Allowed and disallowed chars # You can choose to allow/disallow a specific set of characters in the command's arguments. # By default, the allowed chars is string.printable (see https://docs.python.org/3/library/string.html#string-constants for string constants). # If a char is both allowed and disallowed, disallowed has higher importance. # If allowed_chars is None, all chars are allowed (unless those specified in disallowed_chars). # module_name = "<name used to address this module>"
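For illustration, a hedged, filled-in instance of the template above; the command name and handler are invented, and the Command signature follows the template's own comments.

# A sketch only; 'echo' and do_echo are invented names.
from Module import Command

def do_echo(cmd, bot, args, msg, event):
    return " ".join(args) if args else "Nothing to echo."

commands = [
    Command('echo', do_echo, 'Repeats its arguments back.'),
]

module_name = "echo"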
siosio/intellij-community
refs/heads/master
python/testData/refactoring/changeSignature/removeKeywordFromArgumentBeforeVararg.after.py
27
def f(y, x, *args): pass f(1, 42, 2, 3)
DMLoy/ECommerceBasic
refs/heads/master
lib/python2.7/site-packages/django/conf/locale/sk/__init__.py
12133432
einarhuseby/arctic
refs/heads/master
arctic/scripts/__init__.py
12133432
chand3040/cloud_that
refs/heads/named-release/cypress.rc
common/djangoapps/embargo/__init__.py
12133432
pablohoffman/scrapy
refs/heads/master
scrapy/contrib/__init__.py
12133432
lixt/lily2-gem5
refs/heads/master
ext/ply/test/lex_error1.py
174
# lex_error1.py # # Missing t_error() rule import sys if ".." not in sys.path: sys.path.insert(0,"..") import ply.lex as lex tokens = [ "PLUS", "MINUS", "NUMBER", ] t_PLUS = r'\+' t_MINUS = r'-' t_NUMBER = r'\d+' lex.lex()
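For contrast, the t_error() rule this test deliberately omits; ply invokes it when no token rule matches, and skip(1) advances past the offending character.

# The handler the test above leaves out on purpose.
def t_error(t):
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)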
kosior/eventful
refs/heads/master
eventful/common/validators.py
1
from django.core.exceptions import ValidationError from django.utils import timezone def validate_start_date(value): if value < timezone.now() + timezone.timedelta(hours=1): raise ValidationError('Start date must be at least an hour from now.')
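A hedged sketch of attaching validate_start_date to a model field; the Event model here is invented.

# A sketch only; the Event model is invented.
from django.db import models

from .validators import validate_start_date

class Event(models.Model):
    start = models.DateTimeField(validators=[validate_start_date])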
nmayorov/scikit-learn
refs/heads/master
sklearn/datasets/mlcomp.py
289
# Copyright (c) 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
"""Glue code to load http://mlcomp.org data as a scikit.learn dataset"""

import os
import numbers
from sklearn.datasets.base import load_files


def _load_document_classification(dataset_path, metadata, set_=None, **kwargs):
    if set_ is not None:
        dataset_path = os.path.join(dataset_path, set_)
    return load_files(dataset_path, metadata.get('description'), **kwargs)


LOADERS = {
    'DocumentClassification': _load_document_classification,
    # TODO: implement the remaining domain formats
}


def load_mlcomp(name_or_id, set_="raw", mlcomp_root=None, **kwargs):
    """Load a dataset as downloaded from http://mlcomp.org

    Parameters
    ----------

    name_or_id : the integer id or the string name metadata of the MLComp
                 dataset to load

    set_ : select the portion to load: 'train', 'test' or 'raw'

    mlcomp_root : the filesystem path to the root folder where MLComp datasets
                  are stored, if mlcomp_root is None, the MLCOMP_DATASETS_HOME
                  environment variable is looked up instead.

    **kwargs : domain specific kwargs to be passed to the dataset loader.

    Read more in the :ref:`User Guide <datasets>`.

    Returns
    -------

    data : Bunch
        Dictionary-like object, the interesting attributes are:
        'filenames', the files holding the raw data to learn, 'target', the
        classification labels (integer index), 'target_names',
        the meaning of the labels, and 'DESCR', the full description of the
        dataset.

    Note on the lookup process: depending on the type of name_or_id, this
    function will choose between integer id lookup or metadata name lookup by
    looking at the unzipped archives and metadata file.

    TODO: implement zip dataset loading too
    """
    if mlcomp_root is None:
        try:
            mlcomp_root = os.environ['MLCOMP_DATASETS_HOME']
        except KeyError:
            raise ValueError("MLCOMP_DATASETS_HOME env variable is undefined")

    mlcomp_root = os.path.expanduser(mlcomp_root)
    mlcomp_root = os.path.abspath(mlcomp_root)
    mlcomp_root = os.path.normpath(mlcomp_root)

    if not os.path.exists(mlcomp_root):
        raise ValueError("Could not find folder: " + mlcomp_root)

    # dataset lookup
    if isinstance(name_or_id, numbers.Integral):
        # id lookup
        dataset_path = os.path.join(mlcomp_root, str(name_or_id))
    else:
        # assume name based lookup
        dataset_path = None
        expected_name_line = "name: " + name_or_id
        for dataset in os.listdir(mlcomp_root):
            metadata_file = os.path.join(mlcomp_root, dataset, 'metadata')
            if not os.path.exists(metadata_file):
                continue
            with open(metadata_file) as f:
                for line in f:
                    if line.strip() == expected_name_line:
                        dataset_path = os.path.join(mlcomp_root, dataset)
                        break
        if dataset_path is None:
            raise ValueError("Could not find dataset with metadata line: "
                             + expected_name_line)

    # loading the dataset metadata
    metadata = dict()
    metadata_file = os.path.join(dataset_path, 'metadata')
    if not os.path.exists(metadata_file):
        raise ValueError(dataset_path + ' is not a valid MLComp dataset')
    with open(metadata_file) as f:
        for line in f:
            if ":" in line:
                key, value = line.split(":", 1)
                metadata[key.strip()] = value.strip()

    format = metadata.get('format', 'unknown')
    loader = LOADERS.get(format)
    if loader is None:
        raise ValueError("No loader implemented for format: " + format)
    return loader(dataset_path, metadata, set_=set_, **kwargs)
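For illustration, a hedged usage sketch of load_mlcomp above; the dataset name and root path are invented, and MLCOMP_DATASETS_HOME must point at unpacked MLComp archives for the lookup to succeed.

# A sketch only; '/data/mlcomp' and the dataset name are assumptions.
import os
os.environ.setdefault('MLCOMP_DATASETS_HOME', '/data/mlcomp')  # hypothetical

from sklearn.datasets.mlcomp import load_mlcomp

news = load_mlcomp('20news-18828', set_='train')
print(len(news.filenames), news.target_names[:3])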
stephen144/odoo
refs/heads/9.0
addons/website_membership/controllers/__init__.py
7372
import main
kwikadi/orange3
refs/heads/master
Orange/evaluation/clustering.py
17
import numpy as np
from sklearn.metrics import silhouette_score, adjusted_mutual_info_score, silhouette_samples

from Orange.data import Table
from Orange.evaluation.testing import Results
from Orange.evaluation.scoring import Score

__all__ = ['ClusteringEvaluation']


class ClusteringResults(Results):
    def __init__(self, store_data=True, **kwargs):
        super().__init__(store_data=True, **kwargs)

    def get_fold(self, fold):
        results = ClusteringResults()
        results.data = self.data

        if self.folds is None:
            raise ValueError("This 'Results' instance does not have folds.")

        if self.models is not None:
            results.models = self.models[fold]

        results.row_indices = self.row_indices
        results.actual = self.actual
        results.predicted = self.predicted[:, fold, :]
        results.domain = self.domain
        return results


class ClusteringScore(Score):
    considers_actual = False

    def from_predicted(self, results, score_function):
        # Clustering scores from labels
        if self.considers_actual:
            return np.fromiter(
                (score_function(results.actual.flatten(), predicted.flatten())
                 for predicted in results.predicted),
                dtype=np.float64, count=len(results.predicted))
        # Clustering scores from data only
        else:
            return np.fromiter(
                (score_function(results.data.X, predicted.flatten())
                 for predicted in results.predicted),
                dtype=np.float64, count=len(results.predicted))


class Silhouette(ClusteringScore):
    separate_folds = True

    def compute_score(self, results):
        return self.from_predicted(results, silhouette_score)


class AdjustedMutualInfoScore(ClusteringScore):
    separate_folds = True
    considers_actual = True

    def compute_score(self, results):
        return self.from_predicted(results, adjusted_mutual_info_score)


class ClusteringEvaluation(ClusteringResults):
    """
    Clustering evaluation.

    If the constructor is given the data and a list of learning algorithms,
    it runs clustering and returns an instance of `Results` containing the
    predicted clustering labels.

    .. attribute:: k

        The number of runs.
    """
    def __init__(self, data, learners, k=1, store_models=False):
        super().__init__(data=data, nmethods=len(learners), store_data=True,
                         store_models=store_models, predicted=None)
        self.k = k
        Y = data.Y.copy().flatten()
        self.predicted = np.empty((len(learners), self.k, len(data)))
        self.folds = range(k)
        self.row_indices = np.arange(len(data))
        self.actual = data.Y.flatten() if hasattr(data, "Y") else None

        if self.store_models:
            self.models = []

        for k in range(self.k):
            if self.store_models:
                fold_models = []
                self.models.append(fold_models)

            for i, learner in enumerate(learners):
                model = learner(data)
                if self.store_models:
                    fold_models.append(model)

                labels = model(data)
                self.predicted[i, k, :] = labels.X.flatten()


def graph_silhouette(X, y, xlim=None, colors=None, figsize=None, filename=None):
    """
    Silhouette plot.

    :param filename: Output file name.
    :param X Orange.data.Table or numpy.ndarray: Data table.
    :param y Orange.data.Table or numpy.ndarray: Cluster labels (integers).
    :param colors list, optional (default = None): List of colors. If
        provided, it must equal the number of clusters.
    :param figsize tuple (float, float): Figure size (width, height) in inches.
    :param xlim tuple (float, float): Limit x-axis values.
    """
    import matplotlib.pyplot as plt

    if isinstance(X, Table):
        X = X.X
    if isinstance(y, Table):
        y = y.X
    y = y.ravel()

    # Detect number of clusters and set colors
    N = len(set(y))
    if isinstance(colors, type(None)):
        colors = ["g" if i % 2 else "b" for i in range(N)]
    elif len(colors) != N:
        import sys
        sys.stderr.write("Number of colors does not match the number of clusters.\n")
        return

    # Silhouette coefficients
    s = silhouette_samples(X, y)
    s = s[np.argsort(y)]  # Sort by clusters
    parts = []

    # Within clusters sort by silhouette scores
    for label, (i, j) in enumerate([(sum(y == c1), sum(y == c1) + sum(y == c2))
                                    for c1, c2 in zip(range(-1, N - 1), range(0, N))]):
        scores = sorted(s[i:j])
        parts.append((scores, label))

    # Plot data
    if figsize:
        plt.figure(figsize=figsize)
    else:
        plt.figure()
    plt.title("Silhouette score")
    total = 0
    centers = []
    for i, (scores, label) in enumerate(parts):
        plt.barh(range(total, total + len(scores)), scores, color=colors[i],
                 edgecolor=colors[i])
        centers.append(total + len(scores) / 2)
        total += len(scores)
    if not isinstance(xlim, type(None)):
        plt.xlim(xlim)
    plt.yticks(centers)
    plt.gca().set_yticklabels(range(N))
    plt.ylabel("Cluster label")
    if filename:
        plt.savefig(filename)
        plt.close()
    else:
        plt.show()
davibe/gstreamersdk_pygobject
refs/heads/build
gi/types.py
3
# -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2005-2009 Johan Dahlin <johan@gnome.org>
#
# types.py: base types for introspected items.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA

from __future__ import absolute_import

import sys

import gobject

from ._gi import \
    InterfaceInfo, \
    ObjectInfo, \
    StructInfo, \
    VFuncInfo, \
    set_object_has_new_constructor, \
    register_interface_info, \
    hook_up_vfunc_implementation

if sys.version_info > (3, 0):
    def callable(obj):
        return hasattr(obj, '__call__')


def Function(info):
    def function(*args):
        return info.invoke(*args)
    function.__info__ = info
    function.__name__ = info.get_name()
    function.__module__ = info.get_namespace()
    return function


def NativeVFunc(info, cls):
    def native_vfunc(*args):
        return info.invoke(*args, **dict(gtype=cls.__gtype__))
    native_vfunc.__info__ = info
    native_vfunc.__name__ = info.get_name()
    native_vfunc.__module__ = info.get_namespace()
    return native_vfunc


def Constructor(info):
    def constructor(cls, *args):
        cls_name = info.get_container().get_name()
        if cls.__name__ != cls_name:
            raise TypeError('%s constructor cannot be used to create instances of a subclass' % cls_name)
        return info.invoke(cls, *args)
    constructor.__info__ = info
    constructor.__name__ = info.get_name()
    constructor.__module__ = info.get_namespace()
    return constructor


class MetaClassHelper(object):

    def _setup_constructors(cls):
        for method_info in cls.__info__.get_methods():
            if method_info.is_constructor():
                name = method_info.get_name()
                constructor = classmethod(Constructor(method_info))
                setattr(cls, name, constructor)

    def _setup_methods(cls):
        for method_info in cls.__info__.get_methods():
            name = method_info.get_name()
            function = Function(method_info)
            if method_info.is_method():
                method = function
            elif method_info.is_constructor():
                continue
            else:
                method = staticmethod(function)
            setattr(cls, name, method)

    def _setup_fields(cls):
        for field_info in cls.__info__.get_fields():
            name = field_info.get_name().replace('-', '_')
            setattr(cls, name, property(field_info.get_value, field_info.set_value))

    def _setup_constants(cls):
        for constant_info in cls.__info__.get_constants():
            name = constant_info.get_name()
            value = constant_info.get_value()
            setattr(cls, name, value)

    def _setup_vfuncs(cls):
        for vfunc_name, py_vfunc in cls.__dict__.items():
            if not vfunc_name.startswith("do_") or not callable(py_vfunc):
                continue

            # If a method name starts with "do_" assume it is a vfunc, and
            # search in the base classes for a method with the same name to
            # override.  Recursion is not necessary here because getattr()
            # searches all super class attributes as well.
            vfunc_info = None
            for base in cls.__bases__:
                method = getattr(base, vfunc_name, None)
                if method is not None and hasattr(method, '__info__') and \
                        isinstance(method.__info__, VFuncInfo):
                    vfunc_info = method.__info__
                    break

            # If we did not find a matching method name in the bases, we might
            # be overriding an interface virtual method. Since interfaces do not
            # provide implementations, there will be no method attribute installed
            # on the object. Instead we have to search through
            # InterfaceInfo.get_vfuncs(). Note that the infos returned by
            # get_vfuncs() use the C vfunc name (ie. there is no "do_" prefix).
            if vfunc_info is None:
                vfunc_info = find_vfunc_info_in_interface(cls.__bases__, vfunc_name[len("do_"):])

            if vfunc_info is not None:
                assert vfunc_name == ('do_' + vfunc_info.get_name())
                # Check to see if there are vfuncs with the same name in the bases.
                # We have no way of specifying which one we are supposed to override.
                ambiguous_base = find_vfunc_conflict_in_bases(vfunc_info, cls.__bases__)
                if ambiguous_base is not None:
                    base_info = vfunc_info.get_container()
                    raise TypeError('Method %s() on class %s.%s is ambiguous '
                                    'with methods in base classes %s.%s and %s.%s' %
                                    (vfunc_name,
                                     cls.__info__.get_namespace(),
                                     cls.__info__.get_name(),
                                     base_info.get_namespace(),
                                     base_info.get_name(),
                                     ambiguous_base.__info__.get_namespace(),
                                     ambiguous_base.__info__.get_name()))
                hook_up_vfunc_implementation(vfunc_info, cls.__gtype__, py_vfunc)

    def _setup_native_vfuncs(cls):
        # Only InterfaceInfo and ObjectInfo have the get_vfuncs() method.
        # We skip InterfaceInfo because interfaces have no implementations for vfuncs.
        # Also check if __info__ in __dict__, not hasattr('__info__', ...)
        # because we do not want to accidentally retrieve __info__ from a base class.
        class_info = cls.__dict__.get('__info__')
        if class_info is None or not isinstance(class_info, ObjectInfo):
            return

        for vfunc_info in class_info.get_vfuncs():
            name = 'do_%s' % vfunc_info.get_name()
            value = NativeVFunc(vfunc_info, cls)
            setattr(cls, name, value)


def find_vfunc_info_in_interface(bases, vfunc_name):
    for base in bases:
        # All wrapped interfaces inherit from GInterface.
        # This can be seen in IntrospectionModule.__getattr__() in module.py.
        # We do not need to search regular classes here, only wrapped interfaces.
        # We also skip GInterface, because it is not wrapped and has no __info__ attr.
        if base is gobject.GInterface or \
                not issubclass(base, gobject.GInterface) or \
                not isinstance(base.__info__, InterfaceInfo):
            continue

        for vfunc in base.__info__.get_vfuncs():
            if vfunc.get_name() == vfunc_name:
                return vfunc

        vfunc = find_vfunc_info_in_interface(base.__bases__, vfunc_name)
        if vfunc is not None:
            return vfunc

    return None


def find_vfunc_conflict_in_bases(vfunc, bases):
    for klass in bases:
        if not hasattr(klass, '__info__') or \
                not hasattr(klass.__info__, 'get_vfuncs'):
            continue
        vfuncs = klass.__info__.get_vfuncs()
        vfunc_name = vfunc.get_name()
        for v in vfuncs:
            if v.get_name() == vfunc_name and v != vfunc:
                return klass

        aklass = find_vfunc_conflict_in_bases(vfunc, klass.__bases__)
        if aklass is not None:
            return aklass
    return None


class GObjectMeta(gobject.GObjectMeta, MetaClassHelper):

    def __init__(cls, name, bases, dict_):
        super(GObjectMeta, cls).__init__(name, bases, dict_)
        is_gi_defined = False
        if cls.__module__ == 'gi.repository.' + cls.__info__.get_namespace():
            is_gi_defined = True

        is_python_defined = False
        if not is_gi_defined and cls.__module__ != GObjectMeta.__module__:
            is_python_defined = True

        if is_python_defined:
            cls._setup_vfuncs()
        elif is_gi_defined:
            cls._setup_methods()
            cls._setup_constants()
            cls._setup_native_vfuncs()

            if isinstance(cls.__info__, ObjectInfo):
                cls._setup_fields()
                cls._setup_constructors()
                set_object_has_new_constructor(cls.__info__.get_g_type())
            elif isinstance(cls.__info__, InterfaceInfo):
                register_interface_info(cls.__info__.get_g_type())

    def mro(cls):
        return mro(cls)

    def _must_register_type(cls, namespace):
        ## don't register the class if already registered
        if '__gtype__' in namespace:
            return False

        # Do not register a new GType for the overrides, as this would sort of
        # defeat the purpose of overrides...
        return not cls.__module__.startswith('gi.overrides.')


def mro(C):
    """Compute the class precedence list (mro) according to C3

    Based on http://www.python.org/download/releases/2.3/mro/
    Modified to consider that interfaces don't create the diamond problem
    """
    # TODO: If this turns out being too slow, consider using generators
    bases = []
    bases_of_subclasses = [[C]]

    if C.__bases__:
        bases_of_subclasses += list(map(mro, C.__bases__)) + [list(C.__bases__)]

    while bases_of_subclasses:
        for subclass_bases in bases_of_subclasses:
            candidate = subclass_bases[0]
            not_head = [s for s in bases_of_subclasses if candidate in s[1:]]
            if not_head and gobject.GInterface not in candidate.__bases__:
                candidate = None  # conflict, reject candidate
            else:
                break

        if candidate is None:
            raise TypeError('Cannot create a consistent method resolution '
                            'order (MRO)')

        bases.append(candidate)

        for subclass_bases in bases_of_subclasses[:]:  # remove candidate
            if subclass_bases and subclass_bases[0] == candidate:
                del subclass_bases[0]
                if not subclass_bases:
                    bases_of_subclasses.remove(subclass_bases)

    return bases


class StructMeta(type, MetaClassHelper):

    def __init__(cls, name, bases, dict_):
        super(StructMeta, cls).__init__(name, bases, dict_)

        # Avoid touching anything else than the base class.
        g_type = cls.__info__.get_g_type()
        if g_type != gobject.TYPE_INVALID and g_type.pytype is not None:
            return

        cls._setup_fields()
        cls._setup_methods()
        cls._setup_constructors()

        for method_info in cls.__info__.get_methods():
            if method_info.is_constructor() and \
                    method_info.get_name() == 'new' and \
                    not method_info.get_arguments():
                cls.__new__ = staticmethod(Constructor(method_info))
                break
davidecaminati/Domotics-Raspberry
refs/heads/master
Software/Android/Minix/cherrypy/test/_test_decorators.py
12
"""Test module for the @-decorator syntax, which is version-specific""" from cherrypy import expose, tools from cherrypy._cpcompat import ntob class ExposeExamples(object): @expose def no_call(self): return "Mr E. R. Bradshaw" @expose() def call_empty(self): return "Mrs. B.J. Smegma" @expose("call_alias") def nesbitt(self): return "Mr Nesbitt" @expose(["alias1", "alias2"]) def andrews(self): return "Mr Ken Andrews" @expose(alias="alias3") def watson(self): return "Mr. and Mrs. Watson" class ToolExamples(object): @expose @tools.response_headers(headers=[('Content-Type', 'application/data')]) def blah(self): yield ntob("blah") # This is here to demonstrate that _cp_config = {...} overwrites # the _cp_config attribute added by the Tool decorator. You have # to write _cp_config[k] = v or _cp_config.update(...) instead. blah._cp_config['response.stream'] = True
gallardjm/TUM_WebTech_DjangoExample
refs/heads/master
webtech_django1/box/migrations/0001_initial.py
1
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('storage', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Box',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
                ('tag', models.CharField(max_length=100)),
                ('storage', models.ForeignKey(to='storage.Storage')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
kustodian/ansible
refs/heads/devel
test/units/modules/network/netscaler/netscaler_module.py
68
import sys

from units.compat.mock import patch, Mock
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase

base_modules_mock = Mock()
nitro_service_mock = Mock()
nitro_exception_mock = Mock()

base_modules_to_mock = {
    'nssrc': base_modules_mock,
    'nssrc.com': base_modules_mock,
    'nssrc.com.citrix': base_modules_mock,
    'nssrc.com.citrix.netscaler': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.resource': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.resource.config': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.exception': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.exception.nitro_exception': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.exception.nitro_exception.nitro_exception': nitro_exception_mock,
    'nssrc.com.citrix.netscaler.nitro.service': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.service.nitro_service': base_modules_mock,
    'nssrc.com.citrix.netscaler.nitro.service.nitro_service.nitro_service': nitro_service_mock,
}

nitro_base_patcher = patch.dict(sys.modules, base_modules_to_mock)


class TestModule(ModuleTestCase):
    def failed(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def exited(self, changed=False):
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        return result
Sodki/ansible
refs/heads/devel
lib/ansible/modules/windows/__init__.py
12133432
tbeadle/django
refs/heads/master
tests/gis_tests/gis_migrations/migrations/__init__.py
12133432
rationalAgent/edx-platform-custom
refs/heads/master
common/djangoapps/student/__init__.py
12133432
jbzdak/edx-platform
refs/heads/master
openedx/core/djangoapps/theming/tests/__init__.py
12133432
ryfeus/lambda-packs
refs/heads/master
Tensorflow_Pandas_Numpy/source3.6/tensorflow/python/training/optimizer.py
13
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Base class for optimizers.""" # pylint: disable=g-bad-name from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc from tensorflow.python.eager import context from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gradients from tensorflow.python.ops import math_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variables from tensorflow.python.training import slot_creator from tensorflow.python.util import nest def _get_variable_for(v): """Returns the ResourceVariable responsible for v, or v if not necessary.""" if context.in_eager_mode(): return v if v.op.type == "VarHandleOp": for var in variables.trainable_variables(): if (isinstance(var, resource_variable_ops.ResourceVariable) and var.handle.op is v.op): return var raise ValueError("Got %s but could not locate source variable." % (str(v))) return v def _deduplicate_indexed_slices(values, indices): """Sums `values` associated with any non-unique `indices`. Args: values: A `Tensor` with rank >= 1. indices: A one-dimensional integer `Tensor`, indexing into the first dimension of `values` (as in an IndexedSlices object). Returns: A tuple of (`summed_values`, `unique_indices`) where `unique_indices` is a de-duplicated version of `indices` and `summed_values` contains the sum of `values` slices associated with each unique index. 
""" unique_indices, new_index_positions = array_ops.unique(indices) summed_values = math_ops.unsorted_segment_sum( values, new_index_positions, array_ops.shape(unique_indices)[0]) return (summed_values, unique_indices) def _var_key(var): if context.in_eager_mode(): return var._shared_name # pylint: disable=protected-access return (var.op.graph, var.op.name) class _OptimizableVariable(object): """Interface for abstracting over variables in the optimizers.""" @abc.abstractmethod def target(self): """Returns the optimization target for this variable.""" raise NotImplementedError("Calling an abstract method.") @abc.abstractmethod def update_op(self, optimizer, g): """Returns the update ops for updating the variable.""" raise NotImplementedError("Calling an abstract method.") class _RefVariableProcessor(_OptimizableVariable): """Processor for Variable.""" def __init__(self, v): self._v = v def target(self): return self._v._ref() # pylint: disable=protected-access def update_op(self, optimizer, g): if isinstance(g, ops.Tensor): update_op = optimizer._apply_dense(g, self._v) # pylint: disable=protected-access if self._v.constraint is not None: with ops.control_dependencies([update_op]): return self._v.assign(self._v.constraint(self._v)) else: return update_op else: assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a " "tensor nor IndexedSlices.") if self._v.constraint is not None: raise RuntimeError( "Cannot use a constraint function on a sparse variable.") # pylint: disable=protected-access return optimizer._apply_sparse_duplicate_indices(g, self._v) class _DenseReadResourceVariableProcessor(_OptimizableVariable): """Processor for dense ResourceVariables.""" def __init__(self, v): self._v = v def target(self): return self._v def update_op(self, optimizer, g): # pylint: disable=protected-access update_op = optimizer._resource_apply_dense(g, self._v.op.inputs[0]) if self._v.constraint is not None: with ops.control_dependencies([update_op]): return self._v.assign(self._v.constraint(self._v)) else: return update_op class _DenseResourceVariableProcessor(_OptimizableVariable): """Processor for dense ResourceVariables.""" def __init__(self, v): self._v = v def target(self): return self._v def update_op(self, optimizer, g): # pylint: disable=protected-access if isinstance(g, ops.IndexedSlices): if self._v.constraint is not None: raise RuntimeError( "Cannot use a constraint function on a sparse variable.") return optimizer._resource_apply_sparse_duplicate_indices( g.values, self._v, g.indices) update_op = optimizer._resource_apply_dense(g, self._v) if self._v.constraint is not None: with ops.control_dependencies([update_op]): return self._v.assign(self._v.constraint(self._v)) else: return update_op class _StreamingModelPortProcessor(_OptimizableVariable): """Processor for streaming ModelPorts.""" def __init__(self, v): self._v = v def target(self): return self._v def update_op(self, optimizer, g): return g def _get_processor(v): """The processor of v.""" if context.in_eager_mode(): return _DenseResourceVariableProcessor(v) if v.op.type == "VarHandleOp": return _DenseResourceVariableProcessor(v) if isinstance(v, variables.Variable): return _RefVariableProcessor(v) if v.op.type == "SubmodelPort": return _StreamingModelPortProcessor(v) raise NotImplementedError("Trying to optimize unsupported type ", v) class Optimizer(object): """Base class for optimizers. This class defines the API to add Ops to train a model. 
You never use this class directly, but instead instantiate one of its subclasses such as `GradientDescentOptimizer`, `AdagradOptimizer`, or `MomentumOptimizer`. ### Usage ```python # Create an optimizer with the desired parameters. opt = GradientDescentOptimizer(learning_rate=0.1) # Add Ops to the graph to minimize a cost by updating a list of variables. # "cost" is a Tensor, and the list of variables contains tf.Variable # objects. opt_op = opt.minimize(cost, var_list=<list of variables>) ``` In the training program you will just have to run the returned Op. ```python # Execute opt_op to do one step of training: opt_op.run() ``` ### Processing gradients before applying them. Calling `minimize()` takes care of both computing the gradients and applying them to the variables. If you want to process the gradients before applying them you can instead use the optimizer in three steps: 1. Compute the gradients with `compute_gradients()`. 2. Process the gradients as you wish. 3. Apply the processed gradients with `apply_gradients()`. Example: ```python # Create an optimizer. opt = GradientDescentOptimizer(learning_rate=0.1) # Compute the gradients for a list of variables. grads_and_vars = opt.compute_gradients(loss, <list of variables>) # grads_and_vars is a list of tuples (gradient, variable). Do whatever you # need to the 'gradient' part, for example cap them, etc. capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars] # Ask the optimizer to apply the capped gradients. opt.apply_gradients(capped_grads_and_vars) ``` ### Gating Gradients Both `minimize()` and `compute_gradients()` accept a `gate_gradients` argument that controls the degree of parallelism during the application of the gradients. The possible values are: `GATE_NONE`, `GATE_OP`, and `GATE_GRAPH`. <b>`GATE_NONE`</b>: Compute and apply gradients in parallel. This provides the maximum parallelism in execution, at the cost of some non-reproducibility in the results. For example the two gradients of `matmul` depend on the input values: With `GATE_NONE` one of the gradients could be applied to one of the inputs _before_ the other gradient is computed resulting in non-reproducible results. <b>`GATE_OP`</b>: For each Op, make sure all gradients are computed before they are used. This prevents race conditions for Ops that generate gradients for multiple inputs where the gradients depend on the inputs. <b>`GATE_GRAPH`</b>: Make sure all gradients for all variables are computed before any one of them is used. This provides the least parallelism but can be useful if you want to process all gradients before applying any of them. ### Slots Some optimizer subclasses, such as `MomentumOptimizer` and `AdagradOptimizer` allocate and manage additional variables associated with the variables to train. These are called <i>Slots</i>. Slots have names and you can ask the optimizer for the names of the slots that it uses. Once you have a slot name you can ask the optimizer for the variable it created to hold the slot value. This can be useful if you want to log debug a training algorithm, report stats about the slots, etc. """ # Values for gate_gradients. GATE_NONE = 0 GATE_OP = 1 GATE_GRAPH = 2 def __init__(self, use_locking, name): """Create a new Optimizer. This must be called by the constructors of subclasses. Args: use_locking: Bool. If True apply use locks to prevent concurrent updates to variables. name: A non-empty string. The name to use for accumulators created for the optimizer. Raises: ValueError: If name is malformed. 
""" if not name: raise ValueError("Must specify the optimizer name") self._use_locking = use_locking self._name = name # Dictionary of slots. # {slot_name : { variable_to_train: slot_for_the_variable, ...}, ... } self._slots = {} def get_name(self): return self._name def minimize(self, loss, global_step=None, var_list=None, gate_gradients=GATE_OP, aggregation_method=None, colocate_gradients_with_ops=False, name=None, grad_loss=None): """Add operations to minimize `loss` by updating `var_list`. This method simply combines calls `compute_gradients()` and `apply_gradients()`. If you want to process the gradient before applying them call `compute_gradients()` and `apply_gradients()` explicitly instead of using this function. Args: loss: A `Tensor` containing the value to minimize. global_step: Optional `Variable` to increment by one after the variables have been updated. var_list: Optional list or tuple of `Variable` objects to update to minimize `loss`. Defaults to the list of variables collected in the graph under the key `GraphKeys.TRAINABLE_VARIABLES`. gate_gradients: How to gate the computation of gradients. Can be `GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`. aggregation_method: Specifies the method used to combine gradient terms. Valid values are defined in the class `AggregationMethod`. colocate_gradients_with_ops: If True, try colocating gradients with the corresponding op. name: Optional name for the returned operation. grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`. Returns: An Operation that updates the variables in `var_list`. If `global_step` was not `None`, that operation also increments `global_step`. Raises: ValueError: If some of the variables are not `Variable` objects. """ grads_and_vars = self.compute_gradients( loss, var_list=var_list, gate_gradients=gate_gradients, aggregation_method=aggregation_method, colocate_gradients_with_ops=colocate_gradients_with_ops, grad_loss=grad_loss) vars_with_grad = [v for g, v in grads_and_vars if g is not None] if not vars_with_grad: raise ValueError( "No gradients provided for any variable, check your graph for ops" " that do not support gradients, between variables %s and loss %s." % ([str(v) for _, v in grads_and_vars], loss)) return self.apply_gradients(grads_and_vars, global_step=global_step, name=name) def compute_gradients(self, loss, var_list=None, gate_gradients=GATE_OP, aggregation_method=None, colocate_gradients_with_ops=False, grad_loss=None): """Compute gradients of `loss` for the variables in `var_list`. This is the first part of `minimize()`. It returns a list of (gradient, variable) pairs where "gradient" is the gradient for "variable". Note that "gradient" can be a `Tensor`, an `IndexedSlices`, or `None` if there is no gradient for the given variable. Args: loss: A Tensor containing the value to minimize. var_list: Optional list or tuple of `tf.Variable` to update to minimize `loss`. Defaults to the list of variables collected in the graph under the key `GraphKey.TRAINABLE_VARIABLES`. gate_gradients: How to gate the computation of gradients. Can be `GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`. aggregation_method: Specifies the method used to combine gradient terms. Valid values are defined in the class `AggregationMethod`. colocate_gradients_with_ops: If True, try colocating gradients with the corresponding op. grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`. Returns: A list of (gradient, variable) pairs. Variable is always present, but gradient can be `None`. 
Raises: TypeError: If `var_list` contains anything else than `Variable` objects. ValueError: If some arguments are invalid. """ if gate_gradients not in [Optimizer.GATE_NONE, Optimizer.GATE_OP, Optimizer.GATE_GRAPH]: raise ValueError("gate_gradients must be one of: Optimizer.GATE_NONE, " "Optimizer.GATE_OP, Optimizer.GATE_GRAPH. Not %s" % gate_gradients) self._assert_valid_dtypes([loss]) if grad_loss is not None: self._assert_valid_dtypes([grad_loss]) if var_list is None: var_list = ( variables.trainable_variables() + ops.get_collection(ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES)) else: var_list = nest.flatten(var_list) # pylint: disable=protected-access var_list += ops.get_collection(ops.GraphKeys._STREAMING_MODEL_PORTS) # pylint: enable=protected-access processors = [_get_processor(v) for v in var_list] if not var_list: raise ValueError("No variables to optimize.") var_refs = [p.target() for p in processors] grads = gradients.gradients( loss, var_refs, grad_ys=grad_loss, gate_gradients=(gate_gradients == Optimizer.GATE_OP), aggregation_method=aggregation_method, colocate_gradients_with_ops=colocate_gradients_with_ops) if gate_gradients == Optimizer.GATE_GRAPH: grads = control_flow_ops.tuple(grads) grads_and_vars = list(zip(grads, var_list)) self._assert_valid_dtypes( [v for g, v in grads_and_vars if g is not None and v.dtype != dtypes.resource]) return grads_and_vars def apply_gradients(self, grads_and_vars, global_step=None, name=None): """Apply gradients to variables. This is the second part of `minimize()`. It returns an `Operation` that applies gradients. Args: grads_and_vars: List of (gradient, variable) pairs as returned by `compute_gradients()`. global_step: Optional `Variable` to increment by one after the variables have been updated. name: Optional name for the returned operation. Default to the name passed to the `Optimizer` constructor. Returns: An `Operation` that applies the specified gradients. If `global_step` was not None, that operation also increments `global_step`. Raises: TypeError: If `grads_and_vars` is malformed. ValueError: If none of the variables have gradients. """ # This is a default implementation of apply_gradients() that can be shared # by most optimizers. It relies on the subclass implementing the following # methods: _create_slots(), _prepare(), _apply_dense(), and _apply_sparse(). grads_and_vars = tuple(grads_and_vars) # Make sure repeat iteration works. if not grads_and_vars: raise ValueError("No variables provided.") converted_grads_and_vars = [] for g, v in grads_and_vars: if g is not None: try: # Convert the grad to Tensor or IndexedSlices if necessary. g = ops.convert_to_tensor_or_indexed_slices(g) except TypeError: raise TypeError( "Gradient must be convertible to a Tensor" " or IndexedSlices, or None: %s" % g) if not isinstance(g, (ops.Tensor, ops.IndexedSlices)): raise TypeError( "Gradient must be a Tensor, IndexedSlices, or None: %s" % g) p = _get_processor(v) converted_grads_and_vars.append((g, v, p)) converted_grads_and_vars = tuple(converted_grads_and_vars) var_list = [v for g, v, _ in converted_grads_and_vars if g is not None] if not var_list: raise ValueError("No gradients provided for any variable: %s." 
% ([str(v) for _, _, v in converted_grads_and_vars],)) with ops.control_dependencies(None): self._create_slots([_get_variable_for(v) for v in var_list]) update_ops = [] with ops.name_scope(name, self._name) as name: self._prepare() for grad, var, processor in converted_grads_and_vars: if grad is None: continue # We colocate all ops created in _apply_dense or _apply_sparse # on the same device as the variable. # TODO(apassos): figure out how to get the variable name here. scope_name = var.op.name if context.in_graph_mode() else "" with ops.name_scope("update_" + scope_name), ops.colocate_with(var): update_ops.append(processor.update_op(self, grad)) if global_step is None: apply_updates = self._finish(update_ops, name) else: with ops.control_dependencies([self._finish(update_ops, "update")]): with ops.colocate_with(global_step): apply_updates = state_ops.assign_add(global_step, 1, name=name).op train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP) if apply_updates not in train_op: train_op.append(apply_updates) return apply_updates def get_slot(self, var, name): """Return a slot named `name` created for `var` by the Optimizer. Some `Optimizer` subclasses use additional variables. For example `Momentum` and `Adagrad` use variables to accumulate updates. This method gives access to these `Variable` objects if for some reason you need them. Use `get_slot_names()` to get the list of slot names created by the `Optimizer`. Args: var: A variable passed to `minimize()` or `apply_gradients()`. name: A string. Returns: The `Variable` for the slot if it was created, `None` otherwise. """ named_slots = self._slots.get(name, None) if not named_slots: return None return named_slots.get(_var_key(var), None) def get_slot_names(self): """Return a list of the names of slots created by the `Optimizer`. See `get_slot()`. Returns: A list of strings. """ return sorted(self._slots.keys()) def _assert_valid_dtypes(self, tensors): """Asserts tensors are all valid types (see `_valid_dtypes`). Args: tensors: Tensors to check. Raises: ValueError: If any tensor is not a valid type. """ valid_dtypes = self._valid_dtypes() for t in tensors: dtype = t.dtype.base_dtype if dtype not in valid_dtypes: raise ValueError( "Invalid type %r for %s, expected: %s." % ( dtype, t.name, [v for v in valid_dtypes])) # -------------- # Methods to be implemented by subclasses if they want to use the # inherited implementation of apply_gradients() or compute_gradients(). # -------------- def _valid_dtypes(self): """Valid types for loss, variables and gradients. Subclasses should override to allow other float types. Returns: Valid types for loss, variables and gradients. """ return set([dtypes.float16, dtypes.float32, dtypes.float64]) def _create_slots(self, var_list): """Create all slots needed by the variables. Args: var_list: A list of `Variable` objects. """ # No slots needed by default pass def _prepare(self): """Create all needed tensors before applying gradients. This is called with the name_scope using the "name" that users have chosen for the application of gradients. """ pass def _apply_dense(self, grad, var): """Add ops to apply dense gradients to `var`. Args: grad: A `Tensor`. var: A `Variable` object. Returns: An `Operation`. """ raise NotImplementedError() def _resource_apply_dense(self, grad, handle): """Add ops to apply dense gradients to the variable `handle`. Args: grad: a `Tensor` representing the gradient. handle: a `Tensor` of dtype `resource` which points to the variable to be updated. 
Returns: An `Operation` which updates the value of the variable. """ raise NotImplementedError() def _resource_apply_sparse_duplicate_indices(self, grad, handle, indices): """Add ops to apply sparse gradients to `handle`, with repeated indices. Optimizers which override this method must deal with repeated indices. See the docstring of `_apply_sparse_duplicate_indices` for details. By default the correct behavior, to sum non-unique indices and their associated gradients, is enforced by first pre-processing `grad` and `indices` and passing them on to `_resource_apply_sparse`. Optimizers which deal correctly with duplicate indices may instead override this method to avoid the overhead of summing. Args: grad: a `Tensor` representing the gradient for the affected indices. handle: a `Tensor` of dtype `resource` which points to the variable to be updated. indices: a `Tensor` of integral type representing the indices for which the gradient is nonzero. Indices may be repeated. Returns: An `Operation` which updates the value of the variable. """ summed_grad, unique_indices = _deduplicate_indexed_slices( values=grad, indices=indices) return self._resource_apply_sparse(summed_grad, handle, unique_indices) def _resource_apply_sparse(self, grad, handle, indices): """Add ops to apply sparse gradients to the variable `handle`. Similar to `_apply_sparse`, the `indices` argument to this method has been de-duplicated. Optimizers which deal correctly with non-unique indices may instead override `_resource_apply_sparse_duplicate_indices` to avoid this overhead. Args: grad: a `Tensor` representing the gradient for the affected indices. handle: a `Tensor` of dtype `resource` which points to the variable to be updated. indices: a `Tensor` of integral type representing the indices for which the gradient is nonzero. Indices are unique. Returns: An `Operation` which updates the value of the variable. """ raise NotImplementedError() def _apply_sparse_duplicate_indices(self, grad, var): """Add ops to apply sparse gradients to `var`, with repeated sparse indices. Optimizers which override this method must deal with IndexedSlices objects such as the following: IndexedSlicesValue(values=[1, 1], indices=[0, 0], dense_shape=[1]) The correct interpretation is: IndexedSlicesValue(values=[2], indices=[0], dense_shape=[1]) Many optimizers deal incorrectly with repeated indices when updating based on sparse gradients (e.g. summing squares rather than squaring the sum, or applying momentum terms multiple times). Adding first is always the correct behavior, so this is enforced here by reconstructing the IndexedSlices to have only unique indices, then calling _apply_sparse. Optimizers which deal correctly with repeated indices may instead override this method to avoid the overhead of summing indices. Args: grad: `IndexedSlices`. var: A `Variable` object. Returns: An `Operation`. """ summed_values, unique_indices = _deduplicate_indexed_slices( values=grad.values, indices=grad.indices) gradient_no_duplicate_indices = ops.IndexedSlices( indices=unique_indices, values=summed_values, dense_shape=grad.dense_shape) return self._apply_sparse(gradient_no_duplicate_indices, var) def _apply_sparse(self, grad, var): """Add ops to apply sparse gradients to `var`. The IndexedSlices object passed to `grad` in this function is by default pre-processed in `_apply_sparse_duplicate_indices` to remove duplicate indices (see its docstring for details). 
Optimizers which can tolerate or have correct special cases for duplicate sparse indices may override `_apply_sparse_duplicate_indices` instead of this function, avoiding that overhead. Args: grad: `IndexedSlices`, with no repeated indices. var: A `Variable` object. Returns: An `Operation`. """ raise NotImplementedError() def _finish(self, update_ops, name_scope): """Do what is needed to finish the update. This is called with the `name_scope` using the "name" that users have chosen for the application of gradients. Args: update_ops: List of `Operation` objects to update variables. This list contains the values returned by the `_apply_dense()` and `_apply_sparse()` calls. name_scope: String. Name to use for the returned operation. Returns: The operation to apply updates. """ return control_flow_ops.group(*update_ops, name=name_scope) # -------------- # Utility methods for subclasses. # -------------- def _slot_dict(self, slot_name): """Returns a dict for caching slots created under the given name. Args: slot_name: Name for the slot. Returns: A dict that maps primary `Variable` objects to the slot created for that variable, under the given slot name. """ named_slots = self._slots.get(slot_name, None) if named_slots is None: named_slots = {} self._slots[slot_name] = named_slots return named_slots def _get_or_make_slot(self, var, val, slot_name, op_name): """Find or create a slot for a variable. Args: var: A `Variable` object. val: A `Tensor`. The initial value of the slot. slot_name: Name for the slot. op_name: Name to use when scoping the Variable that needs to be created for the slot. Returns: A `Variable` object. """ named_slots = self._slot_dict(slot_name) if _var_key(var) not in named_slots: named_slots[_var_key(var)] = slot_creator.create_slot(var, val, op_name) return named_slots[_var_key(var)] def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype, slot_name, op_name): """Find or create a slot for a variable, using an Initializer. Args: var: A `Variable` object. initializer: An `Initializer`. The initial value of the slot. shape: Shape of the initial value of the slot. dtype: Type of the value of the slot. slot_name: Name for the slot. op_name: Name to use when scoping the Variable that needs to be created for the slot. Returns: A `Variable` object. """ named_slots = self._slot_dict(slot_name) if _var_key(var) not in named_slots: named_slots[_var_key(var)] = slot_creator.create_slot_with_initializer( var, initializer, shape, dtype, op_name) return named_slots[_var_key(var)] def _zeros_slot(self, var, slot_name, op_name): """Find or create a slot initialized with 0.0. Args: var: A `Variable` object. slot_name: Name for the slot. op_name: Name to use when scoping the Variable that needs to be created for the slot. Returns: A `Variable` object. """ named_slots = self._slot_dict(slot_name) if _var_key(var) not in named_slots: named_slots[_var_key(var)] = slot_creator.create_zeros_slot(var, op_name) return named_slots[_var_key(var)]
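The `Optimizer` base class above documents the subclass hooks (`_create_slots`, `_prepare`, `_apply_dense`, `_apply_sparse`, ...). A minimal sketch of a custom optimizer built on those hooks — the class itself is invented for illustration and uses the era-appropriate graph-mode TF1 API:

# Hypothetical subclass for illustration; only the documented hooks are used.
import tensorflow as tf


class ScaledGradientDescent(tf.train.Optimizer):
    """Toy optimizer: var -= learning_rate * scale * grad."""

    def __init__(self, learning_rate, scale=1.0, use_locking=False, name="ScaledGD"):
        super(ScaledGradientDescent, self).__init__(use_locking, name)
        self._lr = learning_rate
        self._scale = scale

    def _apply_dense(self, grad, var):
        # The one hook a minimal dense-only optimizer must implement.
        return tf.assign_sub(var, self._lr * self._scale * grad,
                             use_locking=self._use_locking).op

# usage: train_op = ScaledGradientDescent(0.1, scale=0.5).minimize(loss)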
xq262144/hue
refs/heads/master
desktop/core/ext-py/Django-1.6.10/django/conf/locale/es_AR/formats.py
118
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j N Y'
TIME_FORMAT = r'H:i:s'
DATETIME_FORMAT = r'j N Y H:i:s'
YEAR_MONTH_FORMAT = r'F Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = r'd/m/Y'
SHORT_DATETIME_FORMAT = r'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0  # 0: Sunday, 1: Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d/%m/%Y',  # '31/12/2009'
    '%d/%m/%y',  # '31/12/09'
)
DATETIME_INPUT_FORMATS = (
    '%d/%m/%Y %H:%M:%S',
    '%d/%m/%Y %H:%M:%S.%f',
    '%d/%m/%Y %H:%M',
    '%d/%m/%y %H:%M:%S',
    '%d/%m/%y %H:%M:%S.%f',
    '%d/%m/%y %H:%M',
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
ianmtaylor1/pacal
refs/heads/master
pacal/examples/springer_book/Chapter5_functions.py
1
#!
#!-----------------------
#! CHAPTER 5 - FUNCTIONS
#!-----------------------
#!
from functools import partial

import numpy
import time

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
    tic = time.time()

    #! Example 5.1.3
    d = NormalDistr() + NormalDistr() * NormalDistr()
    demo_distr(d)

    #! Example 5.5
    d = ExponentialDistr() / (ExponentialDistr() + ExponentialDistr())
    figure()
    demo_distr(d, xmax=20, ymax=1.5)

    #! Exercise 5.5
    #! part a
    figure()
    demo_distr(NormalDistr() / sqrt((NormalDistr()**2 + NormalDistr()**2) / 2), xmin=-3, xmax=3)

    #! part b
    figure()
    demo_distr(2 * NormalDistr()**2 / (NormalDistr()**2 + NormalDistr()**2), xmax=20, ymax=2)

    #! part c
    figure()
    demo_distr(3 * NormalDistr()**2 / (NormalDistr()**2 + NormalDistr()**2 + NormalDistr()**2), xmax=20, ymax=2)

    #! part d
    figure()
    demo_distr((NormalDistr()**2 + NormalDistr()**2) / (NormalDistr()**2 + NormalDistr()**2), xmax=20)

    #! Exercise 5.6
    d = sqrt(UniformDistr(0, 1)**2 + UniformDistr(0, 1)**2)
    # a bug in Springer??
    def theor_ampl_uni(x):
        return (x < 1) * numpy.pi / 2 * x + (x >= 1) * (2 * numpy.arcsin(1.0 / x) - 0 * numpy.pi / 2)
    figure()
    #demo_distr(d, theoretical = theor_ampl_uni, histogram=True)
    demo_distr(d)

    print("time=", time.time() - tic)
    show()
petewarden/tensorflow_makefile
refs/heads/master
tensorflow/examples/skflow/resnet.py
4
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This example builds deep residual network for mnist data.
Reference Paper: http://arxiv.org/pdf/1512.03385.pdf

Note that this is still a work-in-progress. Feel free to submit a PR
to make this better.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
from collections import namedtuple
from math import sqrt

from sklearn import metrics
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.contrib import learn


def res_net(x, y, activation=tf.nn.relu):
    """Builds a residual network.

    Note that if the input tensor is 2D, it must be square in order to be
    converted to a 4D tensor.

    Borrowed structure from here:
    https://github.com/pkmital/tensorflow_tutorials/blob/master/10_residual_network.py

    Args:
        x: Input of the network
        y: Output of the network
        activation: Activation function to apply after each convolution
    """
    # Configurations for each bottleneck block
    BottleneckBlock = namedtuple(
        'BottleneckBlock', ['num_layers', 'num_filters', 'bottleneck_size'])
    blocks = [BottleneckBlock(3, 128, 32),
              BottleneckBlock(3, 256, 64),
              BottleneckBlock(3, 512, 128),
              BottleneckBlock(3, 1024, 256)]

    input_shape = x.get_shape().as_list()

    # Reshape the input into the right shape if it's 2D tensor
    if len(input_shape) == 2:
        ndim = int(sqrt(input_shape[1]))
        x = tf.reshape(x, [-1, ndim, ndim, 1])

    # First convolution expands to 64 channels
    with tf.variable_scope('conv_layer1'):
        net = learn.ops.conv2d(x, 64, [7, 7], batch_norm=True,
                               activation=activation, bias=False)

    # Max pool
    net = tf.nn.max_pool(
        net, [1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')

    # First chain of resnets
    with tf.variable_scope('conv_layer2'):
        net = learn.ops.conv2d(net, blocks[0].num_filters,
                               [1, 1], [1, 1, 1, 1],
                               padding='VALID', bias=True)

    # Create each bottleneck building block for each layer
    for block_i, block in enumerate(blocks):
        for layer_i in range(block.num_layers):
            name = 'block_%d/layer_%d' % (block_i, layer_i)

            # 1x1 convolution responsible for reducing dimension
            with tf.variable_scope(name + '/conv_in'):
                conv = learn.ops.conv2d(net, block.bottleneck_size,
                                        [1, 1], [1, 1, 1, 1],
                                        padding='VALID',
                                        activation=activation,
                                        batch_norm=True,
                                        bias=False)

            with tf.variable_scope(name + '/conv_bottleneck'):
                conv = learn.ops.conv2d(conv, block.bottleneck_size,
                                        [3, 3], [1, 1, 1, 1],
                                        padding='SAME',
                                        activation=activation,
                                        batch_norm=True,
                                        bias=False)

            # 1x1 convolution responsible for restoring dimension
            with tf.variable_scope(name + '/conv_out'):
                conv = learn.ops.conv2d(conv, block.num_filters,
                                        [1, 1], [1, 1, 1, 1],
                                        padding='VALID',
                                        activation=activation,
                                        batch_norm=True,
                                        bias=False)

            # shortcut connections that turn the network into its counterpart
            # residual function (identity shortcut)
            net = conv + net

        try:
            # upscale to the next block size
            next_block = blocks[block_i + 1]
            with tf.variable_scope('block_%d/conv_upscale' % block_i):
                net = learn.ops.conv2d(net, next_block.num_filters,
                                       [1, 1], [1, 1, 1, 1],
                                       bias=False,
                                       padding='SAME')
        except IndexError:
            pass

    net_shape = net.get_shape().as_list()
    net = tf.nn.avg_pool(net,
                         ksize=[1, net_shape[1], net_shape[2], 1],
                         strides=[1, 1, 1, 1], padding='VALID')

    net_shape = net.get_shape().as_list()
    net = tf.reshape(net, [-1, net_shape[1] * net_shape[2] * net_shape[3]])

    return learn.models.logistic_regression(net, y)


# Download and load MNIST data.
mnist = input_data.read_data_sets('MNIST_data')

# Restore model if graph is saved into a folder.
if os.path.exists("models/resnet/graph.pbtxt"):
    classifier = learn.TensorFlowEstimator.restore("models/resnet/")
else:
    # Create a new resnet classifier.
    classifier = learn.TensorFlowEstimator(
        model_fn=res_net, n_classes=10, batch_size=100, steps=100,
        learning_rate=0.001, continue_training=True)

while True:
    # Train model and save summaries into logdir.
    classifier.fit(mnist.train.images, mnist.train.labels,
                   logdir="models/resnet/")

    # Calculate accuracy.
    score = metrics.accuracy_score(
        mnist.test.labels,
        classifier.predict(mnist.test.images, batch_size=64))
    print('Accuracy: {0:f}'.format(score))

    # Save model graph and checkpoints.
    classifier.save("models/resnet/")
sahutd/youtube-dl
refs/heads/master
youtube_dl/extractor/xboxclips.py
132
# encoding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    int_or_none,
    parse_filesize,
    unified_strdate,
)


class XboxClipsIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?xboxclips\.com/(?:video\.php\?.*vid=|[^/]+/)(?P<id>[\w-]{36})'
    _TEST = {
        'url': 'https://xboxclips.com/video.php?uid=2533274823424419&gamertag=Iabdulelah&vid=074a69a9-5faf-46aa-b93b-9909c1720325',
        'md5': 'fbe1ec805e920aeb8eced3c3e657df5d',
        'info_dict': {
            'id': '074a69a9-5faf-46aa-b93b-9909c1720325',
            'ext': 'mp4',
            'title': 'Iabdulelah playing Titanfall',
            'filesize_approx': 26800000,
            'upload_date': '20140807',
            'duration': 56,
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)

        webpage = self._download_webpage(url, video_id)

        video_url = self._html_search_regex(
            r'>(?:Link|Download): <a[^>]+href="([^"]+)"',
            webpage, 'video URL')
        title = self._html_search_regex(
            r'<title>XboxClips \| ([^<]+)</title>', webpage, 'title')
        upload_date = unified_strdate(self._html_search_regex(
            r'>Recorded: ([^<]+)<', webpage, 'upload date', fatal=False))
        filesize = parse_filesize(self._html_search_regex(
            r'>Size: ([^<]+)<', webpage, 'file size', fatal=False))
        duration = int_or_none(self._html_search_regex(
            r'>Duration: (\d+) Seconds<', webpage, 'duration', fatal=False))
        view_count = int_or_none(self._html_search_regex(
            r'>Views: (\d+)<', webpage, 'view count', fatal=False))

        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'upload_date': upload_date,
            'filesize_approx': filesize,
            'duration': duration,
            'view_count': view_count,
        }
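A hedged sketch of exercising this extractor through youtube-dl's public `YoutubeDL` API (the URL is the test URL from the file; network access is required):

# Uses youtube-dl's documented YoutubeDL API; download=False only extracts metadata.
import youtube_dl

with youtube_dl.YoutubeDL() as ydl:
    info = ydl.extract_info(
        'https://xboxclips.com/video.php?uid=2533274823424419&gamertag=Iabdulelah&vid=074a69a9-5faf-46aa-b93b-9909c1720325',
        download=False)
    print(info.get('title'))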
danlrobertson/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/wpt/markdown.py
43
from functools import reduce


def format_comment_title(product):
    """Produce a Markdown-formatted string based on a given "product"--a
    string containing a browser identifier optionally followed by a colon
    and a release channel. (For example: "firefox" or "chrome:dev".) The
    generated title string is used both to create new comments and to
    locate (and subsequently update) previously-submitted comments."""
    parts = product.split(":")
    title = parts[0].title()

    if len(parts) > 1:
        title += " (%s)" % parts[1]

    return "# %s #" % title


def markdown_adjust(s):
    """Escape problematic markdown sequences."""
    s = s.replace('\t', u'\\t')
    s = s.replace('\n', u'\\n')
    s = s.replace('\r', u'\\r')
    s = s.replace('`', u'')
    s = s.replace('|', u'\\|')
    return s


def table(headings, data, log):
    """Create and log data to specified logger in tabular format."""
    cols = range(len(headings))
    assert all(len(item) == len(cols) for item in data)
    max_widths = reduce(lambda prev, cur: [(len(cur[i]) + 2)
                                           if (len(cur[i]) + 2) > prev[i]
                                           else prev[i]
                                           for i in cols],
                        data,
                        [len(item) + 2 for item in headings])
    log("|%s|" % "|".join(item.center(max_widths[i]) for i, item in enumerate(headings)))
    log("|%s|" % "|".join("-" * max_widths[i] for i in cols))
    for row in data:
        log("|%s|" % "|".join(" %s" % row[i].ljust(max_widths[i] - 1) for i in cols))
    log("")
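A quick usage sketch for the helpers above (the row data is made up, only the output shape matters):

# Made-up data to show the Markdown table these helpers emit.
rows = [["firefox", "12", "3"], ["chrome:dev", "7", "0"]]
table(["product", "passed", "failed"], rows, print)
print(format_comment_title("chrome:dev"))  # -> "# Chrome (dev) #"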
wasade/improved-octo-waddle
refs/heads/master
cdef_bp_tests.py
1
import bp.tests.test_bp_cy

for n in dir(bp.tests.test_bp_cy):
    if n.startswith('test_'):
        getattr(bp.tests.test_bp_cy, n)()
alexcuellar/odoo
refs/heads/8.0
addons/website_sale/tests/test_sale_process.py
347
import openerp.tests


@openerp.tests.common.at_install(False)
@openerp.tests.common.post_install(True)
class TestUi(openerp.tests.HttpCase):

    def test_01_admin_shop_tour(self):
        self.phantom_js("/", "openerp.Tour.run('shop', 'test')", "openerp.Tour.tours.shop", login="admin")

    def test_02_admin_checkout(self):
        self.phantom_js("/", "openerp.Tour.run('shop_buy_product', 'test')", "openerp.Tour.tours.shop_buy_product", login="admin")

    def test_03_demo_checkout(self):
        self.phantom_js("/", "openerp.Tour.run('shop_buy_product', 'test')", "openerp.Tour.tours.shop_buy_product", login="demo")

    def test_04_public_checkout(self):
        self.phantom_js("/", "openerp.Tour.run('shop_buy_product', 'test')", "openerp.Tour.tours.shop_buy_product")
danlrobertson/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/third_party/attrs/src/attr/filters.py
57
""" Commonly useful filters for :func:`attr.asdict`. """ from __future__ import absolute_import, division, print_function from ._compat import isclass from ._make import Attribute def _split_what(what): """ Returns a tuple of `frozenset`s of classes and attributes. """ return ( frozenset(cls for cls in what if isclass(cls)), frozenset(cls for cls in what if isinstance(cls, Attribute)), ) def include(*what): """ Whitelist *what*. :param what: What to whitelist. :type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\\ s :rtype: :class:`callable` """ cls, attrs = _split_what(what) def include_(attribute, value): return value.__class__ in cls or attribute in attrs return include_ def exclude(*what): """ Blacklist *what*. :param what: What to blacklist. :type what: :class:`list` of classes or :class:`attr.Attribute`\\ s. :rtype: :class:`callable` """ cls, attrs = _split_what(what) def exclude_(attribute, value): return value.__class__ not in cls and attribute not in attrs return exclude_
adamjmcgrath/glancydesign
refs/heads/master
src/djangotoolbox/setup.py
9
from setuptools import setup, find_packages

DESCRIPTION = "Djangotoolbox for Django-nonrel"
LONG_DESCRIPTION = None
try:
    LONG_DESCRIPTION = open('README.rst').read()
except:
    pass

CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Framework :: Django',
    'Topic :: Database',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'License :: OSI Approved :: BSD License',
]

setup(name='djangotoolbox',
      packages=find_packages(exclude=('tests', 'tests.*')),
      author='Waldemar Kornewald',
      author_email='wkornewald@gmail.com',
      url='http://www.allbuttonspressed.com/projects/djangotoolbox',
      description=DESCRIPTION,
      long_description=LONG_DESCRIPTION,
      platforms=['any'],
      classifiers=CLASSIFIERS,
      install_requires=[],
      version='0.9.2',
)
dropbox/XCoverage
refs/heads/master
TestApp/build_scripts/export_build_vars.py
2
#!/usr/bin/env python
# Copyright (c) 2015 Dropbox, Inc

import os
import sys
import json

EXPORT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '.build_vars')


def get_build_vars():
    with open(EXPORT_PATH) as fd:
        ret = json.load(fd)
    return ret


def write_vars_to_file(kv_list):
    with open(EXPORT_PATH, 'w') as fd:
        var_dict = dict([kv.split('=') for kv in kv_list])
        json.dump(var_dict, fd)


def print_help():
    print '''
    Usage: python gen_export.py <output path> [<var>=<value> ...]
    writes json file to output path
    '''


if __name__ == '__main__':
    if not len(sys.argv) > 1:
        print_help()
    else:
        kv_list = sys.argv[1:]
        write_vars_to_file(kv_list)
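A hypothetical round trip through the two helpers above, assuming the script can be imported as `export_build_vars`; the variable names and values are invented:

import export_build_vars

# Persist two variables, then read them back from the .build_vars file.
export_build_vars.write_vars_to_file(['SDK=iphoneos9.0', 'CONFIGURATION=Debug'])
build_vars = export_build_vars.get_build_vars()
print build_vars['SDK']  # -> iphoneos9.0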
hei-hilman/microblog
refs/heads/master
db_repository/__init__.py
12133432
webostin/django-btc
refs/heads/master
tests/staticfiles_tests/apps/no_label/__init__.py
12133432
PolymorhicCode/Veil-Evasion
refs/heads/master
modules/payloads/c/__init__.py
12133432
aitormf/JdeRobot
refs/heads/master
src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/__init__.py
12133432
jk977/twitch-plays
refs/heads/master
bot/tests/__init__.py
12133432
dfunckt/django
refs/heads/master
tests/dispatch/__init__.py
12133432
audaciouscode/Books-Mac-OS-X
refs/heads/master
Versions/Books_3.0b5/Bundled plugins/Amazon (JP).plugin/Contents/Resources/amazonScript.py
12
#!/usr/bin/python

from amazon import Bag
from xml.dom.minidom import Document, parse
from difflib import SequenceMatcher
from string import replace

import amazon
import sys

searchLocale = "jp"

fieldMap = {
    "Asin" : "ASIN",
    "Authors" : "Authors",
    "ImageUrlLarge" : "CoverImageURL",
    "ImageUrlMedium" : "ImageUrlMedium",
    "ImageUrlSmall" : "ImageUrlSmall",
    "Isbn" : "isbn",
    "ListPrice" : "originalValue",
    "Manufacturer" : "publisher",
    "Media" : "format",
    "OurPrice" : "presentValue",
    "UsedPrice" : "UsedPrice",
    "ProductName" : "title",
    "ReleaseDate" : "publishDate",
    "URL" : "url",
    "Reviews" : "reviews",
    "ProductDescription" : "summary",
    "Catalog" : "Catalog"
}

book = None

dom = parse ("/tmp/books-quickfill.xml")
fields = dom.getElementsByTagName ("field")

title = ""
authors = ""
publisher = ""
upc = None
isbn = None

for field in fields:
    field.normalize ()

    fieldData = None

    if (field.firstChild != None):
        fieldData = replace (replace (replace (field.firstChild.data, "&", ""), "(", ""), ")", "");

    if (fieldData != None):
        if (field.getAttribute ("name") == "title"):
            title = fieldData
        elif (field.getAttribute ("name") == "authors"):
            authors = fieldData
        elif (field.getAttribute ("name") == "isbn"):
            isbn = fieldData
        elif (field.getAttribute ("name") == "upc"):
            upc = fieldData
        elif (field.getAttribute ("name") == "publisher"):
            publisher = fieldData

pythonBooks = None

if (isbn != None):
    isbn = replace (replace (isbn, "-", ""), " ", "");
    pythonBooks = amazon.searchByASIN (isbn, locale=searchLocale)

    if (pythonBooks[0] != None):
        book = pythonBooks[0]

# if (book == None and upc != None):
#     pythonBooks = amazon.searchByUPC (upc, locale=searchLocale)
#
#     if (pythonBooks[0] != None):
#         book = pythonBooks[0]

if (book == None and title != ""):
    query = "title:" + title

    if (authors != ""):
        query = query + " and author:" + authors

    if (publisher != ""):
        query = query + " and publisher:" + publisher

    pythonBooks = amazon.searchByPower (query, locale=searchLocale)

    if (pythonBooks[0] != None):
        book = pythonBooks[0]

doc = Document ()

root = doc.createElement ("importedData")
doc.appendChild (root)

searchMode = "books"

if (searchLocale != "us"):
    searchMode = "books-" + searchLocale

if (book != None):
    collection = doc.createElement ("List")
    collection.setAttribute ("name", "Amazon Import")
    root.appendChild (collection)

    for book in pythonBooks:
        bookElement = doc.createElement ("Book")
        bookElement.setAttribute ("title", book.ProductName)

        for key in fieldMap.keys():
            name = fieldMap[key]

            if name == None:
                name = key

            value = None

            try:
                value = getattr(book, key)
            except AttributeError:
                pass

            if (value != None):
                if (isinstance (value, Bag)):
                    if (key == "Authors"):
                        authors = ""

                        if (isinstance (value.Author, list)):
                            for author in value.Author:
                                authors += author + ", "
                        else:
                            authors += value.Author

                        fieldElement = doc.createElement ("field")
                        fieldElement.setAttribute ("name", "authors");
                        textElement = doc.createTextNode (authors)
                        fieldElement.appendChild (textElement)
                        bookElement.appendChild (fieldElement)
                    elif (key == "Reviews"):
                        fieldElement = doc.createElement ("field")
                        fieldElement.setAttribute ("name", "hasreviews");
                        textElement = doc.createTextNode ("true")
                        fieldElement.appendChild (textElement)
                        bookElement.appendChild (fieldElement)

                        if (isinstance (value.CustomerReview, list)):
                            for review in value.CustomerReview:
                                fieldElement = doc.createElement ("field")
                                fieldElement.setAttribute ("name", "Review");
                                textElement = doc.createTextNode (review.Summary + ": " + review.Comment)
                                fieldElement.appendChild (textElement)
                                bookElement.appendChild (fieldElement)
                else:
                    fieldElement = doc.createElement ("field")
                    fieldElement.setAttribute ("name", name);
                    textElement = doc.createTextNode (value)
                    fieldElement.appendChild (textElement)
                    bookElement.appendChild (fieldElement)

        collection.appendChild (bookElement)

print doc.toprettyxml(encoding="UTF-8", indent="  ")
sys.stdout.flush()
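The script above emits its XML via `xml.dom.minidom`; a stripped-down sketch of that element-building pattern, with an invented field name and text value:

from xml.dom.minidom import Document

doc = Document()
root = doc.createElement("importedData")
doc.appendChild(root)

# One "field" element with a text payload, mirroring the loop above.
field = doc.createElement("field")
field.setAttribute("name", "title")
field.appendChild(doc.createTextNode("Example Title"))
root.appendChild(field)

print doc.toprettyxml(encoding="UTF-8", indent="  ")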
divio/django
refs/heads/master
tests/cache/tests.py
142
# -*- coding: utf-8 -*-

# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
from __future__ import unicode_literals

import copy
import os
import re
import shutil
import tempfile
import threading
import time
import unittest
import warnings

from django.conf import settings
from django.core import management, signals
from django.core.cache import (
    DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches,
)
from django.core.cache.utils import make_template_fragment_key
from django.db import connection, connections, transaction
from django.http import HttpRequest, HttpResponse, StreamingHttpResponse
from django.middleware.cache import (
    CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
    RequestFactory, SimpleTestCase, TestCase, TransactionTestCase,
    override_settings,
)
from django.test.signals import setting_changed
from django.utils import six, timezone, translation
from django.utils.cache import (
    get_cache_key, learn_cache_key, patch_cache_control,
    patch_response_headers, patch_vary_headers,
)
from django.utils.encoding import force_text
from django.views.decorators.cache import cache_page

from .models import Poll, expensive_calculation

try:    # Use the same idiom as in cache backends
    from django.utils.six.moves import cPickle as pickle
except ImportError:
    import pickle


# functions/classes for complex data type tests
def f():
    return 42


class C:
    def m(n):
        return 24


class Unpickable(object):
    def __getstate__(self):
        raise pickle.PickleError()


@override_settings(CACHES={
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
})
class DummyCacheTests(SimpleTestCase):
    # The Dummy cache backend doesn't really behave like a test backend,
    # so it has its own test case.
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertTrue(result) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Non-existent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set('a', 'a') cache.set('b', 'b') cache.set('c', 'c') cache.set('d', 'd') self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set("key1", "spam") cache.set("key2", "eggs") self.assertIsNone(cache.get("key1")) cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertFalse(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.incr, 'answer') self.assertRaises(ValueError, cache.incr, 'does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.decr, 'answer') self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_data_types(self): "All data types are ignored equally by the dummy cache" stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertIsNone(cache.get("stuff")) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertIsNone(cache.get("expire2")) self.assertFalse(cache.has_key("expire3")) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" cache.set_many({'a': 1, 'b': 2}) cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1') def test_delete_many(self): "delete_many does nothing for the dummy cache backend" cache.delete_many(['a', 'b']) def test_clear(self): "clear does nothing for the dummy cache backend" cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.incr_version, 'answer') self.assertRaises(ValueError, cache.incr_version, 'does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" cache.set('answer', 42) 
self.assertRaises(ValueError, cache.decr_version, 'answer') self.assertRaises(ValueError, cache.decr_version, 'does_not_exist') def custom_key_func(key, key_prefix, version): "A customized cache key function" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, **params): # `base` is used to pull in the memcached config from the original settings, # `params` are test specific overrides and `_caches_settings_base` is the # base config for the tests. # This results in the following search order: # params -> _caches_setting_base -> base base = base or {} setting = {k: base.copy() for k in _caches_setting_base.keys()} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests(object): # A common set of tests to apply to all cache backends def setUp(self): self.factory = RequestFactory() def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_add(self): # A key can be added to a cache cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertFalse(result) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertFalse(caches['prefix'].has_key('somekey')) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): # Non-existent cache keys return as None/default # get with non-existent keys self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set('a', 'a') cache.set('b', 'b') cache.set('c', 'c') cache.set('d', 'd') self.assertDictEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertDictEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted cache.set("key1", "spam") cache.set("key2", "eggs") self.assertEqual(cache.get("key1"), "spam") cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertTrue(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) cache.set("no_expiry", "here", None) self.assertTrue(cache.has_key("no_expiry")) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) self.assertRaises(ValueError, 
cache.incr, 'does_not_exist') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_close(self): self.assertTrue(hasattr(cache, 'close')) cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertEqual(cache.get("expire2"), "newvalue") self.assertFalse(cache.has_key("expire3")) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): cache.delete(key) cache.add(key, value) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): cache.delete(key) cache.set_many(stuff) for (key, value) in stuff.items(): self.assertEqual(cache.get(key), value) def test_binary_string(self): # 
Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add cache.add('binary1-add', compressed_value) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set("key1", "spam") cache.set("key2", "eggs") cache.set("key3", "ham") cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set("key1", "spam") cache.set("key2", "eggs") cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): ''' Using a timeout greater than 30 days makes memcached think it is an absolute expiration timestamp instead of a relative offset. Test that we honour this convention. Refs #12399. ''' cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): ''' Passing in None into timeout results in a value that is cached forever ''' cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', None) self.assertEqual(cache.get('key2'), 'ham') added = cache.add('key1', 'new eggs', None) self.assertEqual(added, False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_zero_timeout(self): ''' Passing in zero into timeout results in a value that is not cached ''' cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) cache.add('key2', 'ham', 0) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. 
cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache, initial_count, final_count): # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count = count + 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test(caches['cull'], 50, 29) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 19) def test_invalid_keys(self): """ All the builtin backends (except memcached, see below) should warn on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. """ # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func try: with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached does not allow whitespace or control characters in keys cache.set('key with spaces', 'value') self.assertEqual(len(w), 2) self.assertIsInstance(w[0].message, CacheKeyWarning) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached limits key length to 250 cache.set('a' * 251, 'value') self.assertEqual(len(w), 1) self.assertIsInstance(w[0].message, CacheKeyWarning) finally: cache.key_func = old_func def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 cache.add('answer1', 42, version=2) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=2) 
self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=1) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 caches['v2'].add('answer2', 42) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37, version=1) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 caches['v2'].add('answer3', 42, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertTrue(cache.has_key('answer1')) self.assertTrue(cache.has_key('answer1', version=1)) self.assertFalse(cache.has_key('answer1', version=2)) self.assertFalse(caches['v2'].has_key('answer1')) self.assertTrue(caches['v2'].has_key('answer1', version=1)) self.assertFalse(caches['v2'].has_key('answer1', version=2)) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.delete('answer1') self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.delete('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].delete('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].delete('answer4', version=1) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.incr('answer1') self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) cache.decr('answer1') self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.incr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) cache.decr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].incr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) caches['v2'].decr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', 
version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].incr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) caches['v2'].decr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertDictEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertDictEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) 
self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) self.assertRaises(ValueError, cache.incr_version, 'does_not_exist') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertRaises(ValueError, cache.decr_version, 'does_not_exist', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpickable_object(self): update_middleware = UpdateCacheMiddleware() update_middleware.cache = cache fetch_middleware = FetchFromCacheMiddleware() fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) response = HttpResponse() content = 'Testing cookie serialization.' 
response.content = content response.set_cookie('foo', 'bar') update_middleware.process_response(request, response) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode('utf-8')) self.assertEqual(get_cache_data.cookies, response.cookies) update_middleware.process_response(request, get_cache_data) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode('utf-8')) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): "See https://code.djangoproject.com/ticket/21200" with self.assertRaises(pickle.PickleError): cache.add('unpickable', Unpickable()) def test_set_fail_on_pickleerror(self): "See https://code.djangoproject.com/ticket/21200" with self.assertRaises(pickle.PickleError): cache.set('unpickable', Unpickable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') def test_get_or_set_version(self): cache.get_or_set('brian', 1979, version=2) with self.assertRaisesMessage(ValueError, 'You need to specify a value.'): cache.get_or_set('brian') with self.assertRaisesMessage(ValueError, 'You need to specify a value.'): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Spaces are used in the table name to ensure quoting/escaping is working LOCATION='test cache table' )) class DBCacheTests(BaseCacheTests, TransactionTestCase): available_apps = ['cache'] def setUp(self): # The super calls needs to happen first for the settings override. super(DBCacheTests, self).setUp() self.create_table() def tearDown(self): # The super call needs to happen first because it uses the database. super(DBCacheTests, self).tearDown() self.drop_table() def create_table(self): management.call_command('createcachetable', verbosity=0, interactive=False) def drop_table(self): with connection.cursor() as cursor: table_name = connection.ops.quote_name('test cache table') cursor.execute('DROP TABLE %s' % table_name) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 18) def test_second_call_doesnt_crash(self): out = six.StringIO() management.call_command('createcachetable', stdout=out) self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Use another table name to avoid the 'table already exists' message. LOCATION='createcachetable_dry_run_mode' )) def test_createcachetable_dry_run_mode(self): out = six.StringIO() management.call_command('createcachetable', dry_run=True, stdout=out) output = out.getvalue() self.assertTrue(output.startswith("CREATE TABLE")) def test_createcachetable_with_table_argument(self): """ Delete and recreate cache table with legacy behavior (explicitly specifying the table name). 
""" self.drop_table() out = six.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") def test_clear_commits_transaction(self): # Ensure the database transaction is committed (#19896) cache.set("key1", "spam") cache.clear() transaction.rollback() self.assertIsNone(cache.get("key1")) @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter(object): """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): multi_db = True @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0, interactive=False) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0, interactive=False) class PicklingSideEffect(object): def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): if self.cache._lock.active_writers: self.locked = True return {} @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super(LocMemCacheTests, self).setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Check that multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") cache.add('add', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] cache.incr(key) self.assertEqual(expire, cache._expire_info[_key]) cache.decr(key) self.assertEqual(expire, cache._expire_info[_key]) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. memcached_params = {} for _cache_params in settings.CACHES.values(): if _cache_params['BACKEND'].startswith('django.core.cache.backends.memcached.'): memcached_params = _cache_params memcached_never_expiring_params = memcached_params.copy() memcached_never_expiring_params['TIMEOUT'] = None memcached_far_future_params = memcached_params.copy() memcached_far_future_params['TIMEOUT'] = 31536000 # 60*60*24*365, 1 year @unittest.skipUnless(memcached_params, "memcached not available") @override_settings(CACHES=caches_setting_for_tests(base=memcached_params)) class MemcachedCacheTests(BaseCacheTests, TestCase): def test_invalid_keys(self): """ On memcached, we don't introduce a duplicate key validation step (for speed reasons), we just let the memcached API library raise its own exception on bad keys. Refs #6447. In order to be memcached-API-library agnostic, we only assert that a generic exception of some kind is raised. 
""" # memcached does not allow whitespace or control characters in keys self.assertRaises(Exception, cache.set, 'key with spaces', 'value') # memcached limits key length to 250 self.assertRaises(Exception, cache.set, 'a' * 251, 'value') # Explicitly display a skipped test if no configured cache uses MemcachedCache @unittest.skipUnless( memcached_params.get('BACKEND') == 'django.core.cache.backends.memcached.MemcachedCache', "cache with python-memcached library not available") def test_memcached_uses_highest_pickle_version(self): # Regression test for #19810 for cache_key, cache_config in settings.CACHES.items(): if cache_config['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache': self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) @override_settings(CACHES=caches_setting_for_tests(base=memcached_never_expiring_params)) def test_default_never_expiring_timeout(self): # Regression test for #22845 cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') @override_settings(CACHES=caches_setting_for_tests(base=memcached_far_future_params)) def test_default_far_future_timeout(self): # Regression test for #22845 cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_cull(self): # culling isn't implemented, memcached deals with it. pass def test_zero_cull(self): # culling isn't implemented, memcached deals with it. pass def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can # tell from a quick check of its source code. This is falling back to # the default value exposed by python-memcached on my system. max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576) cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) cache.set('small_value', large_value) # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super(FileBasedCacheTests, self).setUp() self.dirname = tempfile.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
for cache_params in settings.CACHES.values(): cache_params.update({'LOCATION': self.dirname}) setting_changed.send(self.__class__, setting='CACHES', enter=False) def tearDown(self): super(FileBasedCacheTests, self).tearDown() # Call parent first, as cache.clear() may recreate cache base directory shutil.rmtree(self.dirname) def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) cache.clear() self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non cache files') os.remove(fname) def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir') def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) cache.set('foo', 'bar') os.path.exists(self.dirname) @override_settings(CACHES={ 'default': { 'BACKEND': 'cache.liberal_backend.CacheClass', }, }) class CustomCacheKeyValidationTests(SimpleTestCase): """ Tests for the ability to mixin a custom ``validate_key`` method to a custom cache backend that otherwise inherits from a builtin backend, and override the default key validation. Refs #6447. """ def test_custom_key_validation(self): # this key is both longer than 250 characters, and has spaces key = 'some key with spaces' * 15 val = 'a value' cache.set(key, val) self.assertEqual(cache.get(key), val) @override_settings( CACHES={ 'default': { 'BACKEND': 'cache.closeable_cache.CacheClass', } } ) class CacheClosingTests(SimpleTestCase): def test_close(self): self.assertFalse(cache.closed) signals.request_finished.send(self.__class__) self.assertTrue(cache.closed) DEFAULT_MEMORY_CACHES_SETTINGS = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } } NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS) NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None class DefaultNonExpiringCacheKeyTests(SimpleTestCase): """Tests that verify that settings having Cache arguments with a TIMEOUT set to `None` will create Caches that will set non-expiring keys. This fixes ticket #22085. """ def setUp(self): # The 5 minute (300 seconds) default expiration time for keys is # defined in the implementation of the initializer method of the # BaseCache type. self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout def tearDown(self): del(self.DEFAULT_TIMEOUT) def test_default_expiration_time_for_keys_is_5_minutes(self): """The default expiration time of a cache key is 5 minutes. This value is defined inside the __init__() method of the :class:`django.core.cache.backends.base.BaseCache` type. """ self.assertEqual(300, self.DEFAULT_TIMEOUT) def test_caches_with_unset_timeout_has_correct_default_timeout(self): """Caches that have the TIMEOUT parameter undefined in the default settings will use the default 5 minute timeout. """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". 
""" cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def text_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class CacheUtils(SimpleTestCase): """TestCase for django.utils.cache functions.""" def setUp(self): self.host = 'www.example.com' self.path = '/cache/test/' self.factory = RequestFactory(HTTP_HOST=self.host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = True if not update_cache else update_cache return request def _set_cache(self, request, msg): response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: response = HttpResponse() if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # Verify that a specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' 
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_get_cache_key_with_query(self):
        request = self.factory.get(self.path, {'test': 1})
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_cache_key_varies_by_url(self):
        """
        get_cache_key keys differ by fully-qualified URL instead of path.
        """
        request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
        learn_cache_key(request1, HttpResponse())
        request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
        learn_cache_key(request2, HttpResponse())
        self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))

    def test_learn_cache_key(self):
        request = self.factory.head(self.path)
        response = HttpResponse()
        response['Vary'] = 'Pony'
        # Make sure that the Vary header is added to the key hash.
        learn_cache_key(request, response)
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_patch_cache_control(self):
        tests = (
            # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts.
            (None, {'private': True}, {'private'}),
            # Test whether private/public attributes are mutually exclusive.
            ('private', {'private': True}, {'private'}),
            ('private', {'public': True}, {'public'}),
            ('public', {'public': True}, {'public'}),
            ('public', {'private': True}, {'private'}),
            ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
            ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
            ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
        )

        cc_delim_re = re.compile(r'\s*,\s*')

        for initial_cc, newheaders, expected_cc in tests:
            response = HttpResponse()
            if initial_cc is not None:
                response['Cache-Control'] = initial_cc
            patch_cache_control(response, **newheaders)
            parts = set(cc_delim_re.split(response['Cache-Control']))
            self.assertEqual(parts, expected_cc)


@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix',
        },
    },
)
class PrefixedCacheUtils(CacheUtils):
    pass


@override_settings(
    CACHE_MIDDLEWARE_SECONDS=60,
    CACHE_MIDDLEWARE_KEY_PREFIX='test',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
)
class CacheHEADTest(SimpleTestCase):

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    def _set_cache(self, request, msg):
        response = HttpResponse()
        response.content = msg
        return UpdateCacheMiddleware().process_response(request, response)

    def test_head_caches_correctly(self):
        test_content = 'test content'

        request = self.factory.head(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        request = self.factory.head(self.path)
        request._cache_update_cache = True
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(test_content.encode(), get_cache_data.content)

    def test_head_with_cached_get(self):
        test_content = 'test content'

        request = self.factory.get(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        request = self.factory.head(self.path)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(test_content.encode(), get_cache_data.content)


@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    LANGUAGES=[
        ('en', 'English'),
        ('es', 'Spanish'),
    ],
)
class CacheI18nTest(TestCase):

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    def check_accept_language_vary(self, accept_language, vary, reference_key):
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = vary
        key = learn_cache_key(request, response)
        key2 = get_cache_key(request)
        self.assertEqual(key, reference_key)
        self.assertEqual(key2, reference_key)

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation_accept_language(self):
        lang = translation.get_language()
        self.assertEqual(lang, 'en')
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = 'accept-encoding'
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        self.check_accept_language_vary(
            'en-us',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'en-US',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8',
            'accept-encoding, accept-language, cookie',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8,ko;q=0.6',
            'accept-language, cookie, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
            'accept-encoding, cookie, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
            'accept-language, accept-encoding, cookie',
            key
        )
        self.check_accept_language_vary(
            'ko;q=1.0,en;q=0.5',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko, en',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR, en-US',
            'accept-encoding, accept-language, cookie',
            key
        )

    @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
    def test_cache_key_i18n_formatting(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_i18n_timezone(self):
        request = self.factory.get(self.path)
        # This is tightly coupled to the implementation,
        # but it's the most straightforward way to test the key.
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False)
    def test_cache_key_no_i18n(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
        self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_with_non_ascii_tzname(self):
        # Regression test for #17476
        class CustomTzName(timezone.UTC):
            name = ''

            def tzname(self, dt):
                return self.name

        request = self.factory.get(self.path)
        response = HttpResponse()
        with timezone.override(CustomTzName()):
            CustomTzName.name = 'Hora estándar de Argentina'.encode('UTF-8')  # UTF-8 string
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")

            CustomTzName.name = 'Hora estándar de Argentina'  # unicode
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")

    @override_settings(
        CACHE_MIDDLEWARE_KEY_PREFIX="test",
        CACHE_MIDDLEWARE_SECONDS=60,
        USE_ETAGS=True,
        USE_I18N=True,
    )
    def test_middleware(self):
        def set_cache(request, lang, msg):
            translation.activate(lang)
            response = HttpResponse()
            response.content = msg
            return UpdateCacheMiddleware().process_response(request, response)

        # Cache with a non-empty request.GET.
        request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
        request._cache_update_cache = True

        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # First access: the cache must return None.
        self.assertIsNone(get_cache_data)
        response = HttpResponse()
        content = 'Check for cache with QUERY_STRING'
        response.content = content
        UpdateCacheMiddleware().process_response(request, response)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # The cache must now return the content.
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, content.encode())

        # Different QUERY_STRING: the cache must be empty.
        request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
        request._cache_update_cache = True
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)

        # i18n tests
        en_message = "Hello world!"
        es_message = "Hola mundo!"
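        # With USE_I18N enabled, the learned cache key incorporates the active
        # language, so the 'en' and 'es' responses below are stored under
        # distinct keys for the same URL.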
        request = self.factory.get(self.path)
        request._cache_update_cache = True
        set_cache(request, 'en', en_message)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # Check that we can recover the cache.
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, en_message.encode())
        # Check that we use ETags.
        self.assertTrue(get_cache_data.has_header('ETag'))
        # Check that we can disable ETags.
        with self.settings(USE_ETAGS=False):
            request._cache_update_cache = True
            set_cache(request, 'en', en_message)
            get_cache_data = FetchFromCacheMiddleware().process_request(request)
            self.assertFalse(get_cache_data.has_header('ETag'))
        # Change the session language and set the content.
        request = self.factory.get(self.path)
        request._cache_update_cache = True
        set_cache(request, 'es', es_message)
        # Change the language again.
        translation.activate('en')
        # Retrieve the content from the cache.
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, en_message.encode())
        # Change the language again.
        translation.activate('es')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, es_message.encode())
        # Reset the language.
        translation.deactivate()

    @override_settings(
        CACHE_MIDDLEWARE_KEY_PREFIX="test",
        CACHE_MIDDLEWARE_SECONDS=60,
        USE_ETAGS=True,
    )
    def test_middleware_doesnt_cache_streaming_response(self):
        request = self.factory.get(self.path)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)

        # This test passes on Python < 3.3 even without the corresponding code
        # in UpdateCacheMiddleware, because pickling a StreamingHttpResponse
        # fails (http://bugs.python.org/issue14288). LocMemCache silently
        # swallows the exception and doesn't store the response in cache.
        content = ['Check for cache with streaming content.']
        response = StreamingHttpResponse(content)
        UpdateCacheMiddleware().process_response(request, response)

        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)


@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix'
        },
    },
)
class PrefixedCacheI18nTest(CacheI18nTest):
    pass


def hello_world_view(request, value):
    return HttpResponse('Hello World %s' % value)


def csrf_view(request):
    return HttpResponse(csrf(request)['csrf_token'])


@override_settings(
    CACHE_MIDDLEWARE_ALIAS='other',
    CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
    CACHE_MIDDLEWARE_SECONDS=30,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
        'other': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'other',
            'TIMEOUT': '1',
        },
    },
)
class CacheMiddlewareTest(SimpleTestCase):

    def setUp(self):
        super(CacheMiddlewareTest, self).setUp()
        self.factory = RequestFactory()
        self.default_cache = caches['default']
        self.other_cache = caches['other']

    def tearDown(self):
        self.default_cache.clear()
        self.other_cache.clear()
        super(CacheMiddlewareTest, self).tearDown()

    def test_constructor(self):
        """
        Ensure the constructor correctly distinguishes between usage of
        CacheMiddleware as middleware and usage of CacheMiddleware as a view
        decorator, and sets attributes appropriately.
        """
        # If no arguments are passed in construction, it's being used as middleware.
        middleware = CacheMiddleware()

        # Now test object attributes against values defined in setUp above.
        self.assertEqual(middleware.cache_timeout, 30)
        self.assertEqual(middleware.key_prefix, 'middlewareprefix')
        self.assertEqual(middleware.cache_alias, 'other')

        # If arguments are being passed in construction, it's being used as a decorator.
        # First, test with "defaults":
        as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)

        self.assertEqual(as_view_decorator.cache_timeout, 30)  # Timeout value for 'default' cache, i.e. 30
        self.assertEqual(as_view_decorator.key_prefix, '')
        self.assertEqual(as_view_decorator.cache_alias, 'default')  # Value of DEFAULT_CACHE_ALIAS from django.core.cache

        # Next, test with custom values:
        as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo')

        self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
        self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
        self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')

    def test_middleware(self):
        middleware = CacheMiddleware()
        prefix_middleware = CacheMiddleware(key_prefix='prefix1')
        timeout_middleware = CacheMiddleware(cache_timeout=1)

        request = self.factory.get('/view/')

        # Put the request through the request middleware.
        result = middleware.process_request(request)
        self.assertIsNone(result)

        response = hello_world_view(request, '1')

        # Now put the response through the response middleware.
        response = middleware.process_response(request, response)

        # Repeating the request should result in a cache hit.
        result = middleware.process_request(request)
        self.assertIsNotNone(result)
        self.assertEqual(result.content, b'Hello World 1')

        # The same request through a different middleware won't hit.
        result = prefix_middleware.process_request(request)
        self.assertIsNone(result)

        # The same request with a timeout _will_ hit.
        result = timeout_middleware.process_request(request)
        self.assertIsNotNone(result)
        self.assertEqual(result.content, b'Hello World 1')

    def test_view_decorator(self):
        # Decorate the same view with different cache decorators.
        default_view = cache_page(3)(hello_world_view)
        default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)

        explicit_default_view = cache_page(3, cache='default')(hello_world_view)
        explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)

        other_view = cache_page(1, cache='other')(hello_world_view)
        other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)

        request = self.factory.get('/view/')

        # Request the view once.
        response = default_view(request, '1')
        self.assertEqual(response.content, b'Hello World 1')

        # Request again -- hit the cache.
        response = default_view(request, '2')
        self.assertEqual(response.content, b'Hello World 1')

        # Requesting the same view with the explicit cache should yield the same result.
        response = explicit_default_view(request, '3')
        self.assertEqual(response.content, b'Hello World 1')

        # Requesting with a prefix will hit a different cache key.
        response = explicit_default_with_prefix_view(request, '4')
        self.assertEqual(response.content, b'Hello World 4')

        # Hitting the same view again gives a cache hit.
        response = explicit_default_with_prefix_view(request, '5')
        self.assertEqual(response.content, b'Hello World 4')

        # And going back to the implicit cache will hit the same cache.
        response = default_with_prefix_view(request, '6')
        self.assertEqual(response.content, b'Hello World 4')
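        # The 'other' cache alias is configured with TIMEOUT '1' (see the
        # override_settings above), so its entries expire after about a
        # second; the expiry assertions below depend on that.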
        # Requesting from an alternate cache won't hit the cache.
        response = other_view(request, '7')
        self.assertEqual(response.content, b'Hello World 7')

        # But a repeated hit will hit the cache.
        response = other_view(request, '8')
        self.assertEqual(response.content, b'Hello World 7')

        # And prefixing the alternate cache yields yet another cache entry.
        response = other_with_prefix_view(request, '9')
        self.assertEqual(response.content, b'Hello World 9')

        # But if we wait a couple of seconds...
        time.sleep(2)

        # ... the default cache will still hit.
        caches['default']
        response = default_view(request, '11')
        self.assertEqual(response.content, b'Hello World 1')

        # ... the default cache with a prefix will still hit.
        response = default_with_prefix_view(request, '12')
        self.assertEqual(response.content, b'Hello World 4')

        # ... the explicit default cache will still hit.
        response = explicit_default_view(request, '13')
        self.assertEqual(response.content, b'Hello World 1')

        # ... the explicit default cache with a prefix will still hit.
        response = explicit_default_with_prefix_view(request, '14')
        self.assertEqual(response.content, b'Hello World 4')

        # ... but a rapidly expiring cache won't hit.
        response = other_view(request, '15')
        self.assertEqual(response.content, b'Hello World 15')

        # ... even if it has a prefix.
        response = other_with_prefix_view(request, '16')
        self.assertEqual(response.content, b'Hello World 16')

    def test_sensitive_cookie_not_cached(self):
        """
        Django must prevent caching of responses that set a user-specific
        (and maybe security sensitive) cookie in response to a cookie-less
        request.
        """
        csrf_middleware = CsrfViewMiddleware()
        cache_middleware = CacheMiddleware()

        request = self.factory.get('/view/')
        self.assertIsNone(cache_middleware.process_request(request))

        csrf_middleware.process_view(request, csrf_view, (), {})

        response = csrf_view(request)

        response = csrf_middleware.process_response(request, response)
        response = cache_middleware.process_response(request, response)

        # Inserting a CSRF cookie in a cookie-less request prevented caching.
        self.assertIsNone(cache_middleware.process_request(request))


@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHE_MIDDLEWARE_SECONDS=1,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    USE_I18N=False,
)
class TestWithTemplateResponse(SimpleTestCase):
    """
    Tests various headers with TemplateResponse.

    Most are probably redundant since they manipulate the same object anyway,
    but the ETag header is 'special' because it relies on the content being
    complete (which is not necessarily always the case with a
    TemplateResponse).
    """
    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    def test_patch_vary_headers(self):
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            template = engines['django'].from_string("This is a test")
            response = TemplateResponse(HttpRequest(), template)
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)

    def test_get_cache_key(self):
        request = self.factory.get(self.path)
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        key_prefix = 'localprefix'
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
        )
        # Verify that a specified key_prefix is taken into account.
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(
            get_cache_key(request, key_prefix=key_prefix),
            'views.decorators.cache.cache_page.localprefix.GET.'
            '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_get_cache_key_with_query(self):
        request = self.factory.get(self.path, {'test': 1})
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
        )

    @override_settings(USE_ETAGS=False)
    def test_without_etag(self):
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertFalse(response.has_header('ETag'))

    @override_settings(USE_ETAGS=True)
    def test_with_etag(self):
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertTrue(response.has_header('ETag'))


class TestMakeTemplateFragmentKey(SimpleTestCase):
    def test_without_vary_on(self):
        key = make_template_fragment_key('a.fragment')
        self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')

    def test_with_one_vary_on(self):
        key = make_template_fragment_key('foo', ['abc'])
        self.assertEqual(key, 'template.cache.foo.900150983cd24fb0d6963f7d28e17f72')

    def test_with_many_vary_on(self):
        key = make_template_fragment_key('bar', ['abc', 'def'])
        self.assertEqual(key, 'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88')

    def test_proper_escaping(self):
        key = make_template_fragment_key('spam', ['abc:def%'])
        self.assertEqual(key, 'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469')


class CacheHandlerTest(SimpleTestCase):
    def test_same_instance(self):
        """
        Attempting to retrieve the same alias should yield the same instance.
        """
        cache1 = caches['default']
        cache2 = caches['default']

        self.assertIs(cache1, cache2)

    def test_per_thread(self):
        """
        Requesting the same alias from separate threads should yield separate
        instances.
        """
        c = []

        def runner():
            c.append(caches['default'])

        for x in range(2):
            t = threading.Thread(target=runner)
            t.start()
            t.join()

        self.assertIsNot(c[0], c[1])
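
# A minimal sketch of the handler behavior checked above (illustrative only;
# assumes the locmem 'default' cache configured in this file's settings):
#
#     from django.core.cache import caches
#     assert caches['default'] is caches['default']  # same instance per thread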
rush2catch/algorithms-leetcode
refs/heads/master
Trees/leet_145_BinaryTreePostorderTraversal.py
1
# Problem: Binary Tree Post-order Traversal
# Difficulty: Hard
# Category: Tree
# Leetcode 145: https://leetcode.com/problems/binary-tree-postorder-traversal/description/
# Description:
"""
Given a binary tree, return the postorder traversal of its nodes' values.

For example:
Given binary tree {1,#,2,3},
   1
    \
     2
    /
   3
return [3,2,1].
"""


# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution(object):
    def postorder(self, root):
        # Iterative post-order via a reversed "root, right, left" walk:
        # nodes are visited root-first, but each value is prepended to the
        # result, so the final list comes out in left, right, root order
        # without recursion.
        if root is None:
            return []
        curr = root
        res = []
        stack = []
        while curr or stack:
            if curr:
                stack.append(curr)
                res.insert(0, curr.val)  # prepend, reversing the visit order
                curr = curr.right        # mirror of pre-order: right before left
            else:
                node = stack.pop()
                curr = node.left
        return res
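
# A small stand-alone check (illustrative sketch, not part of the original
# submission): builds the {1,#,2,3} tree from the docstring above. TreeNode
# mirrors the standard LeetCode definition commented out above; it is added
# here only so the example runs.
if __name__ == '__main__':
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None

    root = TreeNode(1)
    root.right = TreeNode(2)
    root.right.left = TreeNode(3)
    print(Solution().postorder(root))  # expected: [3, 2, 1]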
dhalleine/tensorflow
refs/heads/master
tensorflow/python/kernel_tests/depthtospace_op_test.py
4
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Functional tests for DepthToSpace op."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf


class DepthToSpaceTest(tf.test.TestCase):

  def _testOne(self, inputs, block_size, outputs):
    for use_gpu in [False, True]:
      with self.test_session(use_gpu=use_gpu):
        x_tf = tf.depth_to_space(tf.to_float(inputs), block_size)
        self.assertAllEqual(x_tf.eval(), outputs)

  def testBasic(self):
    x_np = [[[[1, 2, 3, 4]]]]
    block_size = 2
    x_out = [[[[1], [2]], [[3], [4]]]]
    self._testOne(x_np, block_size, x_out)

  # Tests for larger input dimensions, to make sure elements are
  # correctly ordered spatially.
  def testBlockSize2(self):
    x_np = [[[[1, 2, 3, 4],
              [5, 6, 7, 8]],
             [[9, 10, 11, 12],
              [13, 14, 15, 16]]]]
    block_size = 2
    x_out = [[[[1], [2], [5], [6]],
              [[3], [4], [7], [8]],
              [[9], [10], [13], [14]],
              [[11], [12], [15], [16]]]]
    self._testOne(x_np, block_size, x_out)

  def testBlockSize2Batch10(self):
    block_size = 2

    def batch_input_elt(i):
      return [[[1 * i, 2 * i, 3 * i, 4 * i],
               [5 * i, 6 * i, 7 * i, 8 * i]],
              [[9 * i, 10 * i, 11 * i, 12 * i],
               [13 * i, 14 * i, 15 * i, 16 * i]]]

    def batch_output_elt(i):
      return [[[1 * i], [2 * i], [5 * i], [6 * i]],
              [[3 * i], [4 * i], [7 * i], [8 * i]],
              [[9 * i], [10 * i], [13 * i], [14 * i]],
              [[11 * i], [12 * i], [15 * i], [16 * i]]]

    batch_size = 10
    x_np = [batch_input_elt(i) for i in range(batch_size)]
    x_out = [batch_output_elt(i) for i in range(batch_size)]
    self._testOne(x_np, block_size, x_out)

  # Tests for different width and height.
  def testNonSquare(self):
    x_np = [[[[1, 10, 2, 20, 3, 30, 4, 40]],
             [[5, 50, 6, 60, 7, 70, 8, 80]],
             [[9, 90, 10, 100, 11, 110, 12, 120]]]]
    block_size = 2
    x_out = [[[[1, 10], [2, 20]],
              [[3, 30], [4, 40]],
              [[5, 50], [6, 60]],
              [[7, 70], [8, 80]],
              [[9, 90], [10, 100]],
              [[11, 110], [12, 120]]]]
    self._testOne(x_np, block_size, x_out)

  # Tests for larger input dimensions, to make sure elements are
  # correctly ordered spatially.
  def testBlockSize4FlatInput(self):
    x_np = [[[[1, 2, 5, 6, 3, 4, 7, 8, 9, 10, 13, 14, 11, 12, 15, 16]]]]
    block_size = 4
    x_out = [[[[1], [2], [5], [6]],
              [[3], [4], [7], [8]],
              [[9], [10], [13], [14]],
              [[11], [12], [15], [16]]]]
    self._testOne(x_np, block_size, x_out)

  # Tests for larger input depths, to make sure elements are properly
  # interleaved in depth.
  def testDepthInterleaved(self):
    x_np = [[[[1, 10, 2, 20, 3, 30, 4, 40]]]]
    block_size = 2
    x_out = [[[[1, 10], [2, 20]],
              [[3, 30], [4, 40]]]]
    self._testOne(x_np, block_size, x_out)

  # Tests for larger input depths, here an odd depth, to make sure
  # elements are properly interleaved in depth.
  def testDepthInterleavedDepth3(self):
    x_np = [[[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]]]]
    block_size = 2
    x_out = [[[[1, 2, 3], [4, 5, 6]],
              [[7, 8, 9], [10, 11, 12]]]]
    self._testOne(x_np, block_size, x_out)

  # Tests for larger input depths, to make sure elements are properly
  # interleaved in depth.
  def testDepthInterleavedLarger(self):
    x_np = [[[[1, 10, 2, 20, 3, 30, 4, 40],
              [5, 50, 6, 60, 7, 70, 8, 80]],
             [[9, 90, 10, 100, 11, 110, 12, 120],
              [13, 130, 14, 140, 15, 150, 16, 160]]]]
    block_size = 2
    x_out = [[[[1, 10], [2, 20], [5, 50], [6, 60]],
              [[3, 30], [4, 40], [7, 70], [8, 80]],
              [[9, 90], [10, 100], [13, 130], [14, 140]],
              [[11, 110], [12, 120], [15, 150], [16, 160]]]]
    self._testOne(x_np, block_size, x_out)

  # Error handling:

  # Tests for a block size larger than the depth; this should raise an
  # exception.
  def testBlockSizeTooLarge(self):
    x_np = [[[[1, 2, 3, 4],
              [5, 6, 7, 8]],
             [[9, 10, 11, 12],
              [13, 14, 15, 16]]]]
    block_size = 4
    # Raises an exception, since the depth is only 4 and needs to be
    # divisible by 16.
    with self.assertRaises(IndexError):
      out_tf = tf.depth_to_space(x_np, block_size)
      out_tf.eval()

  # Test when the block size is 0.
  def testBlockSize0(self):
    x_np = [[[[1], [2]],
             [[3], [4]]]]
    block_size = 0
    with self.assertRaises(ValueError):
      out_tf = tf.depth_to_space(x_np, block_size)
      out_tf.eval()

  # Test when the block size is 1. The block size should be > 1.
  def testBlockSizeOne(self):
    x_np = [[[[1, 1, 1, 1],
              [2, 2, 2, 2]],
             [[3, 3, 3, 3],
              [4, 4, 4, 4]]]]
    block_size = 1
    with self.assertRaises(ValueError):
      out_tf = tf.depth_to_space(x_np, block_size)
      out_tf.eval()

  def testBlockSizeLargerThanInput(self):
    # The block size is too large for this input.
    x_np = [[[[1], [2]],
             [[3], [4]]]]
    block_size = 10
    with self.assertRaises(IndexError):
      out_tf = tf.depth_to_space(x_np, block_size)
      out_tf.eval()

  def testBlockSizeNotDivisibleDepth(self):
    # The depth is not divisible by the square of the block size.
    x_np = [[[[1, 1, 1, 1],
              [2, 2, 2, 2]],
             [[3, 3, 3, 3],
              [4, 4, 4, 4]]]]
    block_size = 3
    with self.assertRaises(IndexError):
      _ = tf.depth_to_space(x_np, block_size)

  def testUnknownShape(self):
    t = tf.depth_to_space(tf.placeholder(tf.float32), block_size=4)
    self.assertEqual(4, t.get_shape().ndims)


class DepthToSpaceGradientTest(tf.test.TestCase):

  # Check the gradients.
  def _checkGrad(self, x, block_size):
    assert 4 == x.ndim
    with self.test_session():
      tf_x = tf.convert_to_tensor(x)
      tf_y = tf.depth_to_space(tf_x, block_size)
      epsilon = 1e-2
      (x_jacob_t, x_jacob_n) = tf.test.compute_gradient(
          tf_x,
          x.shape,
          tf_y,
          tf_y.get_shape().as_list(),
          x_init_value=x,
          delta=epsilon)

    self.assertAllClose(x_jacob_t, x_jacob_n, rtol=1e-2, atol=epsilon)

  # Tests a gradient for depth_to_space of x, which is a four-dimensional
  # tensor of shape [b, h, w, d * block_size * block_size].
  def _compare(self, b, h, w, d, block_size):
    block_size_sq = block_size * block_size
    x = np.random.normal(
        0, 1, b * h * w * d * block_size_sq).astype(np.float32).reshape(
            [b, h, w, d * block_size_sq])
    self._checkGrad(x, block_size)

  # Don't use very large numbers as dimensions here, as the result is a
  # tensor with the cartesian product of the dimensions.
  def testSmall(self):
    block_size = 2
    self._compare(3, 2, 5, 3, block_size)

  def testSmall2(self):
    block_size = 3
    self._compare(1, 2, 3, 2, block_size)


if __name__ == "__main__":
  tf.test.main()
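
# For reference: the NHWC rearrangement exercised above can be reproduced in
# NumPy (illustrative sketch only; depth_to_space_ref is a hypothetical
# helper, not a TensorFlow API):
#
#   def depth_to_space_ref(x, bs):
#     b, h, w, c = x.shape
#     y = x.reshape(b, h, w, bs, bs, c // (bs * bs))
#     y = y.transpose(0, 1, 3, 2, 4, 5)  # interleave block rows/cols with h, w
#     return y.reshape(b, h * bs, w * bs, c // (bs * bs))
#
#   depth_to_space_ref(np.array([[[[1, 2, 3, 4]]]]), 2)
#   # -> [[[[1], [2]], [[3], [4]]]], matching testBasic above.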
barbuza/django
refs/heads/master
django/db/migrations/operations/__init__.py
394
from .fields import AddField, AlterField, RemoveField, RenameField
from .models import (
    AlterIndexTogether, AlterModelManagers, AlterModelOptions, AlterModelTable,
    AlterOrderWithRespectTo, AlterUniqueTogether, CreateModel, DeleteModel,
    RenameModel,
)
from .special import RunPython, RunSQL, SeparateDatabaseAndState

__all__ = [
    'CreateModel', 'DeleteModel', 'AlterModelTable', 'AlterUniqueTogether',
    'RenameModel', 'AlterIndexTogether', 'AlterModelOptions', 'AddField',
    'RemoveField', 'AlterField', 'RenameField', 'SeparateDatabaseAndState',
    'RunSQL', 'RunPython', 'AlterOrderWithRespectTo', 'AlterModelManagers',
]
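
# Typical usage sketch (illustrative only; the app, model, and field names
# below are made up): generated migrations reference these operations through
# django.db.migrations.
#
#   from django.db import migrations, models
#
#   class Migration(migrations.Migration):
#       dependencies = [('library', '0001_initial')]
#       operations = [
#           migrations.AddField('author', 'age', models.IntegerField(null=True)),
#       ]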
Kimanicodes/bc-9-jibiza
refs/heads/master
Questions/__init__.py
12133432
synasius/django
refs/heads/master
tests/gis_tests/relatedapp/__init__.py
12133432