repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
timoschwarzer/blendworks | refs/heads/master | BlendWorks Server/python/Lib/idlelib/ObjectBrowser.py | 67 | # XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - more doc strings
# - tooltips
# object browser
# XXX TO DO:
# - for classes/modules, add "open source" to object browser
from idlelib.TreeWidget import TreeItem, TreeNode, ScrolledCanvas
from reprlib import Repr
# Shared Repr instance used to render node text; string and other reprs are
# truncated to 100 characters so huge objects don't overwhelm the tree view.
myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100
class ObjectTreeItem(TreeItem):
    """Tree item wrapping an arbitrary Python object.

    Children are the object's attributes (one item per dir() entry).
    Editing an item evaluates the typed text and assigns the result
    back through *setfunction*, when one was supplied.
    """

    def __init__(self, labeltext, object, setfunction=None):
        self.labeltext = labeltext
        self.object = object
        self.setfunction = setfunction

    def GetLabelText(self):
        return self.labeltext

    def GetText(self):
        # Truncated repr keeps the display readable for large objects.
        return myrepr.repr(self.object)

    def GetIconName(self):
        # Leaf nodes get the generic "python" icon; expandable nodes fall
        # back to the TreeItem default (implicit None return).
        if not self.IsExpandable():
            return "python"

    def IsEditable(self):
        return self.setfunction is not None

    def SetText(self, text):
        # Evaluate the user's input and push it through setfunction.  Any
        # failure (syntax error, setfunction raising) is silently ignored
        # and the previous value is kept.
        try:
            value = eval(text)
            self.setfunction(value)
        except:
            pass
        else:
            self.object = value

    def IsExpandable(self):
        return bool(dir(self.object))

    def GetSubList(self):
        sublist = []
        for name in dir(self.object):
            try:
                attr_value = getattr(self.object, name)
            except AttributeError:
                # dir() can report names that getattr() cannot fetch.
                continue
            def assign(value, name=name, target=self.object):
                setattr(target, name, value)
            sublist.append(
                make_objecttreeitem(str(name) + " =", attr_value, assign))
        return sublist
class ClassTreeItem(ObjectTreeItem):
    """Tree item for a class: always expandable, lists __bases__ first."""

    def IsExpandable(self):
        return True

    def GetSubList(self):
        sublist = ObjectTreeItem.GetSubList(self)
        bases = self.object.__bases__
        # A single base is shown directly; multiple bases are grouped
        # under one "__bases__ =" entry.
        if len(bases) == 1:
            base_item = make_objecttreeitem("__bases__[0] =", bases[0])
        else:
            base_item = make_objecttreeitem("__bases__ =", bases)
        sublist.insert(0, base_item)
        return sublist
class AtomicObjectTreeItem(ObjectTreeItem):
    """Tree item for atomic values (int, float, str): never expandable."""

    def IsExpandable(self):
        # Return a real bool for consistency with the other IsExpandable
        # implementations in this module (previously returned the int 0,
        # which is truth-equivalent but type-inconsistent).
        return False
class SequenceTreeItem(ObjectTreeItem):
    """Tree item for indexable sequences; children are the elements."""

    def IsExpandable(self):
        return len(self.object) > 0

    def keys(self):
        # Sequences are keyed by position.
        return range(len(self.object))

    def GetSubList(self):
        items = []
        for index in self.keys():
            try:
                element = self.object[index]
            except KeyError:
                continue
            def setfunction(value, key=index, object=self.object):
                object[key] = value
            items.append(
                make_objecttreeitem("%r:" % (index,), element, setfunction))
        return items
class DictTreeItem(SequenceTreeItem):
    """Tree item for mappings; children are keyed by the dict's keys."""

    def keys(self):
        entries = list(self.object.keys())
        # Sorting can fail for keys of mixed/unorderable types; in that
        # case fall back to the dict's natural iteration order.
        try:
            entries.sort()
        except:
            pass
        return entries
# Map built-in types to their specialized tree-item classes; anything not
# listed falls back to the generic ObjectTreeItem (see make_objecttreeitem).
dispatch = {
    int: AtomicObjectTreeItem,
    float: AtomicObjectTreeItem,
    str: AtomicObjectTreeItem,
    tuple: SequenceTreeItem,
    list: SequenceTreeItem,
    dict: DictTreeItem,
    type: ClassTreeItem,
}
def make_objecttreeitem(labeltext, object, setfunction=None):
    """Create the tree item class appropriate for *object*'s type.

    Looks up type(object) in the dispatch table, defaulting to the
    generic ObjectTreeItem for unlisted types.
    """
    # dict.get with a default replaces the previous if/else lookup.
    item_class = dispatch.get(type(object), ObjectTreeItem)
    return item_class(labeltext, object, setfunction)
# Test script
def _test():
    # Manual smoke test: browse the ``sys`` module in a Tk window.
    import sys
    from tkinter import Tk
    root = Tk()
    root.configure(bd=0, bg="yellow")
    root.focus_set()
    sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1)
    sc.frame.pack(expand=1, fill="both")
    item = make_objecttreeitem("sys", sys)
    node = TreeNode(sc.canvas, None, item)
    node.update()
    root.mainloop()

if __name__ == '__main__':
    _test()
|
Cisco-Talos/pyrebox | refs/heads/master | scripts/getset_var_example.py | 1 | # -------------------------------------------------------------------------------
#
# Copyright (C) 2017 Cisco Talos Security Intelligence and Research Group
#
# PyREBox: Python scriptable Reverse Engineering Sandbox
# Author: Xabier Ugarte-Pedrero
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# -------------------------------------------------------------------------------
from __future__ import print_function
from api import CallbackManager
# Callback manager
cm = None
# Printer function supplied by PyREBox (set in initialize_callbacks).
pyrebox_print = None
def my_createproc(params):
    """Process-creation callback.

    Pushes the new process's pid/pgd/name into the native trigger's
    variables, then prints the (addr, addr) list the trigger maintains.

    :param params: dict with "pid", "pgd" and "name" keys describing the
                   newly created process.
    """
    global cm
    global pyrebox_print
    pid = params["pid"]
    # BUG FIX: this previously read params["pid"], so the value stored in
    # "var2" and reported as the pgd was actually the pid.
    pgd = params["pgd"]
    name = params["name"]
    # Expose the process attributes to the native trigger.
    cm.set_trigger_var("createproc", "var1", pid)
    cm.set_trigger_var("createproc", "var2", pgd)
    cm.set_trigger_var("createproc", "var3", name)
    pyrebox_print("Printing list...\n")
    list_ = cm.get_trigger_var("createproc", "list0")
    for el in list_:
        pyrebox_print("%x - %x" % (el[0], el[1]))
    pyrebox_print("Created process %x with pgd %x and name %s" % (pid, pgd, name))
def clean():
    """Module teardown hook.

    Invoked when the module is unloaded.  Delegates to the callback
    manager, which unregisters every callback this module registered.
    """
    global cm
    global pyrebox_print
    pyrebox_print("[*] Cleaning module")
    cm.clean()
    pyrebox_print("[*] Cleaned module")
def initialize_callbacks(module_hdl, printer):
    '''
    Initialize callbacks for this module. This function
    will be triggered whenever import_module command
    is triggered.
    '''
    global cm
    global pyrebox_print
    # Keep the printer so the rest of the module can report progress.
    pyrebox_print = printer
    pyrebox_print("[*] Initializing callbacks")
    cm = CallbackManager(module_hdl, new_style = True)
    # Register the process-creation callback and attach the native trigger
    # that maintains the shared variables read in my_createproc.
    cm.add_callback(CallbackManager.CREATEPROC_CB, my_createproc, name="createproc")
    cm.add_trigger("createproc", "triggers/trigger_getset_var_example.so")
    # Seed the trigger variables with an example value of each type.
    cm.set_trigger_var("createproc", "var1", 0)
    cm.set_trigger_var("createproc", "var2", 100)
    cm.set_trigger_var("createproc", "var3", "Hello world")
    pyrebox_print("[*] Initialized callbacks")

if __name__ == "__main__":
    print("[*] Loading python module %s" % (__file__))
|
CUCWD/edx-platform | refs/heads/master | common/djangoapps/third_party_auth/settings.py | 9 | """Settings for the third-party auth module.
The flow for settings registration is:
The base settings file contains a boolean, ENABLE_THIRD_PARTY_AUTH, indicating
whether this module is enabled. startup.py probes the ENABLE_THIRD_PARTY_AUTH.
If true, it:
a) loads this module.
b) calls apply_settings(), passing in the Django settings
"""
from openedx.features.enterprise_support.api import insert_enterprise_pipeline_elements
# URL query parameters retained in the pipeline session (see apply_settings).
_FIELDS_STORED_IN_SESSION = ['auth_entry', 'next']
# Middleware injected so that auth exceptions turn into redirects.
_MIDDLEWARE_CLASSES = ['third_party_auth.middleware.ExceptionMiddleware']
_SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/dashboard'
# Extra query-string arguments for the Microsoft Azure AD OAuth2 backend.
_SOCIAL_AUTH_AZUREAD_OAUTH2_AUTH_EXTRA_ARGUMENTS = {
    'msafed': 0
}
def apply_settings(django_settings):
    """Set provider-independent settings on the passed Django settings module."""
    # Whitelisted URL query parameters retained in the pipeline session.
    # Params not in this whitelist will be silently dropped.
    django_settings.FIELDS_STORED_IN_SESSION = _FIELDS_STORED_IN_SESSION

    # Inject exception middleware to make redirects fire.
    django_settings.MIDDLEWARE_CLASSES.extend(_MIDDLEWARE_CLASSES)

    # Where to send the user if there's an error during social authentication
    # and we cannot send them to a more specific URL
    # (see middleware.ExceptionMiddleware).
    django_settings.SOCIAL_AUTH_LOGIN_ERROR_URL = '/'

    # Where to send the user once social authentication is successful.
    django_settings.SOCIAL_AUTH_LOGIN_REDIRECT_URL = _SOCIAL_AUTH_LOGIN_REDIRECT_URL

    # Adding extra key value pair in the url query string for microsoft as per request
    django_settings.SOCIAL_AUTH_AZUREAD_OAUTH2_AUTH_EXTRA_ARGUMENTS = _SOCIAL_AUTH_AZUREAD_OAUTH2_AUTH_EXTRA_ARGUMENTS

    # Inject our customized auth pipeline. All auth backends must work with
    # this pipeline. Order matters: steps execute sequentially.
    django_settings.SOCIAL_AUTH_PIPELINE = [
        'third_party_auth.pipeline.parse_query_params',
        'social_core.pipeline.social_auth.social_details',
        'social_core.pipeline.social_auth.social_uid',
        'social_core.pipeline.social_auth.auth_allowed',
        'social_core.pipeline.social_auth.social_user',
        'third_party_auth.pipeline.associate_by_email_if_login_api',
        'social_core.pipeline.user.get_username',
        'third_party_auth.pipeline.set_pipeline_timeout',
        'third_party_auth.pipeline.ensure_user_information',
        'social_core.pipeline.user.create_user',
        'social_core.pipeline.social_auth.associate_user',
        'social_core.pipeline.social_auth.load_extra_data',
        'social_core.pipeline.user.user_details',
        'third_party_auth.pipeline.user_details_force_sync',
        'third_party_auth.pipeline.set_id_verification_status',
        'third_party_auth.pipeline.set_logged_in_cookies',
        'third_party_auth.pipeline.login_analytics',
    ]

    # Add enterprise pipeline elements if the enterprise app is installed
    insert_enterprise_pipeline_elements(django_settings.SOCIAL_AUTH_PIPELINE)

    # Required so that we can use unmodified PSA OAuth2 backends:
    django_settings.SOCIAL_AUTH_STRATEGY = 'third_party_auth.strategy.ConfigurationModelStrategy'

    # We let the user specify their email address during signup.
    django_settings.SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email']

    # Disable exceptions by default for prod so you get redirect behavior
    # instead of a Django error page. During development you may want to
    # enable this when you want to get stack traces rather than redirections.
    django_settings.SOCIAL_AUTH_RAISE_EXCEPTIONS = False

    # Allow users to login using social auth even if their account is not verified yet
    # This is required since we [ab]use django's 'is_active' flag to indicate verified
    # accounts; without this set to True, python-social-auth won't allow us to link the
    # user's account to the third party account during registration (since the user is
    # not verified at that point).
    # We also generally allow unverified third party auth users to login (see the logic
    # in ensure_user_information in pipeline.py) because otherwise users who use social
    # auth to register with an invalid email address can become "stuck".
    # TODO: Remove the following if/when email validation is separated from the is_active flag.
    django_settings.SOCIAL_AUTH_INACTIVE_USER_LOGIN = True
    django_settings.SOCIAL_AUTH_INACTIVE_USER_URL = '/auth/inactive'

    # Context processors required under Django.
    django_settings.SOCIAL_AUTH_UUID_LENGTH = 4
    django_settings.DEFAULT_TEMPLATE_ENGINE['OPTIONS']['context_processors'] += (
        'social_django.context_processors.backends',
        'social_django.context_processors.login_redirect',
    )
|
msullivan/advent-of-code | refs/heads/master | 2017/12.py | 1 | #!/usr/bin/env python3
import sys
def dfs(graph, seen, nobe):
    """Mark every node reachable from ``nobe`` by adding it to ``seen``.

    Implemented iteratively with an explicit stack so that large or
    deeply-chained graphs cannot overflow the interpreter's recursion
    limit (the original recursive version could raise RecursionError).

    :param graph: mapping of node -> iterable of neighbour nodes
    :param seen: set, mutated in place
    :param nobe: starting node
    """
    stack = [nobe]
    while stack:
        node = stack.pop()
        if node in seen:
            continue
        seen.add(node)
        # Push unvisited neighbours for later processing.
        stack.extend(child for child in graph[node] if child not in seen)
def main(args):
    """Solve 2017 day 12: print the size of the group containing program
    "0", then the total number of connected groups."""
    lines = [line.strip() for line in sys.stdin]
    graph = {}
    for line in lines:
        node, rhs = line.split(" <-> ")
        graph[node] = set(rhs.split(", "))
    reached = set()
    dfs(graph, reached, "0")
    print(len(reached))
    groups = 1
    for node in graph:
        if node not in reached:
            groups += 1
            dfs(graph, reached, node)
    print(groups)

if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
trevor/calendarserver | refs/heads/master | twistedcaldav/simpleresource.py | 1 | ##
# Copyright (c) 2009-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Implements a simple non-file resource.
"""
__all__ = [
"SimpleResource",
"SimpleCalDAVResource",
"SimpleRedirectResource",
"SimpleDataResource",
]
from txweb2 import http
from txweb2.dav.noneprops import NonePropertyStore
from txweb2.http import Response
from twisted.internet.defer import succeed
from twistedcaldav.config import config
from twistedcaldav.resource import CalDAVResource
from txdav.xml import element as davxml
class SimpleResource (
    CalDAVResource,
):
    """
    A simple non-file CalDAV resource with a fixed, protected ACL, no
    dead properties and no etag.
    """

    allReadACL = davxml.ACL(
        # Read access for all users.
        davxml.ACE(
            davxml.Principal(davxml.All()),
            davxml.Grant(davxml.Privilege(davxml.Read())),
            davxml.Protected(),
        ),
    )

    authReadACL = davxml.ACL(
        # Read access for authenticated users.
        davxml.ACE(
            davxml.Principal(davxml.Authenticated()),
            davxml.Grant(davxml.Privilege(davxml.Read())),
            davxml.Protected(),
        ),
    )

    def __init__(self, principalCollections, isdir=False, defaultACL=authReadACL):
        """
        @param principalCollections: principal collections used for ACL handling
        @param isdir: whether this resource reports itself as a collection
        @param defaultACL: the fixed ACL returned by accessControlList()
        """
        CalDAVResource.__init__(self, principalCollections=principalCollections)
        self._isDir = isdir
        self.defaultACL = defaultACL

    def isCollection(self):
        return self._isDir

    def deadProperties(self):
        # Lazily create a property store that stores nothing.
        if not hasattr(self, "_dead_properties"):
            self._dead_properties = NonePropertyStore(self)
        return self._dead_properties

    def etag(self):
        # No versioned content: no etag.
        return succeed(None)

    def accessControlList(self, request, inheritance=True, expanding=False, inherited_aces=None):
        # Inheritance arguments are ignored; the ACL is fixed at construction.
        return succeed(self.defaultACL)

# Backwards-compatible alias.
SimpleCalDAVResource = SimpleResource
class SimpleRedirectResource(SimpleResource):
    """
    A L{SimpleResource} which always performs a redirect.
    """

    def __init__(self, principalCollections, isdir=False, defaultACL=SimpleResource.authReadACL, **kwargs):
        """
        Parameters are URL components and are the same as those for
        L{urlparse.urlunparse}. URL components which are not specified will
        default to the corresponding component of the URL of the request being
        redirected.
        """
        SimpleResource.__init__(self, principalCollections=principalCollections, isdir=isdir, defaultACL=defaultACL)
        # Saved URL component overrides, applied to each incoming request URL.
        self._kwargs = kwargs

    def renderHTTP(self, request):
        # Redirect to the request URL rewritten with the configured server
        # host name plus any component overrides given at construction time.
        return http.RedirectResponse(request.unparseURL(host=config.ServerHostName, **self._kwargs))
class SimpleDataResource(SimpleResource):
    """
    A L{SimpleResource} which serves a fixed payload with a fixed
    content type.
    """

    def __init__(self, principalCollections, content_type, data, defaultACL=SimpleResource.authReadACL):
        """
        @param content_type: the mime content-type of the data
        @type content_type: L{MimeType}
        @param data: the data
        @type data: C{str}
        """
        SimpleResource.__init__(self, principalCollections=principalCollections, isdir=False, defaultACL=defaultACL)
        self.content_type = content_type
        self.data = data

    def contentType(self):
        return self.content_type

    def render(self, request):
        # Always a 200 carrying the canned payload and its content type.
        reply = Response(200, {}, self.data)
        reply.headers.setHeader("content-type", self.content_type)
        return reply
|
mercycorps/tola-activity | refs/heads/master | htdocs/customdashboard/admin.py | 2 | from django.contrib import admin
from .models import ProjectStatus, ProjectStatusAdmin, Gallery, GalleryAdmin, ProgramLinks,ProgramLinksAdmin, Link, LinkAdmin
class GalleryAdmin(admin.ModelAdmin):
    # NOTE(review): this class shadows the GalleryAdmin imported from
    # .models above, so the register() call below uses THIS class.
    # Confirm that importing GalleryAdmin from models is still intended.
    change_form_template = 'customdashboard/admin/change_form.html'

admin.site.register(ProjectStatus, ProjectStatusAdmin)
admin.site.register(Gallery, GalleryAdmin)
admin.site.register(ProgramLinks, ProgramLinksAdmin)
admin.site.register(Link, LinkAdmin)
|
jorsea/odoo-saas-tools | refs/heads/master | saas_utils/connector.py | 13 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010, 2014 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
from openerp import SUPERUSER_ID
def call(dbname, model, method, *args, **kwargs):
    """Invoke *method* on *model* in database *dbname* as the superuser.

    Returns the method's result.  When the model does not define the
    method, returns None silently — presumably intentional best-effort
    behaviour; TODO confirm that a missing model/method should not raise.
    """
    instance = openerp.registry(dbname)
    # The cursor context manager closes (and commits/rolls back) the cursor.
    with instance.cursor() as cr:
        obj = instance.get(model)
        if hasattr(obj, method):
            return getattr(obj, method)(cr, SUPERUSER_ID, *args, **kwargs)
|
martonw/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/QueueStatusServer/model/queuelog.py | 122 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from time import time
from datetime import datetime
from google.appengine.ext import db
from model.workitems import WorkItems
from model.activeworkitems import ActiveWorkItems
class QueueLog(db.Model):
    """Per-queue, per-time-bucket statistics log entity.

    One entity exists per (queue_name, duration, bucket-start-time);
    the entity key name encodes all three (see create_key), so
    concurrent writers converge on the same row.
    """
    date = db.DateTimeProperty()
    # duration specifies in seconds the time period these log values apply to.
    duration = db.IntegerProperty()
    queue_name = db.StringProperty()
    bot_ids_seen = db.StringListProperty()
    max_patches_waiting = db.IntegerProperty(default=0)
    patch_wait_durations = db.ListProperty(int)
    patch_process_durations = db.ListProperty(int)
    patch_retry_count = db.IntegerProperty(default=0)
    status_update_count = db.IntegerProperty(default=0)

    @staticmethod
    def create_key(queue_name, duration, timestamp):
        # Deterministic key so all writers of a bucket share one entity.
        return "%s-%s-%s" % (queue_name, duration, timestamp)

    @classmethod
    def get_at(cls, queue_name, duration, timestamp):
        # Snap the timestamp down to the start of its duration bucket.
        timestamp = int(timestamp / duration) * duration
        date = datetime.utcfromtimestamp(timestamp)
        key = cls.create_key(queue_name, duration, timestamp)
        return cls.get_or_create(key, date=date, duration=duration, queue_name=queue_name)

    @classmethod
    def get_current(cls, queue_name, duration):
        # Log entity for the bucket containing "now".
        return cls.get_at(queue_name, duration, time())

    # This is to prevent page requests from generating lots of rows in the database.
    @classmethod
    def get_or_create(cls, key_name, **kwargs):
        # Transactional get-or-create avoids duplicate entities under races.
        return db.run_in_transaction(cls._get_or_create_txn, key_name, **kwargs)

    def update_max_patches_waiting(self):
        """Raise the high-water mark; return True when it changed."""
        patches_waiting = self._get_patches_waiting(self.queue_name)
        if patches_waiting > self.max_patches_waiting:
            self.max_patches_waiting = patches_waiting
            return True
        return False

    @classmethod
    def _get_or_create_txn(cls, key_name, **kwargs):
        # Runs inside a datastore transaction (see get_or_create).
        entity = cls.get_by_key_name(key_name, parent=kwargs.get('parent'))
        if entity is None:
            entity = cls(key_name=key_name, **kwargs)
        return entity

    @classmethod
    def _get_patches_waiting(cls, queue_name):
        # Queued items minus those currently being processed.
        work_items = WorkItems.lookup_by_queue(queue_name)
        active_work_items = ActiveWorkItems.lookup_by_queue(queue_name)
        return len(set(work_items.item_ids) - set(active_work_items.item_ids))
|
Poles/Poles | refs/heads/master | platforms/linux/SDL2_ttf-2.0.12/external/freetype-2.4.12/src/tools/chktrcmp.py | 381 | #!/usr/bin/env python
#
# Check trace components in FreeType 2 source.
# Author: suzuki toshiya, 2009
#
# This code is explicitly into the public domain.
import sys
import os
import re
# Accumulators: trace components used in sources and those defined in the
# trace-definition header(s), each mapping name -> "file:line" locations.
SRC_FILE_LIST = []
USED_COMPONENT = {}
KNOWN_COMPONENT = {}

# Defaults, overridable via --src-dirs / --def-files (parsed below).
SRC_FILE_DIRS = [ "src" ]
TRACE_DEF_FILES = [ "include/freetype/internal/fttrace.h" ]
# --------------------------------------------------------------
# Parse command line options
#
# NOTE: Python 2 script (print statements throughout).

for i in range( 1, len( sys.argv ) ):
    if sys.argv[i].startswith( "--help" ):
        print "Usage: %s [option]" % sys.argv[0]
        print "Search used-but-defined and defined-but-not-used trace_XXX macros"
        print ""
        print "  --help:"
        print "    Show this help"
        print ""
        print "  --src-dirs=dir1:dir2:..."
        print "    Specify the directories of C source files to be checked"
        print "    Default is %s" % ":".join( SRC_FILE_DIRS )
        print ""
        print "  --def-files=file1:file2:..."
        print "    Specify the header files including FT_TRACE_DEF()"
        print "    Default is %s" % ":".join( TRACE_DEF_FILES )
        print ""
        exit(0)
    if sys.argv[i].startswith( "--src-dirs=" ):
        SRC_FILE_DIRS = sys.argv[i].replace( "--src-dirs=", "", 1 ).split( ":" )
    elif sys.argv[i].startswith( "--def-files=" ):
        TRACE_DEF_FILES = sys.argv[i].replace( "--def-files=", "", 1 ).split( ":" )
# --------------------------------------------------------------
# Scan C source and header files using trace macros.
#

# Matches *.c / *.h pathnames (case-insensitive).
c_pathname_pat = re.compile( '^.*\.[ch]$', re.IGNORECASE )
# Matches '#define FT_COMPONENT trace_<name>' lines.
trace_use_pat = re.compile( '^[ \t]*#define[ \t]+FT_COMPONENT[ \t]+trace_' )

for d in SRC_FILE_DIRS:
    for ( p, dlst, flst ) in os.walk( d ):
        for f in flst:
            if c_pathname_pat.match( f ) != None:
                src_pathname = os.path.join( p, f )
                line_num = 0
                for src_line in open( src_pathname, 'r' ):
                    line_num = line_num + 1
                    src_line = src_line.strip()
                    if trace_use_pat.match( src_line ) != None:
                        # Record every use site as "path:line".
                        component_name = trace_use_pat.sub( '', src_line )
                        if component_name in USED_COMPONENT:
                            USED_COMPONENT[component_name].append( "%s:%d" % ( src_pathname, line_num ) )
                        else:
                            USED_COMPONENT[component_name] = [ "%s:%d" % ( src_pathname, line_num ) ]
# --------------------------------------------------------------
# Scan header file(s) defining trace macros.
#

# Strip everything up to 'FT_TRACE_DEF(' and after the closing paren,
# leaving only the bare component name.
trace_def_pat_opn = re.compile( '^.*FT_TRACE_DEF[ \t]*\([ \t]*' )
trace_def_pat_cls = re.compile( '[ \t\)].*$' )

for f in TRACE_DEF_FILES:
    line_num = 0
    for hdr_line in open( f, 'r' ):
        line_num = line_num + 1
        hdr_line = hdr_line.strip()
        if trace_def_pat_opn.match( hdr_line ) != None:
            component_name = trace_def_pat_opn.sub( '', hdr_line )
            component_name = trace_def_pat_cls.sub( '', component_name )
            if component_name in KNOWN_COMPONENT:
                print "trace component %s is defined twice, see %s and fttrace.h:%d" % \
                      ( component_name, KNOWN_COMPONENT[component_name], line_num )
            else:
                KNOWN_COMPONENT[component_name] = "%s:%d" % \
                                                  ( os.path.basename( f ), line_num )

# --------------------------------------------------------------
# Compare the used and defined trace macros.
#

print "# Trace component used in the implementations but not defined in fttrace.h."
# Python 2: dict.keys() returns a sortable list here.
cmpnt = USED_COMPONENT.keys()
cmpnt.sort()
for c in cmpnt:
    if c not in KNOWN_COMPONENT:
        print "Trace component %s (used in %s) is not defined." % ( c, ", ".join( USED_COMPONENT[c] ) )

print "# Trace component is defined but not used in the implementations."
cmpnt = KNOWN_COMPONENT.keys()
cmpnt.sort()
for c in cmpnt:
    if c not in USED_COMPONENT:
        # "any" is a catch-all component and is expected to be unused.
        if c != "any":
            print "Trace component %s (defined in %s) is not used." % ( c, KNOWN_COMPONENT[c] )
|
trevor/calendarserver | refs/heads/master | txdav/who/xml.py | 1 | # -*- test-case-name: txdav.who.test.test_xml -*-
##
# Copyright (c) 2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
from __future__ import absolute_import
"""
Calendar and contacts directory extensions to L{twext.who.xml}.
Example:
<directory realm="Test Realm">
<record type="location">
<uid>__wsanchez_office__</uid>
<short-name>Wilfredo Sanchez' Office</short-name>
<service-node>Node B</service-node> <!-- unicode -->
<auto-accept-group>__wsanchez_staff__</auto-accept-group> <!-- UID of group -->
<login-allowed><true /></login-allowed>
<has-calendars><true /></has-calendars>
<has-contacts><true /></has-contacts>
<auto-schedule-mode><accept-if-free-decline-if-busy /></auto-schedule-mode>
</record>
</directory>
"""
__all__ = [
"DirectoryService",
]
from twisted.python.constants import Values, ValueConstant
from twext.who.xml import DirectoryService as BaseDirectoryService
from twext.who.util import ConstantsContainer
from .idirectory import RecordType, FieldName, AutoScheduleMode
#
# XML constants
#
class Element(Values):
    """
    XML calendar and contacts element names.

    Each constant is an XML element name; most carry an extra
    C{fieldName} attribute naming the directory field they populate,
    and the auto-schedule mode values carry a C{constantValue}.
    """

    # Provisioning fields
    serviceNodeUID = ValueConstant(u"service-node")
    serviceNodeUID.fieldName = FieldName.serviceNodeUID

    loginAllowed = ValueConstant(u"login-allowed")
    loginAllowed.fieldName = FieldName.loginAllowed

    hasCalendars = ValueConstant(u"has-calendars")
    hasCalendars.fieldName = FieldName.hasCalendars

    hasContacts = ValueConstant(u"has-contacts")
    hasContacts.fieldName = FieldName.hasContacts

    # Auto-schedule fields
    autoScheduleMode = ValueConstant(u"auto-schedule-mode")
    autoScheduleMode.fieldName = FieldName.autoScheduleMode

    autoAcceptGroup = ValueConstant(u"auto-accept-group")
    autoAcceptGroup.fieldName = FieldName.autoAcceptGroup

    # Auto-schedule modes
    none = ValueConstant(u"none")
    none.constantValue = AutoScheduleMode.none

    accept = ValueConstant(u"accept")
    accept.constantValue = AutoScheduleMode.accept

    decline = ValueConstant(u"decline")
    decline.constantValue = AutoScheduleMode.decline

    acceptIfFree = ValueConstant(u"accept-if-free")
    acceptIfFree.constantValue = AutoScheduleMode.acceptIfFree

    declineIfBusy = ValueConstant(u"decline-if-busy")
    declineIfBusy.constantValue = AutoScheduleMode.declineIfBusy

    acceptIfFreeDeclineIfBusy = ValueConstant(
        u"accept-if-free-decline-if-busy"
    )
    acceptIfFreeDeclineIfBusy.constantValue = (
        AutoScheduleMode.acceptIfFreeDeclineIfBusy
    )

    # For "locations", i.e., scheduled spaces:
    capacity = ValueConstant(u"capacity")
    capacity.fieldName = FieldName.capacity

    floor = ValueConstant(u"floor")
    floor.fieldName = FieldName.floor

    associatedAddress = ValueConstant(u"associated-address")
    associatedAddress.fieldName = FieldName.associatedAddress

    # For "addresses", i.e., non-scheduled areas containing locations:
    abbreviatedName = ValueConstant(u"abbreviated-name")
    abbreviatedName.fieldName = FieldName.abbreviatedName

    streetAddress = ValueConstant(u"street-address")
    streetAddress.fieldName = FieldName.streetAddress

    geographicLocation = ValueConstant(u"geographic-location")
    geographicLocation.fieldName = FieldName.geographicLocation
class Attribute(Values):
    """
    XML calendar and contacts attribute names.

    Currently empty: no attributes are added beyond the base service's.
    """
class RecordTypeValue(Values):
    """
    XML attribute values for calendar and contacts record types.

    Each constant carries a C{recordType} attribute mapping the XML
    value to the corresponding directory record type.
    """

    location = ValueConstant(u"location")
    location.recordType = RecordType.location

    resource = ValueConstant(u"resource")
    resource.recordType = RecordType.resource

    address = ValueConstant(u"address")
    address.recordType = RecordType.address
#
# Directory Service
#
class DirectoryService(BaseDirectoryService):
    """
    XML directory service with calendar and contacts data.
    """

    # Extend the base record types with location/resource/address.
    recordType = ConstantsContainer(
        (BaseDirectoryService.recordType, RecordType)
    )

    # MOVE2WHO: Wilfredo had added augment fields into xml, which does make
    # some sense, but for backwards compatibility right now I will take those
    # out, and rely on a separate augment service
    # fieldName = ConstantsContainer(
    #     (BaseDirectoryService.fieldName, FieldName)
    # )

    # XML schema constants
    element = ConstantsContainer(
        (BaseDirectoryService.element, Element)
    )

    attribute = ConstantsContainer(
        (BaseDirectoryService.attribute, Attribute)
    )

    recordTypeValue = ConstantsContainer(
        (BaseDirectoryService.recordTypeValue, RecordTypeValue)
    )
|
C-Aniruddh/ace-kernel_lettuce | refs/heads/cm-12.1 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# futex op codes and flag bits (see linux/futex.h).
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
# Mask that strips the flag bits, leaving the bare futex command.
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000

def avg(total, n):
    # Mean of *n* samples; integer division under Python 2 semantics
    # (callers pass integer totals/counts).
    return total / n

def nsecs(secs, nsecs):
    """Combine a (secs, nsecs) pair into a single nanosecond count."""
    return secs * NSECS_PER_SEC + nsecs

def nsecs_secs(nsecs):
    """Whole seconds of a nanosecond count."""
    # Floor division keeps the result integral under Python 3 as well
    # (behaviour unchanged under Python 2 for integer input).
    return nsecs // NSECS_PER_SEC

def nsecs_nsecs(nsecs):
    """Sub-second remainder of a nanosecond count."""
    return nsecs % NSECS_PER_SEC

def nsecs_str(nsecs):
    """Format a nanosecond count as 'SSSSS.NNNNNNNNN'.

    Bug fix: the original ended the assignment with a trailing comma,
    so the function returned a 1-tuple instead of the formatted string.
    """
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold *value* into the (min, max, avg, count) tuple at dict[key].

    Note: the third field is a running average biased towards recent
    values ((old_avg + value) / 2), matching the original behaviour —
    it is not a true mean over all samples.
    """
    # dict.has_key() was removed in Python 3; the 'in' operator is
    # equivalent and also works under Python 2.
    if key not in dict:
        dict[key] = (value, value, value, 1)
    else:
        min, max, avg, count = dict[key]
        if value < min:
            min = value
        if value > max:
            max = value
        avg = (avg + value) / 2
        dict[key] = (min, max, avg, count + 1)
def clear_term():
    # ANSI escape sequence: cursor home + clear entire screen.
    print("\x1b[H\x1b[2J")
# Warn only once if the audit bindings are missing (see except below).
audit_package_warned = False

try:
    import audit
    # Map os.uname() machine names to audit machine ids for syscall lookup.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64'  : audit.MACH_IA64,
        'ppc'   : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390'  : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386'  : audit.MACH_X86,
        'i586'  : audit.MACH_X86,
        'i686'  : audit.MACH_X86,
    }
    try:
        # MACH_ARMEB is missing from older audit bindings.
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    machine_id = machine_to_id[os.uname()[4]]
except:
    # audit bindings unavailable (or unknown machine): warn once and fall
    # back to numeric syscall ids in syscall_name().
    if not audit_package_warned:
        audit_package_warned = True
        print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
    """Best-effort syscall-number -> name; falls back to the number as text."""
    try:
        return audit.audit_syscall_to_name(id, machine_id)
    except:
        # The audit module or machine_id may be unavailable (see setup above).
        return str(id)
def strerror(nr):
    """Symbolic errno name for *nr*, sign-insensitive (e.g. -2 -> 'ENOENT').

    Falls back to a descriptive string for errno values with no symbolic
    name on this platform.
    """
    # Only KeyError is expected from the lookup; the previous bare
    # 'except' could also hide programming errors (e.g. a non-integer
    # argument, which would crash in the handler's %d formatting anyway).
    try:
        return errno.errorcode[abs(nr)]
    except KeyError:
        return "Unknown %d errno" % nr
|
icodedev7/customap | refs/heads/master | devkit/mingw/bin/lib/lib2to3/fixer_base.py | 305 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Base class for fixers (optional, but recommended)."""
# Python imports
import logging
import itertools
# Local imports
from .patcomp import PatternCompiler
from . import pygram
from .fixer_util import does_tree_import
class BaseFix(object):
    """Optional base class for fixers.

    The subclass name must be FixFooBar where FooBar is the result of
    removing underscores and capitalizing the words of the fix name.
    For example, the class name for a fixer named 'has_key' should be
    FixHasKey.
    """

    PATTERN = None       # Most subclasses should override with a string literal
    pattern = None       # Compiled pattern, set by compile_pattern()
    pattern_tree = None  # Tree representation of the pattern
    options = None       # Options object passed to initializer
    filename = None      # The filename (set by set_filename)
    logger = None        # A logger (set by set_filename)
    numbers = itertools.count(1)  # For new_name()
    used_names = set()   # A set of all used NAMEs
    order = "post"       # Does the fixer prefer pre- or post-order traversal
    explicit = False     # Is this ignored by refactor.py -f all?
    run_order = 5        # Fixers will be sorted by run order before execution
                         # Lower numbers will be run first.
    _accept_type = None  # [Advanced and not public] This tells RefactoringTool
                         # which node type to accept when there's not a pattern.

    keep_line_order = False  # For the bottom matcher: match with the
                             # original line order
    BM_compatible = False    # Compatibility with the bottom matching
                             # module; every fixer should set this
                             # manually

    # Shortcut for access to Python grammar symbols
    syms = pygram.python_symbols
def __init__(self, options, log):
"""Initializer. Subclass may override.
Args:
options: an dict containing the options passed to RefactoringTool
that could be used to customize the fixer through the command line.
log: a list to append warnings and other messages to.
"""
self.options = options
self.log = log
self.compile_pattern()
def compile_pattern(self):
"""Compiles self.PATTERN into self.pattern.
Subclass may override if it doesn't want to use
self.{pattern,PATTERN} in .match().
"""
if self.PATTERN is not None:
PC = PatternCompiler()
self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
with_tree=True)
def set_filename(self, filename):
"""Set the filename, and a logger derived from it.
The main refactoring tool should call this.
"""
self.filename = filename
self.logger = logging.getLogger(filename)
def match(self, node):
"""Returns match for a given parse tree node.
Should return a true or false object (not necessarily a bool).
It may return a non-empty dict of matching sub-nodes as
returned by a matching pattern.
Subclass may override.
"""
results = {"node": node}
return self.pattern.match(node, results) and results
def transform(self, node, results):
"""Returns the transformation for a given parse tree node.
Args:
node: the root of the parse tree that matched the fixer.
results: a dict mapping symbolic names to part of the match.
Returns:
None, or a node that is a modified copy of the
argument node. The node argument may also be modified in-place to
effect the same change.
Subclass *must* override.
"""
raise NotImplementedError()
def new_name(self, template=u"xxx_todo_changeme"):
"""Return a string suitable for use as an identifier
The new name is guaranteed not to conflict with other identifiers.
"""
name = template
while name in self.used_names:
name = template + unicode(self.numbers.next())
self.used_names.add(name)
return name
def log_message(self, message):
if self.first_log:
self.first_log = False
self.log.append("### In file %s ###" % self.filename)
self.log.append(message)
def cannot_convert(self, node, reason=None):
"""Warn the user that a given chunk of code is not valid Python 3,
but that it cannot be converted automatically.
First argument is the top-level node for the code in question.
Optional second argument is why it can't be converted.
"""
lineno = node.get_lineno()
for_output = node.clone()
for_output.prefix = u""
msg = "Line %d: could not convert: %s"
self.log_message(msg % (lineno, for_output))
if reason:
self.log_message(reason)
def warning(self, node, reason):
"""Used for warning the user about possible uncertainty in the
translation.
First argument is the top-level node for the code in question.
Optional second argument is why it can't be converted.
"""
lineno = node.get_lineno()
self.log_message("Line %d: %s" % (lineno, reason))
def start_tree(self, tree, filename):
"""Some fixers need to maintain tree-wide state.
This method is called once, at the start of tree fix-up.
tree - the root node of the tree to be processed.
filename - the name of the file the tree came from.
"""
self.used_names = tree.used_names
self.set_filename(filename)
self.numbers = itertools.count(1)
self.first_log = True
def finish_tree(self, tree, filename):
"""Some fixers need to maintain tree-wide state.
This method is called once, at the conclusion of tree fix-up.
tree - the root node of the tree to be processed.
filename - the name of the file the tree came from.
"""
pass
class ConditionalFix(BaseFix):
    """Base class for fixers which do not execute if an import is found."""

    # Dotted name of the import which, if present in the tree, causes the
    # fixer to be skipped.
    skip_on = None

    def start_tree(self, *args):
        # Invalidate the per-tree cache before delegating to BaseFix.
        super(ConditionalFix, self).start_tree(*args)
        self._should_skip = None

    def should_skip(self, node):
        cached = self._should_skip
        if cached is not None:
            return cached
        # Split "pkg.sub.name" into the package part and the final name.
        parts = self.skip_on.split(".")
        package, name = ".".join(parts[:-1]), parts[-1]
        self._should_skip = does_tree_import(package, name, node)
        return self._should_skip
|
Xaltotun/conan | refs/heads/develop | conans/test/command/help_test.py | 3 | import unittest
from conans.test.tools import TestClient
class BasicTest(unittest.TestCase):

    def help_test(self):
        """Running the bare conan command prints the command help banner."""
        client = TestClient()
        client.run("")
        self.assertIn('Conan commands. Type $conan "command" -h',
                      client.user_io.out)
|
snahelou/awx | refs/heads/devel | awx/main/tests/functional/api/test_job_runtime_params.py | 1 | import mock
import pytest
import yaml
from awx.api.serializers import JobLaunchSerializer
from awx.main.models.credential import Credential
from awx.main.models.inventory import Inventory, Host
from awx.main.models.jobs import Job, JobTemplate
from awx.api.versioning import reverse
@pytest.fixture
def runtime_data(organization, credentialtype_ssh):
    # Builds a full set of launch-time overrides (credential, inventory,
    # extra_vars, limit, job_type, tags) used to exercise JT prompting.
    cred_obj = Credential.objects.create(
        name='runtime-cred',
        credential_type=credentialtype_ssh,
        inputs={
            'username': 'test_user2',
            'password': 'pas4word2'
        }
    )
    inv_obj = organization.inventories.create(name="runtime-inv")
    return dict(
        extra_vars='{"job_launch_var": 4}',
        limit='test-servers',
        job_type='check',
        job_tags='provision',
        skip_tags='restart',
        inventory=inv_obj.pk,
        credential=cred_obj.pk,
    )
@pytest.fixture
def job_with_links(machine_credential, inventory):
    # A pre-existing Job wired to a machine credential and inventory.
    return Job.objects.create(name='existing-job', credential=machine_credential, inventory=inventory)


@pytest.fixture
def job_template_prompts(project, inventory, machine_credential):
    # Factory fixture: rf(True) returns a JT that prompts for every field,
    # rf(False) returns one that prompts for none.
    def rf(on_off):
        return JobTemplate.objects.create(
            job_type='run',
            project=project,
            inventory=inventory,
            credential=machine_credential,
            name='deploy-job-template',
            ask_variables_on_launch=on_off,
            ask_tags_on_launch=on_off,
            ask_skip_tags_on_launch=on_off,
            ask_job_type_on_launch=on_off,
            ask_inventory_on_launch=on_off,
            ask_limit_on_launch=on_off,
            ask_credential_on_launch=on_off,
            ask_verbosity_on_launch=on_off,
        )
    return rf


@pytest.fixture
def job_template_prompts_null(project):
    # A JT that prompts for everything but has no inventory or credential
    # of its own, so all values must come from the launch request.
    return JobTemplate.objects.create(
        job_type='run',
        project=project,
        inventory=None,
        credential=None,
        name='deploy-job-template',
        ask_variables_on_launch=True,
        ask_tags_on_launch=True,
        ask_skip_tags_on_launch=True,
        ask_job_type_on_launch=True,
        ask_inventory_on_launch=True,
        ask_limit_on_launch=True,
        ask_credential_on_launch=True,
        ask_verbosity_on_launch=True,
    )
# End of setup, tests start here
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
    """A JT with all ask_*_on_launch=False ignores every runtime override
    and reports each ignored field back in the response."""
    job_template = job_template_prompts(False)
    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                            runtime_data, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == ({'extra_vars':{}},)
    # Check that job is serialized correctly
    job_id = response.data['job']
    assert job_id == 968
    # If job is created with no arguments, it will inherit JT attributes
    mock_job.signal_start.assert_called_once()
    # Check that response tells us what things were ignored
    assert 'job_launch_var' in response.data['ignored_fields']['extra_vars']
    assert 'job_type' in response.data['ignored_fields']
    assert 'limit' in response.data['ignored_fields']
    assert 'inventory' in response.data['ignored_fields']
    assert 'credential' in response.data['ignored_fields']
    assert 'job_tags' in response.data['ignored_fields']
    assert 'skip_tags' in response.data['ignored_fields']
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
    """A JT with all ask_*_on_launch=True passes every runtime override through."""
    job_template = job_template_prompts(True)
    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                            runtime_data, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == (runtime_data,)
    job_id = response.data['job']
    assert job_id == 968
    mock_job.signal_start.assert_called_once()


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_accept_null_tags(job_template_prompts, post, admin_user, mocker):
    """Explicit empty job_tags/skip_tags are accepted, not treated as unset."""
    job_template = job_template_prompts(True)
    mock_job = mocker.MagicMock(spec=Job, id=968)
    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
                 {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == ({'job_tags':'', 'skip_tags':''},)
    mock_job.signal_start.assert_called_once()
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null, post, rando, mocker):
    """A user with execute + use permissions can supply every prompt to a JT
    that has no inventory/credential of its own."""
    job_template = job_template_prompts_null
    # Give user permission to execute the job template
    job_template.execute_role.members.add(rando)
    # Give user permission to use inventory and credential at runtime
    credential = Credential.objects.get(pk=runtime_data['credential'])
    credential.use_role.members.add(rando)
    inventory = Inventory.objects.get(pk=runtime_data['inventory'])
    inventory.use_role.members.add(rando)
    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
                            runtime_data, rando, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == (runtime_data,)
    job_id = response.data['job']
    assert job_id == 968
    mock_job.signal_start.assert_called_once()


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, admin_user):
    """Bogus job_type/inventory/credential values are rejected with per-field errors."""
    job_template = job_template_prompts(True)
    response = post(
        reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
        dict(job_type='foobicate',  # foobicate is not a valid job type
             inventory=87865, credential=48474), admin_user, expect=400)
    assert response.data['job_type'] == [u'"foobicate" is not a valid choice.']
    assert response.data['inventory'] == [u'Invalid pk "87865" - object does not exist.']
    assert response.data['credential'] == [u'Invalid pk "48474" - object does not exist.']


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, admin_user):
    """Malformed extra_vars payloads fail validation with a parse error."""
    job_template = job_template_prompts(True)
    response = post(
        reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
        dict(extra_vars='{"unbalanced brackets":'), admin_user, expect=400)
    assert 'extra_vars' in response.data
    assert 'valid JSON or YAML' in str(response.data['extra_vars'][0])
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user):
    """Launching a JT that has no inventory and no inventory prompt fails validation."""
    deploy_jobtemplate.inventory = None
    deploy_jobtemplate.save()
    response = post(reverse('api:job_template_launch',
                    kwargs={'pk': deploy_jobtemplate.pk}), {}, admin_user, expect=400)
    assert response.data['inventory'] == ["Job Template 'inventory' is missing or undefined."]


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime_data, post, rando):
    """Supplying an inventory the user lacks 'use' permission on yields a 403."""
    job_template = job_template_prompts(True)
    job_template.execute_role.members.add(rando)
    # Assure that giving an inventory without access to the inventory blocks the launch
    response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                    dict(inventory=runtime_data['inventory']), rando, expect=403)
    assert response.data['detail'] == u'You do not have permission to perform this action.'


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_fails_without_credential_access(job_template_prompts, runtime_data, post, rando):
    """Supplying a credential the user lacks 'use' permission on yields a 403."""
    job_template = job_template_prompts(True)
    job_template.execute_role.members.add(rando)
    # Assure that giving a credential without access blocks the launch
    response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                    dict(credential=runtime_data['credential']), rando, expect=403)
    assert response.data['detail'] == u'You do not have permission to perform this action.'


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
    """Overriding job_type to 'scan' at launch time is rejected."""
    job_template = job_template_prompts(True)
    # Assure that changing the type of a scan job blocks the launch
    response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                    dict(job_type='scan'), admin_user, expect=400)
    assert 'job_type' in response.data
@pytest.mark.django_db
def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
    """Launch-time extra_vars merge with the JT's extra_vars; the prompted
    credential is carried onto the created job."""
    deploy_jobtemplate.extra_vars = '{"job_template_var": 3}'
    deploy_jobtemplate.ask_credential_on_launch = True
    deploy_jobtemplate.save()
    kv = dict(extra_vars={"job_launch_var": 4}, credential=machine_credential.id)
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data=kv,
        context={'obj': deploy_jobtemplate, 'data': kv, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    job_obj = deploy_jobtemplate.create_unified_job(**kv)
    # NOTE(review): yaml.load without an explicit Loader is deprecated/unsafe
    # in newer PyYAML; acceptable here only because the input is locally
    # generated by the code under test.
    final_job_extra_vars = yaml.load(job_obj.extra_vars)
    assert 'job_template_var' in final_job_extra_vars
    assert 'job_launch_var' in final_job_extra_vars
    assert job_obj.credential.id == machine_credential.id


@pytest.mark.django_db
@pytest.mark.parametrize('pks, error_msg', [
    ([1], 'must be network or cloud'),
    ([999], 'object does not exist'),
])
def test_job_launch_JT_with_invalid_extra_credentials(machine_credential, deploy_jobtemplate, pks, error_msg):
    """Extra credentials must exist and be of an allowed kind.

    NOTE(review): the parametrized `error_msg` is never asserted against
    in the body — only `is_valid()` is checked.
    """
    deploy_jobtemplate.ask_credential_on_launch = True
    deploy_jobtemplate.save()
    kv = dict(extra_credentials=pks, credential=machine_credential.id)
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data=kv,
        context={'obj': deploy_jobtemplate, 'data': kv, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated is False


@pytest.mark.django_db
def test_job_launch_JT_enforces_unique_extra_credential_kinds(machine_credential, credentialtype_aws, deploy_jobtemplate):
    """
    JT launching should require that extra_credentials have distinct CredentialTypes
    """
    pks = []
    for i in range(2):
        # Two credentials of the same (aws) type should fail validation together.
        aws = Credential.objects.create(
            name='cred-%d' % i,
            credential_type=credentialtype_aws,
            inputs={
                'username': 'test_user',
                'password': 'pas4word'
            }
        )
        aws.save()
        pks.append(aws.pk)
    kv = dict(extra_credentials=pks, credential=machine_credential.id)
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data=kv,
        context={'obj': deploy_jobtemplate, 'data': kv, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated is False
@pytest.mark.django_db
@pytest.mark.parametrize('ask_credential_on_launch', [True, False])
def test_job_launch_with_no_credentials(deploy_jobtemplate, ask_credential_on_launch):
    """With neither machine nor vault credential, the launch serializer is invalid
    regardless of whether the JT prompts for credentials."""
    deploy_jobtemplate.credential = None
    deploy_jobtemplate.vault_credential = None
    deploy_jobtemplate.ask_credential_on_launch = ask_credential_on_launch
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data={},
        context={'obj': deploy_jobtemplate, 'data': {}, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated is False
    assert serializer.errors['credential'] == ["Job Template 'credential' is missing or undefined."]


@pytest.mark.django_db
def test_job_launch_with_only_vault_credential(vault_credential, deploy_jobtemplate):
    """A vault credential alone satisfies validation and is copied to the job."""
    deploy_jobtemplate.credential = None
    deploy_jobtemplate.vault_credential = vault_credential
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data={},
        context={'obj': deploy_jobtemplate, 'data': {}, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    prompted_fields, ignored_fields = deploy_jobtemplate._accept_or_ignore_job_kwargs(**{})
    job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
    assert job_obj.vault_credential.pk == vault_credential.pk


@pytest.mark.django_db
def test_job_launch_with_vault_credential_ask_for_machine(vault_credential, deploy_jobtemplate):
    """Prompting for a machine credential while only a vault credential exists
    is valid; the job gets no machine credential."""
    deploy_jobtemplate.credential = None
    deploy_jobtemplate.ask_credential_on_launch = True
    deploy_jobtemplate.vault_credential = vault_credential
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data={},
        context={'obj': deploy_jobtemplate, 'data': {}, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    prompted_fields, ignored_fields = deploy_jobtemplate._accept_or_ignore_job_kwargs(**{})
    job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
    assert job_obj.credential is None
    assert job_obj.vault_credential.pk == vault_credential.pk


@pytest.mark.django_db
def test_job_launch_with_vault_credential_and_prompted_machine_cred(machine_credential, vault_credential,
                                                                    deploy_jobtemplate):
    """A machine credential supplied at launch coexists with the JT's vault credential."""
    deploy_jobtemplate.credential = None
    deploy_jobtemplate.ask_credential_on_launch = True
    deploy_jobtemplate.vault_credential = vault_credential
    kv = dict(credential=machine_credential.id)
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data=kv,
        context={'obj': deploy_jobtemplate, 'data': kv, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    prompted_fields, ignored_fields = deploy_jobtemplate._accept_or_ignore_job_kwargs(**kv)
    job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
    assert job_obj.credential.pk == machine_credential.pk
    assert job_obj.vault_credential.pk == vault_credential.pk


@pytest.mark.django_db
def test_job_launch_JT_with_default_vault_credential(machine_credential, vault_credential, deploy_jobtemplate):
    """The JT's own vault credential carries over to the created job."""
    deploy_jobtemplate.credential = machine_credential
    deploy_jobtemplate.vault_credential = vault_credential
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data={},
        context={'obj': deploy_jobtemplate, 'data': {}, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    prompted_fields, ignored_fields = deploy_jobtemplate._accept_or_ignore_job_kwargs(**{})
    job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
    assert job_obj.vault_credential.pk == vault_credential.pk
@pytest.mark.django_db
def test_job_launch_fails_with_missing_vault_password(machine_credential, vault_credential,
                                                      deploy_jobtemplate, post, rando):
    """An 'ASK' vault password blocks launch until one is supplied."""
    vault_credential.vault_password = 'ASK'
    vault_credential.save()
    deploy_jobtemplate.credential = machine_credential
    deploy_jobtemplate.vault_credential = vault_credential
    deploy_jobtemplate.execute_role.members.add(rando)
    deploy_jobtemplate.save()
    # NOTE(review): `rando` occupies the data-payload positional of `post`
    # here (other call sites pass (url, data, user)) — verify against the
    # `post` fixture signature.
    response = post(
        reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
        rando,
        expect=400
    )
    assert response.data['passwords_needed_to_start'] == ['vault_password']


@pytest.mark.django_db
def test_job_launch_fails_with_missing_ssh_password(machine_credential, deploy_jobtemplate, post,
                                                    rando):
    """An 'ASK' SSH password blocks launch until one is supplied."""
    machine_credential.password = 'ASK'
    machine_credential.save()
    deploy_jobtemplate.credential = machine_credential
    deploy_jobtemplate.execute_role.members.add(rando)
    deploy_jobtemplate.save()
    response = post(
        reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
        rando,
        expect=400
    )
    assert response.data['passwords_needed_to_start'] == ['ssh_password']


@pytest.mark.django_db
def test_job_launch_fails_with_missing_vault_and_ssh_password(machine_credential, vault_credential,
                                                              deploy_jobtemplate, post, rando):
    """Both missing passwords are reported together in one response."""
    vault_credential.vault_password = 'ASK'
    vault_credential.save()
    machine_credential.password = 'ASK'
    machine_credential.save()
    deploy_jobtemplate.credential = machine_credential
    deploy_jobtemplate.vault_credential = vault_credential
    deploy_jobtemplate.execute_role.members.add(rando)
    deploy_jobtemplate.save()
    response = post(
        reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
        rando,
        expect=400
    )
    assert sorted(response.data['passwords_needed_to_start']) == ['ssh_password', 'vault_password']


@pytest.mark.django_db
def test_job_launch_pass_with_prompted_vault_password(machine_credential, vault_credential,
                                                      deploy_jobtemplate, post, rando):
    """Supplying the vault password at launch forwards it to Job.signal_start."""
    vault_credential.vault_password = 'ASK'
    vault_credential.save()
    deploy_jobtemplate.credential = machine_credential
    deploy_jobtemplate.vault_credential = vault_credential
    deploy_jobtemplate.execute_role.members.add(rando)
    deploy_jobtemplate.save()
    with mock.patch.object(Job, 'signal_start') as signal_start:
        post(
            reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
            {'vault_password': 'vault-me'},
            rando,
            expect=201
        )
        signal_start.assert_called_with(vault_password='vault-me')
@pytest.mark.django_db
def test_job_launch_JT_with_extra_credentials(machine_credential, credential, net_credential, deploy_jobtemplate):
    """Valid extra credentials supplied at launch are attached to the created job."""
    deploy_jobtemplate.ask_credential_on_launch = True
    deploy_jobtemplate.save()
    kv = dict(extra_credentials=[credential.pk, net_credential.pk], credential=machine_credential.id)
    serializer = JobLaunchSerializer(
        instance=deploy_jobtemplate, data=kv,
        context={'obj': deploy_jobtemplate, 'data': kv, 'passwords': {}})
    validated = serializer.is_valid()
    assert validated
    prompted_fields, ignored_fields = deploy_jobtemplate._accept_or_ignore_job_kwargs(**kv)
    job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
    extra_creds = job_obj.extra_credentials.all()
    assert len(extra_creds) == 2
    assert credential in extra_creds
    assert net_credential in extra_creds


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user):
    """With variable prompting disabled but a survey enabled, only the survey
    variable survives into the created job's extra_vars."""
    job_template = job_template_prompts(False)
    job_template.survey_enabled = True
    job_template.survey_spec = survey_spec_factory('survey_var')
    job_template.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                response = post(
                    reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
                    dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
                    admin_user, expect=201)
                assert JobTemplate.create_unified_job.called
                assert JobTemplate.create_unified_job.call_args == ({'extra_vars':{'survey_var': 4}},)
    job_id = response.data['job']
    assert job_id == 968
    # Check that the survey variable is accepted and the job variable isn't
    mock_job.signal_start.assert_called_once()
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_template_prompts, post, admin_user, host):
    """A provisioning-callback launch on a prompting JT keeps both survey and
    user extra_vars, plus the callback launch_type/limit metadata."""
    job_template = job_template_prompts(True)
    job_template.host_config_key = "foo"
    job_template.survey_enabled = True
    job_template.survey_spec = survey_spec_factory('survey_var')
    job_template.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
                    post(
                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
                        admin_user, expect=201, format='json')
                    assert JobTemplate.create_unified_job.called
                    assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4,
                                                                                        'job_launch_var': 3},
                                                                         'launch_type': 'callback',
                                                                         'limit': 'single-host'},)
    mock_job.signal_start.assert_called_once()


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_template_prompts, post, admin_user, host):
    """Without variable prompting, a callback launch drops user extra_vars
    and only the callback metadata is passed through."""
    job_template = job_template_prompts(False)
    job_template.host_config_key = "foo"
    job_template.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
                    post(
                        reverse('api:job_template_callback', kwargs={'pk':job_template.pk}),
                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
                        admin_user, expect=201, format='json')
                    assert JobTemplate.create_unified_job.called
                    assert JobTemplate.create_unified_job.call_args == ({'launch_type': 'callback',
                                                                         'limit': 'single-host'},)
    mock_job.signal_start.assert_called_once()


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_user):
    """The callback view reports inventory hosts matching the caller."""
    job_template = job_template_prompts(False)
    job_template.host_config_key = "foo"
    job_template.save()
    host_with_alias = Host(name='localhost', inventory=job_template.inventory)
    host_with_alias.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                user=admin_user, expect=200)
        assert tuple(r.data['matching_hosts']) == ('localhost',)


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_template_prompts, admin_user):
    """A host whose ansible_host variable differs from its name no longer matches."""
    job_template = job_template_prompts(False)
    job_template.host_config_key = "foo"
    job_template.save()
    host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
    host_with_alias.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                user=admin_user, expect=200)
        assert not r.data['matching_hosts']
|
danny200309/anaconda | refs/heads/master | commands/doc.py | 5 |
# Copyright (C) 2013 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
from functools import partial
import sublime
import sublime_plugin
from ..anaconda_lib.worker import Worker
from ..anaconda_lib.tooltips import Tooltip
from ..anaconda_lib.callback import Callback
from ..anaconda_lib.helpers import prepare_send_data, is_python, get_settings
class AnacondaDoc(sublime_plugin.TextCommand):
    """Jedi get documentation string for Sublime Text
    """

    # Cached doc string between the async fetch and the redisplay pass.
    documentation = None

    def run(self, edit):
        # Two-phase command: on the first invocation `documentation` is None,
        # so we request the docstring from the worker; the success callback
        # re-runs this command with `documentation` set, which then renders
        # it as a tooltip or an output panel.
        if self.documentation is None:
            try:
                location = self.view.rowcol(self.view.sel()[0].begin())
                # Step inside the parentheses so jedi resolves the callable.
                if self.view.substr(self.view.sel()[0].begin()) in ['(', ')']:
                    location = (location[0], location[1] - 1)
                data = prepare_send_data(location, 'doc', 'jedi')
                if int(sublime.version()) >= 3070:
                    # Tooltips (HTML popups) need ST build 3070 or newer.
                    data['html'] = get_settings(
                        self.view, 'enable_docstrings_tooltip', False)
                Worker().execute(
                    Callback(on_success=self.prepare_data), **data
                )
            except Exception as error:
                print(error)
        else:
            if (get_settings(self.view, 'enable_docstrings_tooltip', False)
                    and int(sublime.version()) >= 3070):
                self.print_popup(edit)
            else:
                self.print_doc(edit)

    def is_enabled(self):
        """Determine if this command is enabled or not
        """
        return is_python(self.view)

    def prepare_data(self, data):
        """Prepare the returned data
        """
        if data['success']:
            self.documentation = data['doc']
            if self.documentation is None or self.documentation == '':
                self._show_status()
            else:
                # Re-run the command; `documentation` is now set, so run()
                # takes the display branch.
                sublime.active_window().run_command(self.name())
        else:
            self._show_status()

    def print_doc(self, edit):
        """Print the documentation string into a Sublime Text panel
        """
        doc_panel = self.view.window().create_output_panel(
            'anaconda_documentation'
        )
        doc_panel.set_read_only(False)
        region = sublime.Region(0, doc_panel.size())
        doc_panel.erase(edit, region)
        doc_panel.insert(edit, 0, self.documentation)
        # Reset the cache so the next invocation fetches fresh docs.
        self.documentation = None
        doc_panel.set_read_only(True)
        doc_panel.show(0)
        self.view.window().run_command(
            'show_panel', {'panel': 'output.anaconda_documentation'}
        )

    def print_popup(self, edit):
        """Show message in a popup
        """
        # First line is treated as the symbol name, the rest as its docstring.
        dlines = self.documentation.splitlines()
        name = dlines[0]
        docstring = ''.join(dlines[1:])
        content = {'name': name, 'content': docstring}
        self.documentation = None
        css = get_settings(self.view, 'anaconda_tooltip_theme', 'dark')
        Tooltip(css).show_tooltip(
            self.view, 'doc', content, partial(self.print_doc, edit))

    def _show_status(self):
        """Show message in the view status bar
        """
        self.view.set_status(
            'anaconda_doc', 'Anaconda: No documentation found'
        )
        # Clear the status message again after five seconds.
        sublime.set_timeout_async(
            lambda: self.view.erase_status('anaconda_doc'), 5000
        )
|
marcoantoniooliveira/labweb | refs/heads/master | oscar/apps/offer/south_migrations/0023_slugify_range_names.py | 16 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from oscar.core.utils import slugify
class Migration(DataMigration):
    def forwards(self, orm):
        """Populate Range.slug from Range.name for every existing row."""
        # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
        # `range` shadows the builtin here (pre-existing naming choice).
        for range in orm['offer.Range'].objects.all():
            range.slug = slugify(range.name)
            range.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': u"orm['catalogue.AttributeEntityType']"})
},
u'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
u'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': u"orm['catalogue.AttributeOptionGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
u'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
u'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.ProductAttribute']", 'through': u"orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Category']", 'through': u"orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': u"orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Product']", 'symmetrical': 'False', 'through': u"orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
u'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.ProductAttribute']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': u"orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Category']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': u"orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'offer.benefit': {
'Meta': {'object_name': 'Benefit'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_affected_items': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
u'offer.condition': {
'Meta': {'object_name': 'Condition'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
u'offer.conditionaloffer': {
'Meta': {'ordering': "['-priority']", 'object_name': 'ConditionalOffer'},
'benefit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Benefit']"}),
'condition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['offer.Condition']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_basket_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'max_global_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_user_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'num_applications': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'offer_type': ('django.db.models.fields.CharField', [], {'default': "'Site'", 'max_length': '128'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'redirect_url': ('oscar.models.fields.ExtendedURLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'unique': 'True', 'null': 'True'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Open'", 'max_length': '64'}),
'total_discount': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
u'offer.range': {
'Meta': {'object_name': 'Range'},
'classes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'classes'", 'blank': 'True', 'to': u"orm['catalogue.ProductClass']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excluded_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'excludes'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'included_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'to': u"orm['catalogue.Category']"}),
'included_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
'includes_all_products': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'unique': 'True', 'null': 'True'})
}
}
complete_apps = ['offer']
symmetrical = True
|
minlexx/xnovacmd | refs/heads/master | ui/xnova/xn_world.py | 1 | # -*- coding: utf-8 -*-
import datetime
import re
from PyQt5.QtCore import pyqtSignal, QThread, QMutex
from .xn_page_cache import XNovaPageCache
from .xn_page_dnl import XNovaPageDownload
from .xn_data import XNAccountInfo, XNCoords, XNFlight, XNPlanet, XNPlanetBuildingItem
from .xn_techtree import XNTechTree_instance
from .xn_parser_overview import OverviewParser
from .xn_parser_userinfo import UserInfoParser
from .xn_parser_curplanet import CurPlanetParser
from .xn_parser_imperium import ImperiumParser
from .xn_parser_galaxy import GalaxyParser
from .xn_parser_planet_buildings import PlanetBuildingsAvailParser, PlanetBuildingsProgressParser
from .xn_parser_planet_energy import PlanetEnergyResParser
from .xn_parser_shipyard import ShipyardShipsAvailParser, ShipyardBuildsInProgressParser
from .xn_parser_research import ResearchAvailParser
from .xn_parser_techtree import TechtreeParser
from .xn_parser_fleet import FleetsMaxParser
from . import xn_logger
# Module-level logger for this module; debug output enabled.
logger = xn_logger.get(__name__, debug=True)
# created by main window to keep info about world updated
# created by main window to keep info about world updated
class XNovaWorld(QThread):
    # Codes passed to signal() to wake the world thread's event loop.
    SIGNAL_QUIT = 0
    SIGNAL_RELOAD_PAGE = 1     # args: page_name
    SIGNAL_RENAME_PLANET = 2   # args: planet_id, new_name
    SIGNAL_RELOAD_PLANET = 3   # args: planet_id
    SIGNAL_BUILD_ITEM = 4      # args: bitem: XNPlanetBuildingItem, quantity, planet_id
    SIGNAL_BUILD_CANCEL = 5    # args: bitem: XNPlanetBuildingItem, planet_id
    SIGNAL_BUILD_DISMANTLE = 6  # args: bitem: XNPlanetBuildingItem, planet_id
    SIGNAL_GET_URL = 7         # args: url, referer: optional
    # testing signals ... ?
    SIGNAL_TEST_PARSE_GALAXY = 100  # args: galaxy, system

    # Qt signals emitted toward the GUI thread.
    # signal is emitted to report full world refresh progress;
    # str is a comment what is loading now, int is a progress percent [0..100]
    world_load_progress = pyqtSignal(str, int)
    # signal to be emitted when initial world loading is complete
    world_load_complete = pyqtSignal()
    # emitted when fleet has arrived at its destination
    flight_arrived = pyqtSignal(XNFlight)
    # emitted when building has completed on planet
    build_complete = pyqtSignal(XNPlanet, XNPlanetBuildingItem)
    # emitted when overview was reloaded (but not during world refresh)
    loaded_overview = pyqtSignal()
    # emitted when imperium was reloaded (but not during world refresh)
    loaded_imperium = pyqtSignal()
    # emitted when full planet was refreshed (but not during world refresh)
    loaded_planet = pyqtSignal(int)  # planet_id
    # emitted when any network request has started (but not during world refresh)
    net_request_started = pyqtSignal()
    # emitted when network request has finished (but not during world refresh)
    net_request_finished = pyqtSignal()
def __init__(self, parent=None):
    """Initialize the world-state thread with empty/default state.

    :param parent: optional QObject parent passed to QThread
    """
    super(XNovaWorld, self).__init__(parent)
    self._world_is_loading = False  # true is _full_refresh() is running (world is loading)
    # helpers
    self._page_dnl_times = dict()  # last time when a page was downloaded
    self._page_cache = XNovaPageCache()
    self._page_cache.save_load_encoding = 'UTF-8'
    self._page_downloader = XNovaPageDownload()
    # parsers, one per game page type
    self._parser_overview = OverviewParser()
    self._parser_userinfo = UserInfoParser()
    self._parser_curplanet = CurPlanetParser()
    self._parser_imperium = ImperiumParser()
    self._parser_planet_buildings_avail = PlanetBuildingsAvailParser()
    self._parser_planet_buildings_progress = PlanetBuildingsProgressParser()
    self._parser_planet_energy = PlanetEnergyResParser()
    self._parser_shipyard_ships_avail = ShipyardShipsAvailParser()
    self._parser_shipyard_progress = ShipyardBuildsInProgressParser()
    self._parser_researches_avail = ResearchAvailParser()
    self._parser_techtree = TechtreeParser()
    self._parser_fleetmax = FleetsMaxParser()
    # world/user info
    self._server_time = datetime.datetime.today()  # server time at last overview update
    # all we need to calc server time is actually time diff with our time:
    self._diff_with_server_time_secs = 0  # calculated as: our_time - server_time
    self._vacation_mode = False
    self._account = XNAccountInfo()
    self._flights = []
    self._cur_planet_id = 0
    self._cur_planet_name = ''
    self._cur_planet_coords = XNCoords(0, 0, 0)
    self._planets = []  # list of XNPlanet
    self._techtree = XNTechTree_instance()
    self._new_messages_count = 0
    self._get_bonus_url = None
    self._server_online_players = 0
    self._max_fleets_count = 0
    self._cur_fleets_count = 0
    # internal need
    self._net_errors_count = 0
    self._net_errors_list = []
    self._NET_ERRORS_MAX = 100
    # recursive mutex: lock() may be taken again by the same thread
    self._mutex = QMutex(QMutex.Recursive)
    self._signal_kwargs = dict()
    # thread identifiers, collected here mainly for debugging purposes
    # those are actually:
    #  - DWORD GetCurrentThreadId() in Windows
    #  - pthread_t pthread_current() in Linux
    self._maintid = 0
    self._worldtid = 0
    # settings (all intervals/lifetimes are in seconds)
    self._overview_update_interval = 120  # seconds
    self._galaxy_cache_lifetime = 60  # seconds
    self._planet_buildings_cache_lifetime = 60  # seconds
    self._planet_shipyard_cache_lifetime = 60  # seconds
    self._planet_research_cache_lifetime = 60  # seconds
def initialize(self, cookies_dict: dict):
    """
    Called from main window just before thread starts.
    Loads the on-disk page cache and prepares the authorized HTTP session.
    :param cookies_dict: dictionary with cookies for networking
    :return: None
    """
    # load cached pages (clean=True drops expired entries)
    self._page_cache.load_from_disk_cache(clean=True)
    # init network session with cookies for authorization
    self._page_downloader.set_cookies_from_dict(cookies_dict, do_save=True)
    # remember the calling (main) thread id for debugging
    self._maintid = self._gettid()
    logger.debug('initialized from tid={0}'.format(self._maintid))
def reload_config(self):
    """
    Reloads network config in page downloader and if network settings
    change, creates a new HTTP session, resulting in fact in new connection!
    :return:
    """
    old_proxy = self._page_downloader.proxy
    self._page_downloader.read_config()
    # only rebuild the session when the proxy setting actually changed
    if self._page_downloader.proxy != old_proxy:
        logger.debug('reload_config(): net proxy changed, recreating '
                     'HTTP session ({0} != {1})'.format(
                         old_proxy, self._page_downloader.proxy))
        self._page_downloader.construct_session()
        self._page_downloader.apply_useragent()
def lock(self, timeout_ms=None, raise_on_timeout=False):
    """
    Locks thread mutex to protect thread state vars
    :param timeout_ms: timeout ms to wait, default infinite
    :param raise_on_timeout: if true, TimeoutError will be thrown on timeout
    :return: True if all OK, False if not locked
    """
    if timeout_ms is None:
        self._mutex.lock()  # blocking, always succeeds
        return True
    if self._mutex.tryLock(timeout_ms):
        return True
    if raise_on_timeout:
        # TimeoutError (py3.3+) is a subclass of OSError, so callers that
        # caught OSError keep working; it is the more precise exception.
        raise TimeoutError('XNovaWorld: failed to get mutex lock, timeout was {0} ms.'.format(timeout_ms))
    return False
def unlock(self):
    """Release the mutex previously taken by lock()."""
    self._mutex.unlock()
def signal_quit(self):
    """Ask the world thread's event loop to stop (QThread.quit)."""
    self.quit()
def signal(self, signal_code=0, **kwargs):
    """
    Wake the world thread's event loop with the given signal code.
    Keyword arguments are stored (under lock) for the signal handler
    to pick up, e.g. signal(SIGNAL_RELOAD_PAGE, page_name='overview').
    """
    # **kwargs is always a dict (never None), so store it unconditionally;
    # the previous 'if kwargs is not None' check was dead code.
    self.lock()
    self._signal_kwargs = kwargs
    self.unlock()
    self.exit(signal_code)  # QEventLoop.exit(code) makes thread's
    # event loop to exit with code
###################################
# Getters
###################################
def get_account_info(self) -> XNAccountInfo:
    """Thread-safe accessor for the cached account information."""
    self.lock()
    try:
        return self._account
    finally:
        self.unlock()
def set_login_email(self, email: str):
    """Store the account's login e-mail address (thread-safe)."""
    self.lock()
    try:
        self._account.email = email
    finally:
        self.unlock()
def get_flights(self) -> list:
    """Return the current flights list.

    Non-blocking: if the mutex cannot be taken immediately (0 ms wait),
    an empty list is returned instead.
    """
    flights = []
    if self.lock(0):
        flights = self._flights
        self.unlock()
    return flights
def get_flight_remaining_time_secs(self, fl: XNFlight) -> int:
    """
    Calculates flight remaining time, adjusting time by difference
    between our time and server
    :param fl: flight
    :return: remaining time in seconds, or -1 on error
    """
    self.lock()
    try:
        time_offset = self._diff_with_server_time_secs
    finally:
        self.unlock()
    if fl.seconds_left == -1:  # unknown remaining time
        return -1
    return fl.seconds_left + time_offset
def get_current_server_time(self) -> datetime.datetime:
    """
    Calculates current server time (at this moment), using
    previously calculated time diff (checked at last update)
    :return: estimated server datetime
    """
    self.lock()
    try:
        offset = datetime.timedelta(seconds=self._diff_with_server_time_secs)
        # our clock minus the known (our - server) offset == server clock
        return datetime.datetime.today() - offset
    finally:
        self.unlock()
def get_planets(self, timeout_ms=None) -> list:
    """
    Gets list of planets (XNPlanet)
    :param timeout_ms: milliseconds to wait the lock, default infinite
    :return: list of [XNPlanet()]; empty list if the lock times out
    """
    planets = []
    if self.lock(timeout_ms):
        planets = self._planets
        self.unlock()
    return planets
def get_planet(self, planet_id) -> XNPlanet:
    """Find a planet by its id.

    :param planet_id: planet identifier to look up
    :return: the matching XNPlanet, or None if not found
    """
    for pl in self.get_planets():
        if pl.planet_id == planet_id:
            return pl
    # logger.warn() is a deprecated alias of warning(); use the real name
    logger.warning('Could not find planet with id: {0}'.format(planet_id))
    return None
def get_new_messages_count(self) -> int:
    """Return the number of unread in-game messages (thread-safe)."""
    self.lock()
    try:
        return self._new_messages_count
    finally:
        self.unlock()
def get_online_players(self) -> int:
    """Return the number of players currently online (thread-safe)."""
    self.lock()
    ret = self._server_online_players
    self.unlock()
    return ret
def get_fleets_count(self) -> list:
    """
    Get current/maximum fleets count
    :return: list[0] = cur, list[1] = max
    """
    self.lock()
    try:
        return [self._cur_fleets_count, self._max_fleets_count]
    finally:
        self.unlock()
def get_bonus_url(self) -> str:
    """Return the stored daily-bonus URL.

    Non-blocking: returns '' when the lock is busy.  Note the stored
    value itself may be None when no bonus URL was parsed.
    """
    url = ''
    if self.lock(0):
        url = self._get_bonus_url
        self.unlock()
    return url
def clear_bonus_url(self):
    """Forget the stored daily-bonus URL (thread-safe)."""
    self.lock()
    try:
        self._get_bonus_url = None
    finally:
        self.unlock()
################################################################################
# this should re-calculate all user's object statuses
# like fleets in flight, buildings in construction,
# reserches in progress, etc, ...
def world_tick(self):
    """Advance world state by one second.

    Re-calculates all time-dependent object states: fleets in flight,
    buildings in construction, researches in progress, and may trigger
    a periodic overview reload.
    NOTE: this is called from the GUI thread, hence the locking.
    """
    self.lock()
    self._world_tick_flights()
    self._world_tick_planets()
    self._maybe_refresh_overview()
    self.unlock()
# just counts remaining time for flights,
# removes finished flights and emits signal
# 'flight_arrived' for every finished flight
def _world_tick_flights(self):
    """Decrement remaining time of every flight by one second.

    Finished flights are removed from the front of the list and a
    'flight_arrived' signal is emitted for each.  Caller must hold
    the lock.
    """
    # logger.debug('tick: server time diff: {0}'.format(self._diff_with_server_time_secs))  # 0:00:16.390197
    # pass 1: decrement and count flights that just reached zero
    finished_flights_count = 0
    for fl in self._flights:
        if fl.seconds_left == -1:
            # -1 means "unknown"; a flight must never tick in that state
            raise ValueError('Flight seconds left is None: {0}'.format(str(fl)))
        fl.seconds_left -= 1
        if fl.seconds_left <= 0:
            fl.seconds_left = 0
            logger.debug('==== Flight considered complete: {0}'.format(str(fl)))
            # logger.debug('==== additional debug info:')
            # logger.debug('==== - diff with server time: {0}'.format(self._diff_with_server_time_secs))
            # logger.debug('==== - current time: {0}'.format(datetime.datetime.today()))
            # logger.debug('==== - current server time: {0}'.format(self.get_current_server_time()))
            finished_flights_count += 1
    # pass 2: pop finished flights off the head of the list; flights are
    # time-ordered, so the completed ones are always the first entries
    if finished_flights_count > 0:
        # logger.debug('==== Removing total {0} arrived flights'.format(finished_flights_count))
        for irow in range(finished_flights_count):
            try:
                # finished_flight = self._flights[irow]
                # item-to-delete from python list will always have index 0,
                # because we need to delete the first item every time
                finished_flight = self._flights[0]
                # del self._flights[0]
                self._flights.remove(finished_flight)
                # notify the GUI about the arrival
                self.flight_arrived.emit(finished_flight)
            except IndexError:
                # should never happen
                logger.error('IndexError while clearing finished flights: ')
                logger.error(' deleting index {0}, while total list len: {1}'.format(
                    0, len(self._flights)))
    # end _world_tick_flights()
def _world_tick_planets(self):
    """
    Advance per-planet state by one second:
    - increase planet resources at the hourly production rate
    - move buildings / shipyard / research / fleet-research progress
    Caller must hold the lock.
    :return: None
    """
    for planet in self._planets:
        self._tick_planet_resources(planet)
        # buildings, researches and fleet-researches share the same
        # "leveled item" tick logic, deduplicated into one helper
        self._tick_level_items(planet, planet.buildings_items)
        self._tick_shipyard(planet)
        self._tick_level_items(planet, planet.research_items)
        self._tick_level_items(planet, planet.researchfleet_items)

def _tick_planet_resources(self, planet):
    """Add one second worth of resource production to the planet."""
    planet.res_current.met += planet.res_per_hour.met / 3600
    planet.res_current.cry += planet.res_per_hour.cry / 3600
    planet.res_current.deit += planet.res_per_hour.deit / 3600

def _tick_level_items(self, planet, items):
    """Tick leveled build items (buildings/researches) that are in progress.

    On completion the item is marked stopped, its level is increased and
    'build_complete' is emitted.
    """
    for bitem in items:
        if bitem.is_in_progress():
            bitem.seconds_left -= 1
            if bitem.seconds_left <= 0:
                bitem.seconds_left = -1
                bitem.dt_end = None  # mark as stopped
                bitem.level += 1  # level increased
                self.build_complete.emit(planet, bitem)

def _tick_shipyard(self, planet):
    """Tick the shipyard queue; completed items are removed and reported."""
    completed = []
    for bitem in planet.shipyard_progress_items:
        bitem.seconds_left -= 1
        if bitem.seconds_left <= 0:
            completed.append(bitem)
        break  # only one (first) item is IN PROGRESS, others WAIT !!!
    for bitem in completed:
        planet.shipyard_progress_items.remove(bitem)
        self.build_complete.emit(planet, bitem)
def _maybe_refresh_overview(self):
    """
    Can trigger signal to refresh overview page every
    'self._overview_update_interval' seconds.
    Called from self._world_tick(), which holds the lock already
    """
    dt_last = self._page_dnl_times.get('overview')
    if dt_last is None:
        return  # overview was never downloaded yet
    secs_ago = int((datetime.datetime.today() - dt_last).total_seconds())
    if secs_ago >= self._overview_update_interval:
        logger.debug('_maybe_refresh_overview() trigger update: '
                     '{0} secs ago.'.format(secs_ago))
        self.signal(self.SIGNAL_RELOAD_PAGE, page_name='overview')
################################################################################
def on_page_downloaded(self, page_name: str):
logger.debug('on_page_downloaded( "{0}" ) tid={1}'.format(page_name, self._gettid_s()))
# cache has the page inside before the signal was emitted!
# we can get page content from cache
page_content = self._page_cache.get_page(page_name)
if page_content is None:
raise ValueError('This should not ever happen!')
# get current date/time
dt_now = datetime.datetime.today()
self._page_dnl_times[page_name] = dt_now # save last download time for page
# dispatch parser and merge data
if page_name == 'overview':
self._parser_overview.clear()
self._parser_overview.account = self._account # store previous info
self._parser_overview.parse_page_content(page_content)
self._account = self._parser_overview.account # get new info
self._flights = self._parser_overview.flights
# get server time also calculate time diff
self._server_time = self._parser_overview.server_time
dt_diff = dt_now - self._server_time
self._diff_with_server_time_secs = int(dt_diff.total_seconds())
self._new_messages_count = self._parser_overview.new_messages_count
self._vacation_mode = self._parser_overview.in_RO
self._server_online_players = self._parser_overview.online_players
self._get_bonus_url = self._parser_overview.bonus_url
# run also cur planet parser on the same content
self._parser_curplanet.parse_page_content(page_content)
self._cur_planet_id = self._parser_curplanet.cur_planet_id
self._cur_planet_name = self._parser_curplanet.cur_planet_name
self._cur_planet_coords = self._parser_curplanet.cur_planet_coords
self._internal_set_current_planet() # it may have changed
# emit signal that we've loaded overview, but not during world update
if not self._world_is_loading:
self.loaded_overview.emit()
elif page_name == 'self_user_info':
self._parser_userinfo.parse_page_content(page_content)
self._account.scores.buildings = self._parser_userinfo.buildings
self._account.scores.buildings_rank = self._parser_userinfo.buildings_rank
self._account.scores.fleet = self._parser_userinfo.fleet
self._account.scores.fleet_rank = self._parser_userinfo.fleet_rank
self._account.scores.defense = self._parser_userinfo.defense
self._account.scores.defense_rank = self._parser_userinfo.defense_rank
self._account.scores.science = self._parser_userinfo.science
self._account.scores.science_rank = self._parser_userinfo.science_rank
self._account.scores.total = self._parser_userinfo.total
self._account.scores.rank = self._parser_userinfo.rank
self._account.main_planet_name = self._parser_userinfo.main_planet_name
self._account.main_planet_coords = self._parser_userinfo.main_planet_coords
self._account.alliance_name = self._parser_userinfo.alliance_name
elif page_name == 'imperium':
self._parser_imperium.clear()
self._parser_imperium.parse_page_content(page_content)
self._planets = self._parser_imperium.planets
# since we've overwritten the whole planets array, we need to
# write current planet into it again
self._internal_set_current_planet()
# emit signal that we've loaded overview, but not during world update
if not self._world_is_loading:
self.loaded_imperium.emit()
elif page_name == 'techtree':
self._parser_techtree.clear()
self._parser_techtree.parse_page_content(page_content)
# store techtree, if there is successful parse of anything
if len(self._parser_techtree.techtree) > 0:
self._techtree.init_techtree(self._parser_techtree.techtree)
elif page_name == 'fleet':
self._parser_fleetmax.clear()
self._parser_fleetmax.parse_page_content(page_content)
self._cur_fleets_count = self._parser_fleetmax.fleets_cur
self._max_fleets_count = self._parser_fleetmax.fleets_max
elif page_name.startswith('buildings_'):
try:
m = re.match(r'buildings_(\d+)', page_name)
planet_id = int(m.group(1))
planet = self.get_planet(planet_id)
# get available buildings to build
self._parser_planet_buildings_avail.clear()
self._parser_planet_buildings_avail.parse_page_content(page_content)
# get buildings in progress on the same page
self._parser_planet_buildings_progress.clear()
self._parser_planet_buildings_progress.parse_page_content(page_content)
# get planet energy info, res cur/max/prod
self._parser_planet_energy.clear()
self._parser_planet_energy.parse_page_content(page_content)
if planet is not None:
planet.buildings_items = self._parser_planet_buildings_avail.builds_avail
num_added = len(self._parser_planet_buildings_progress.builds_in_progress)
if num_added > 0:
for bip in self._parser_planet_buildings_progress.builds_in_progress:
planet.add_build_in_progress(bip)
logger.debug('Buildings queue for planet {0}: added {1}'.format(planet.name, num_added))
# save planet energy info, do not overwite with zeros
if self._parser_planet_energy.energy_left > 0:
planet.energy.energy_left = self._parser_planet_energy.energy_left
if self._parser_planet_energy.energy_total > 0:
planet.energy.energy_total = self._parser_planet_energy.energy_total
# save planet resource info
if len(self._parser_planet_energy.res_current) > 0:
planet.res_current = self._parser_planet_energy.res_current
if len(self._parser_planet_energy.res_max_silos) > 0:
planet.res_max_silos = self._parser_planet_energy.res_max_silos
if len(self._parser_planet_energy.res_per_hour) > 0:
planet.res_per_hour = self._parser_planet_energy.res_per_hour
except ValueError: # failed to convert to int
logger.exception('Failed to convert planet_id to int, page_name=[{0}]'.format(page_name))
except AttributeError: # no match
logger.exception('Invalid format for page_name=[{0}], expected buildings_123456'.format(page_name))
elif page_name.startswith('shipyard_'):
try:
m = re.match(r'shipyard_(\d+)', page_name)
planet_id = int(m.group(1))
planet = self.get_planet(planet_id)
# go parse
self._parser_shipyard_ships_avail.clear()
self._parser_shipyard_ships_avail.parse_page_content(page_content)
self._parser_shipyard_progress.clear()
self._parser_shipyard_progress.server_time = self._server_time
self._parser_shipyard_progress.parse_page_content(page_content)
# get planet energy info
self._parser_planet_energy.clear()
self._parser_planet_energy.parse_page_content(page_content)
if planet is not None:
planet.shipyard_tems = self._parser_shipyard_ships_avail.ships_avail
planet.shipyard_progress_items = self._parser_shipyard_progress.shipyard_progress_items
if len(self._parser_shipyard_progress.shipyard_progress_items) > 0:
logger.debug('planet [{0}] has {1} items in shipyard queue'.format(
planet.name, len(self._parser_shipyard_progress.shipyard_progress_items)))
# save planet energy info, but do not overwrite with zeros
# if there is no shipyard @ planet, no energy info will be on the page =(
if self._parser_planet_energy.energy_left > 0:
planet.energy.energy_left = self._parser_planet_energy.energy_left
if self._parser_planet_energy.energy_total > 0:
planet.energy.energy_total = self._parser_planet_energy.energy_total
# save planet resource info
if len(self._parser_planet_energy.res_current) > 0:
planet.res_current = self._parser_planet_energy.res_current
if len(self._parser_planet_energy.res_max_silos) > 0:
planet.res_max_silos = self._parser_planet_energy.res_max_silos
if len(self._parser_planet_energy.res_per_hour) > 0:
planet.res_per_hour = self._parser_planet_energy.res_per_hour
except AttributeError: # no match
logger.exception('Invalid format for page_name=[{0}], expected shipyard_123456'.format(page_name))
except ValueError: # failed to convert to int
logger.exception('Failed to convert planet_id to int, page_name=[{0}]'.format(page_name))
elif page_name.startswith('defense_'):
try:
m = re.match(r'defense_(\d+)', page_name)
planet_id = int(m.group(1))
planet = self.get_planet(planet_id)
# go parse
self._parser_shipyard_ships_avail.clear()
self._parser_shipyard_ships_avail.parse_page_content(page_content)
self._parser_shipyard_progress.clear()
self._parser_shipyard_progress.server_time = self._server_time
self._parser_shipyard_progress.parse_page_content(page_content)
# get planet energy info
self._parser_planet_energy.clear()
self._parser_planet_energy.parse_page_content(page_content)
if planet is not None:
# shipyard parser ships_avail can also parse planet defenses avail
planet.defense_items = self._parser_shipyard_ships_avail.ships_avail
# even in defense page, ships build queue is the same as in shipyard page
planet.shipyard_progress_items = self._parser_shipyard_progress.shipyard_progress_items
if len(self._parser_shipyard_progress.shipyard_progress_items) > 0:
logger.debug('planet [{0}] has {0} items in shipyard queue'.format(
planet.name, len(self._parser_shipyard_progress.shipyard_progress_items)))
# save planet energy info, but do not overwrite with zeros
# if there is no shipyard @ planet, no energy info will be on the page =(
if self._parser_planet_energy.energy_left > 0:
planet.energy.energy_left = self._parser_planet_energy.energy_left
if self._parser_planet_energy.energy_total > 0:
planet.energy.energy_total = self._parser_planet_energy.energy_total
# save planet resource info
if len(self._parser_planet_energy.res_current) > 0:
planet.res_current = self._parser_planet_energy.res_current
if len(self._parser_planet_energy.res_max_silos) > 0:
planet.res_max_silos = self._parser_planet_energy.res_max_silos
if len(self._parser_planet_energy.res_per_hour) > 0:
planet.res_per_hour = self._parser_planet_energy.res_per_hour
except AttributeError: # no match
logger.exception('Invalid format for page_name=[{0}], expected defense_123456'.format(page_name))
except ValueError: # failed to convert to int
logger.exception('Failed to convert planet_id to int, page_name=[{0}]'.format(page_name))
elif page_name.startswith('research_'):
try:
m = re.match(r'research_(\d+)', page_name)
planet_id = int(m.group(1))
planet = self.get_planet(planet_id)
# go parse
self._parser_researches_avail.clear()
self._parser_researches_avail.server_time = self._server_time
self._parser_researches_avail.set_parsing_research_fleet(False)
self._parser_researches_avail.parse_page_content(page_content)
# get planet energy info
self._parser_planet_energy.clear()
self._parser_planet_energy.parse_page_content(page_content)
if planet is not None:
planet.research_items = self._parser_researches_avail.researches_avail
if len(self._parser_researches_avail.researches_avail) > 0:
logger.info('Planet {0} has {1} researches avail'.format(
planet.name, len(self._parser_researches_avail.researches_avail)))
# save planet energy info, but do not overwrite with zeros
# if there is no lab @ planet, no energy info will be on the page =(
if self._parser_planet_energy.energy_left > 0:
planet.energy.energy_left = self._parser_planet_energy.energy_left
if self._parser_planet_energy.energy_total > 0:
planet.energy.energy_total = self._parser_planet_energy.energy_total
# save planet resource info
if len(self._parser_planet_energy.res_current) > 0:
planet.res_current = self._parser_planet_energy.res_current
if len(self._parser_planet_energy.res_max_silos) > 0:
planet.res_max_silos = self._parser_planet_energy.res_max_silos
if len(self._parser_planet_energy.res_per_hour) > 0:
planet.res_per_hour = self._parser_planet_energy.res_per_hour
except AttributeError: # no match
logger.exception('Invalid format for page_name=[{0}], '
'expected research_123456'.format(page_name))
except ValueError: # failed to convert to int
logger.exception('Failed to convert planet_id to int, '
'page_name=[{0}]'.format(page_name))
elif page_name.startswith('researchfleet_'):
try:
m = re.match(r'researchfleet_(\d+)', page_name)
planet_id = int(m.group(1))
planet = self.get_planet(planet_id)
# go parse
self._parser_researches_avail.clear()
self._parser_researches_avail.server_time = self._server_time
self._parser_researches_avail.set_parsing_research_fleet(True)
self._parser_researches_avail.parse_page_content(page_content)
# get planet energy info
self._parser_planet_energy.clear()
self._parser_planet_energy.parse_page_content(page_content)
if planet is not None:
planet.researchfleet_items = self._parser_researches_avail.researches_avail
if len(self._parser_researches_avail.researches_avail) > 0:
logger.info('Planet {0} has {1} fleet researches avail'.format(
planet.name, len(self._parser_researches_avail.researches_avail)))
# save planet energy info, but do not overwrite with zeros
# if there is no lab @ planet, no energy info will be on the page =(
if self._parser_planet_energy.energy_left > 0:
planet.energy.energy_left = self._parser_planet_energy.energy_left
if self._parser_planet_energy.energy_total > 0:
planet.energy.energy_total = self._parser_planet_energy.energy_total
# save planet resource info
if len(self._parser_planet_energy.res_current) > 0:
planet.res_current = self._parser_planet_energy.res_current
if len(self._parser_planet_energy.res_max_silos) > 0:
planet.res_max_silos = self._parser_planet_energy.res_max_silos
if len(self._parser_planet_energy.res_per_hour) > 0:
planet.res_per_hour = self._parser_planet_energy.res_per_hour
except AttributeError: # no match
logger.exception('Invalid format for page_name=[{0}], '
'expected researchfleet_123456'.format(page_name))
except ValueError: # failed to convert to int
logger.exception('Failed to convert planet_id to int, '
'page_name=[{0}]'.format(page_name))
else:
logger.warn('on_page_downloaded(): Unhandled page name [{0}]. '
'This may be not a problem, but...'.format(page_name))
def on_signal_reload_page(self):
if 'page_name' in self._signal_kwargs:
page_name = self._signal_kwargs['page_name']
logger.debug('on_reload_page(): reloading {0}'.format(page_name))
self.lock()
self._get_page(page_name, max_cache_lifetime=1, force_download=True)
self.unlock()
def on_signal_rename_planet(self):
if ('planet_id' in self._signal_kwargs) and ('new_name' in self._signal_kwargs):
planet_id = int(self._signal_kwargs['planet_id'])
new_name = self._signal_kwargs['new_name']
# go go go
logger.debug('renaming planet #{0} to [{1}]'.format(planet_id, new_name))
self.lock()
# first need to ensure that this planet is current
self._download_planet_overview(planet_id, force_download=True)
# then trigger a rename operation
self._request_rename_planet(planet_id, new_name)
# force imperium update to read new planet name
self._get_page('imperium', 1, force_download=True)
self.unlock()
def on_signal_reload_planet(self):
if 'planet_id' in self._signal_kwargs:
planet_id = int(self._signal_kwargs['planet_id'])
logger.debug('reloading planet #{0}'.format(planet_id))
self.lock()
self._download_planet(planet_id, delays_msec=250, force_download=True)
self.unlock()
logger.debug('reload planet #{0} done'.format(planet_id))
def on_signal_get_url(self):
if 'url' in self._signal_kwargs:
url = self._signal_kwargs['url']
referer = None
if 'referer' in self._signal_kwargs:
referer = self._signal_kwargs['referer']
logger.debug('Got signal to load url: [{0}], referer=[{1}]'.format(
url, referer))
self.lock()
self._get_page_url(None, url, max_cache_lifetime=0,
force_download=True, referer=referer)
self.unlock()
def on_signal_test_parse_galaxy(self):
if ('galaxy' in self._signal_kwargs) and ('system' in self._signal_kwargs):
gal_no = self._signal_kwargs['galaxy']
sys_no = self._signal_kwargs['system']
logger.debug('downloading galaxy page {0},{1}'.format(gal_no, sys_no))
page_content = self._download_galaxy_page(gal_no, sys_no, force_download=True)
if page_content is not None:
gp = GalaxyParser()
gp.clear()
gp.parse_page_content(page_content)
logger.debug(gp.galaxy_rows)
def on_signal_build_item(self):
if ('bitem' in self._signal_kwargs) and ('planet_id' in self._signal_kwargs) \
and ('quantity' in self._signal_kwargs):
bitem = self._signal_kwargs['bitem']
planet_id = int(self._signal_kwargs['planet_id'])
quantity = int(self._signal_kwargs['quantity'])
self.lock()
# check that current planet is the same as requested to build on
# (it should be the same)
if self._cur_planet_id != planet_id:
logger.debug('Current planet ({}) is not {}, force '
'change current planet'.format(
self._cur_planet_id, planet_id))
self._download_planet_overview(planet_id, force_download=True)
self._request_build_item(planet_id, bitem, quantity)
self.unlock()
def on_signal_build_cancel(self):
if ('bitem' in self._signal_kwargs) and ('planet_id' in self._signal_kwargs):
bitem = self._signal_kwargs['bitem']
planet_id = int(self._signal_kwargs['planet_id'])
self.lock()
# check that current planet is the same as requested to build on
# (it should be the same)
if self._cur_planet_id != planet_id:
logger.debug('Current planet ({}) is not {}, force '
'change current planet'.format(
self._cur_planet_id, planet_id))
self._download_planet_overview(planet_id)
self._request_build_cancel(planet_id, bitem)
self.unlock()
def on_signal_build_dismantle(self):
if ('bitem' in self._signal_kwargs) and ('planet_id' in self._signal_kwargs):
bitem = self._signal_kwargs['bitem']
planet_id = int(self._signal_kwargs['planet_id'])
self.lock()
# check that current planet is the same as requested to build on
# (it should be the same)
if self._cur_planet_id != planet_id:
logger.debug('Current planet ({}) is not {}, force '
'change current planet'.format(
self._cur_planet_id, planet_id))
self._download_planet_overview(planet_id)
self._request_build_dismantle(planet_id, bitem)
self.unlock()
def _internal_set_current_planet(self):
"""
Just updates internal planets array with information
about which of them is current one
:return: None
"""
for pl in self._planets:
if pl.planet_id == self._cur_planet_id:
pl.is_current = True
else:
pl.is_current = False
def _inc_network_errors(self):
"""
Error handler, called when network error has occured,
when page could not be downloaded. Raises RuntimeError when
too many errors happened.
:return:
"""
# increase errors count
self._net_errors_count += 1
# store error text
if self._page_downloader.error_str != '':
self._net_errors_list.append(self._page_downloader.error_str)
logger.error('net error happened: [{0}], total errors count: {1}'.format(
self._page_downloader.error_str, self._net_errors_count))
if self._net_errors_count > self._NET_ERRORS_MAX:
raise RuntimeError('Too many network errors: {0}!'.format(self._net_errors_count))
# internal helper, converts page identifier to url path
def _page_name_to_url_path(self, page_name: str):
urls_dict = dict()
urls_dict['overview'] = '?set=overview'
urls_dict['imperium'] = '?set=imperium'
urls_dict['techtree'] = '?set=techtree'
urls_dict['fleet'] = '?set=fleet'
sub_url = None
if page_name in urls_dict:
return urls_dict[page_name]
elif page_name == 'self_user_info':
# special page case, dynamic URL, depends on user id
# http://uni4.xnova.su/?set=players&id=71995
if self._account.id == 0:
logger.warn('requested account info page, but account id is 0!')
return None
sub_url = '?set=players&id={0}'.format(self._account.id)
else:
logger.warn('unknown page name requested: {0}'.format(page_name))
return sub_url
def _get_page(self, page_name, max_cache_lifetime=None, force_download=False):
"""
Gets page from cache or from server only by page name.
Converts page_name to page URL, using _page_name_to_url_path().
First tries to get cached page from cache using page_name as key.
If there is no cached page there, or it is expired, downloads from network.
Then calls self.on_page_downloaded() to automatically parse requested page.
:param page_name: 'name' used as key in pages cache
:param max_cache_lifetime: cache timeout
:param force_download:
:return: page contents as str, or None on error
"""
page_url = self._page_name_to_url_path(page_name)
if not page_url:
logger.error('Failed to convert page_name=[{0}] to url!'.format(page_name))
return None
return self._get_page_url(page_name, page_url, max_cache_lifetime, force_download)
    def _get_page_url(self, page_name, page_url,
                      max_cache_lifetime=None,
                      force_download=False,
                      referer=None):
        """
        For internal needs, downloads url from server using HTTP GET.
        First tries to get cached page from cache using page_name as key.
        If there is no cached page there, or it is expired, downloads from network.
        Then calls self.on_page_downloaded() to automatically parse requested page.
        If force_download is True, max_cache_lifetime is ignored.
        (This method's return value is ignored for now)
        :param page_name: 'name' of page to use as key when stored to cache, if None - cache disabled
        :param page_url: URL to download in HTTP GET request
        :param max_cache_lifetime: max acceptable age of a cached copy, in seconds
        :param force_download: if True, skip the cache lookup and always hit the network
        :param referer: set this to str value to force Referer header before request
        :return: page contents (str) or None on error
        """
        page_content = None
        if not force_download:
            # try to get cached page (default)
            page_content = self._page_cache.get_page(page_name, max_cache_lifetime)
            if page_content is not None:
                logger.debug('... got page "{0}" from cache! (lifetime < {1})'.format(
                    page_name, max_cache_lifetime))
        if page_content is None:
            # cache miss (or download was forced) - go to the network
            # signal that we are starting network request
            if not self._world_is_loading:
                self.net_request_started.emit()
            # set referer, if set
            if referer is not None:
                self._page_downloader.set_referer(referer)
            # try to download
            page_content = self._page_downloader.download_url_path(page_url)
            # signal that we have finished network request
            if not self._world_is_loading:
                self.net_request_finished.emit()
            # save in cache, only if content and page_name is present
            if (page_content is not None) and (page_name is not None):
                self._page_cache.set_page(page_name, page_content)
            # check for download error
            if page_content is None: # download error happened
                self._inc_network_errors()
        # parse page content independently if it was read from cache or by network from server
        if (page_content is not None) and (page_name is not None):
            self.on_page_downloaded(page_name) # process downloaded page
        return page_content
def _post_page_url(self, page_url: str, post_data: dict=None, referer: str=None):
"""
For internal needs, sends a POST request, and handles possible error returns
:param page_url: URL to send HTTP POST to
:param post_data: dict with post data key-value pairs
:param referer: if set, use this as value for HTTP Referer header
:return: response content, or None on error
"""
# signal that we are starting network request
if not self._world_is_loading:
self.net_request_started.emit()
page_content = self._page_downloader.post(page_url, post_data=post_data, referer=referer)
# signal that we have finished network request
if not self._world_is_loading:
self.net_request_finished.emit()
# handle errors
if page_content is None:
self._inc_network_errors()
return page_content
def _download_galaxy_page(self, galaxy_no, sys_no, force_download=False):
# 'http://uni4.xnova.su/?set=galaxy&r=3&galaxy=3&system=130'
page_url = '?set=galaxy&r=3&galaxy={0}&system={1}'.format(galaxy_no, sys_no)
page_name = 'galaxy_{0}_{1}'.format(galaxy_no, sys_no)
# if force_download is True, cache_lifetime is ignored
return self._get_page_url(page_name, page_url,
self._galaxy_cache_lifetime, force_download)
def _download_image(self, img_path: str):
img_bytes = self._page_downloader.download_url_path(img_path, return_binary=True)
if img_bytes is None:
logger.error('image dnl failed: [{0}]'.format(img_path))
self._inc_network_errors()
return
self._page_cache.save_image(img_path, img_bytes)
def _download_planet_overview(self, planet_id: int, force_download=False):
# url to change current planet is:
# http://uni4.xnova.su/?set=overview&cp=60668&re=0
page_url = '?set=overview&cp={0}&re=0'.format(planet_id)
page_name = 'overview'
return self._get_page_url(page_name, page_url, 1, force_download)
def _download_planet_buildings(self, planet_id: int, force_download=False):
page_url = '?set=buildings&cp={0}&re=0'.format(planet_id)
page_name = 'buildings_{0}'.format(planet_id)
return self._get_page_url(page_name, page_url,
self._planet_buildings_cache_lifetime, force_download)
def _download_planet_shipyard(self, planet_id: int, force_download=False):
# url to change current planet is:
# http://uni4.xnova.su/?set=buildings&mode=fleet&cp=60668&re=0
page_url = '?set=buildings&mode=fleet&cp={0}&re=0'.format(planet_id)
page_name = 'shipyard_{0}'.format(planet_id)
return self._get_page_url(page_name, page_url,
self._planet_shipyard_cache_lifetime, force_download)
def _download_planet_defense(self, planet_id: int, force_download=False):
# url to change current planet is:
# http://uni4.xnova.su/?set=buildings&mode=defense&cp=60668&re=0
page_url = '?set=buildings&mode=defense&cp={0}&re=0'.format(planet_id)
page_name = 'defense_{0}'.format(planet_id)
return self._get_page_url(page_name, page_url,
self._planet_shipyard_cache_lifetime, force_download)
def _download_planet_researches(self, planet_id: int, force_download=False):
# url: http://uni4.xnova.su/?set=buildings&mode=research&cp=57064&re=0
page_url = '?set=buildings&mode=research&cp={0}&re=0'.format(planet_id)
page_name = 'research_{0}'.format(planet_id)
return self._get_page_url(page_name, page_url,
self._planet_research_cache_lifetime, force_download)
def _download_planet_researches_fleet(self, planet_id: int, force_download=False):
# url: http://uni4.xnova.su/?set=buildings&mode=research_fleet&cp=57064&re=0
page_url = '?set=buildings&mode=research_fleet&cp={0}&re=0'.format(planet_id)
page_name = 'researchfleet_{0}'.format(planet_id)
return self._get_page_url(page_name, page_url,
self._planet_research_cache_lifetime, force_download)
def _download_planet(self, planet_id: int, delays_msec: int=None, force_download: bool=False):
# planet buildings in progress
self._download_planet_buildings(planet_id, force_download)
if delays_msec is not None:
self.msleep(delays_msec)
# planet researches and in progress
self._download_planet_researches(planet_id, force_download)
if delays_msec is not None:
self.msleep(delays_msec)
# planet factory researches and in progress
self._download_planet_researches_fleet(planet_id, force_download)
if delays_msec is not None:
self.msleep(delays_msec)
# planet shipyard/defense builds in progress
self._download_planet_shipyard(planet_id, force_download)
if delays_msec is not None:
self.msleep(delays_msec)
self._download_planet_defense(planet_id, force_download)
if delays_msec is not None:
self.msleep(delays_msec)
if not self._world_is_loading:
self.loaded_planet.emit(planet_id)
def _request_rename_planet(self, planet_id: int, new_name: str):
post_url = '?set=overview&mode=renameplanet&pl={0}'.format(planet_id)
post_data = dict()
post_data['action'] = 'Сменить название'
post_data['newname'] = new_name
referer = 'http://{0}/?set=overview&mode=renameplanet'.format(self._page_downloader.xnova_url)
self._post_page_url(post_url, post_data, referer)
logger.debug('Rename planet to [{0}] complete'.format(new_name))
    def _request_build_item(self, planet_id: int, bitem: XNPlanetBuildingItem, quantity: int):
        """
        Send the server request that starts building an item on a planet.
        Buildings/researches use a GET on the item's build_link; shipyard
        items use a POST with the requested quantity.
        :param planet_id: planet to build on (must be the server's current planet)
        :param bitem: item to build; must carry a build_link or a gid
        :param quantity: number of units to build (shipyard items only)
        """
        # NOTE(review): this logs bitem.quantity, not the 'quantity' argument
        # (see the shipyard branch below) - confirm they are meant to match
        logger.debug('Request to build: {0} lv {1} x {2} on planet {3}, build_link = [{4}]'.format(
            bitem.name, bitem.level+1, quantity, planet_id, bitem.build_link))
        if bitem.is_building_item or bitem.is_research_item or bitem.is_researchfleet_item:
            if bitem.build_link is None or (bitem.build_link == ''):
                logger.warn('bitem build_link is empty, cannot build!')
                return
            # construct page name and referer
            # successful request to build item redirects to buildings page
            page_name = None
            referer = ''
            if bitem.is_building_item:
                page_name = 'buildings_{0}'.format(planet_id)
                referer = 'http://{0}/?set=buildings'.format(
                    self._page_downloader.xnova_url)
            elif bitem.is_research_item:
                page_name = 'research_{0}'.format(planet_id)
                referer = 'http://{0}/?set=buildings&mode=research'.format(
                    self._page_downloader.xnova_url)
            elif bitem.is_researchfleet_item:
                referer = 'http://{0}/?set=buildings&mode=research_fleet'.format(
                    self._page_downloader.xnova_url)
                page_name = 'researchfleet_{0}'.format(planet_id)
            # send request; caching the response under page_name lets
            # on_page_downloaded() pick up the refreshed queue right away
            self._get_page_url(page_name, bitem.build_link,
                               max_cache_lifetime=0, force_download=True,
                               referer=referer)
        elif bitem.is_shipyard_item:
            logger.debug('Build shipyard item {0} x {1}, gid={2}'.format(
                bitem.name, bitem.quantity, bitem.gid))
            if bitem.gid <= 0:
                logger.warn('Invalid bitem gid: {0}! Skippping!'.format(bitem.gid))
                return
            if quantity <= 0:
                logger.warn('Invalid quantity: {0}! Skippping!'.format(quantity))
                return
            post_url = '?set=buildings&mode=fleet'
            post_data = dict()
            # the server expects the quantity keyed by the item's game id
            param_name = 'fmenge[{0}]'.format(bitem.gid)
            post_data[param_name] = quantity
            referer = 'http://{0}/?set=buildings&mode=fleet'.format(
                self._page_downloader.xnova_url)
            self._post_page_url(post_url, post_data, referer)
            # automatically download planet shipyard after this
            self._download_planet_shipyard(planet_id, force_download=True)
def _request_build_cancel(self, planet_id: int, bitem: XNPlanetBuildingItem):
logger.debug('Request to cancel build: {0} on planet {1}, remove_link = [{2}]'.format(
bitem.name, planet_id, bitem.remove_link))
if bitem.is_building_item or bitem.is_research_item or bitem.is_researchfleet_item:
if bitem.remove_link is None or (bitem.remove_link == ''):
logger.warn('bitem remove_link is empty, cannot cancel build!')
return
# construct page name and referer
# successful request to cancel build item redirects to buildings page
page_name = None
referer = ''
if bitem.is_building_item:
page_name = 'buildings_{0}'.format(planet_id)
referer = '?set=buildings'
elif bitem.is_research_item:
page_name = 'research_{0}'.format(planet_id)
referer = '?set=buildings&mode=research'
elif bitem.is_researchfleet_item:
page_name = 'researchfleet_{0}'.format(planet_id)
referer = '?set=buildings&mode=research_fleet'
# send request
self._get_page_url(page_name, bitem.remove_link,
max_cache_lifetime=0, force_download=True,
referer=referer)
else:
logger.warn('Cannot cancel shipyard item: {0}'.format(bitem))
def _request_build_dismantle(self, planet_id: int, bitem: XNPlanetBuildingItem):
logger.debug('Request to downgrade building: {0} on planet {1}, dismantle_link = [{2}]'.format(
bitem.name, planet_id, bitem.dismantle_link))
if bitem.is_building_item:
if bitem.dismantle_link is None or (bitem.dismantle_link == ''):
logger.warn('bitem dismantle_link is empty, cannot dismantle build!')
return
# construct page name and referer
# successful request to cancel build item redirects to buildings page
page_name = 'buildings_{0}'.format(planet_id)
referer = '?set=buildings'
# send request
self._get_page_url(page_name, bitem.dismantle_link,
max_cache_lifetime=0, force_download=True,
referer=referer)
else:
logger.warn('Can only dismantle buildings items! bitem={0}'.format(bitem))
    # internal, called from thread on first load
    def _full_refresh(self):
        """
        Download the whole world state from scratch: global pages, account
        info and every planet. Emits world_load_progress along the way and
        world_load_complete at the end. Holds the lock for the duration.
        """
        logger.info('thread: starting full world update')
        # full refresh always downloads all pages, ignoring cache
        self.lock()
        self._world_is_loading = True
        # load all pages that contain useful information
        load_progress_percent = 0
        load_progress_step = 5
        pages_list = ['techtree', 'overview', 'imperium', 'fleet']
        pages_maxtime = [3600, 60, 60, 60] # pages' expiration time in cache
        for i in range(0, len(pages_list)):
            page_name = pages_list[i]
            page_time = pages_maxtime[i]
            self.world_load_progress.emit(page_name, load_progress_percent)
            self._get_page(page_name, max_cache_lifetime=page_time, force_download=False)
            self.msleep(100) # delay before requesting next page
            load_progress_percent += load_progress_step
        #
        # additionally request user info page, constructed as:
        # http://uni4.xnova.su/?set=players&id=71995
        # This needs overview parser to parse and fetch account id first
        self.world_load_progress.emit('self_user_info', load_progress_percent)
        self._get_page('self_user_info', max_cache_lifetime=60, force_download=False)
        load_progress_percent += load_progress_step
        #
        # download all planets info; split the remaining progress evenly
        load_progress_left = 100 - load_progress_percent
        if len(self._planets) > 0:
            load_progress_step = load_progress_left // len(self._planets)
        else:
            load_progress_step = 1
        for pl in self._planets:
            self.world_load_progress.emit(self.tr('Planet') + ' ' + pl.name, load_progress_percent)
            load_progress_percent += load_progress_step
            # planet image
            self._download_image(pl.pic_url)
            self.msleep(100) # wait 100 ms
            # all other planet items
            self._download_planet(pl.planet_id, delays_msec=100, force_download=True)
            self.msleep(100)
        # restore original current planet that was before full world refresh
        # because world refresh changes it by loading every planet
        logger.info('Restoring current planet to #{0} ({1})'.format(self._cur_planet_id, self._cur_planet_name))
        self._download_planet_overview(self._cur_planet_id, force_download=True)
        self._world_is_loading = False
        self.unlock() # unlock before emitting any signal, just for a case...
        #
        # signal main window that we finished initial loading
        self.world_load_complete.emit()
@staticmethod
def _gettid():
    """Return the ID of the calling thread as a plain Python int."""
    # QThread.currentThreadId() returns a sip.voidptr; coerce it to int.
    return int(QThread.currentThreadId())
def _gettid_s(self):
    """
    Get thread ID as descriptive string
    :return: 'gui' if called from main GUI thread, 'network' if from net bg thread
    """
    current = self._gettid()
    if current == self._maintid:
        return 'gui'
    return 'network' if current == self._worldtid else 'unknown_' + str(current)
def run(self):
    """
    Main thread function, lives in Qt event loop to receive/send Qt events.
    :return: cannot return any value, including None
    """
    # Remember this (network) thread's ID so _gettid_s() can identify it.
    self._worldtid = self._gettid()
    # start new life from full downloading of current server state
    self._full_refresh()
    ret = -1
    while ret != self.SIGNAL_QUIT:
        # logger.debug('thread: entering Qt event loop, tid={0}'.format(self._worldtid))
        ret = self.exec()  # enter Qt event loop to receive events
        # logger.debug('thread: Qt event loop ended with code {0}'.format(ret))
        # The event loop's exit code doubles as a command code; dispatch on it.
        if ret == self.SIGNAL_QUIT:
            break
        if ret == self.SIGNAL_RELOAD_PAGE:
            self.on_signal_reload_page()
        elif ret == self.SIGNAL_RENAME_PLANET:
            self.on_signal_rename_planet()
        elif ret == self.SIGNAL_RELOAD_PLANET:
            self.on_signal_reload_planet()
        elif ret == self.SIGNAL_BUILD_ITEM:
            self.on_signal_build_item()
        elif ret == self.SIGNAL_BUILD_CANCEL:
            self.on_signal_build_cancel()
        elif ret == self.SIGNAL_BUILD_DISMANTLE:
            self.on_signal_build_dismantle()
        elif ret == self.SIGNAL_GET_URL:
            self.on_signal_get_url()
        elif ret == self.SIGNAL_TEST_PARSE_GALAXY:
            self.on_signal_test_parse_galaxy()
        #
        # Clear signal arguments after the handler ran so stale kwargs
        # cannot leak into the next dispatched command.
        self._signal_kwargs = dict()
    logger.debug('thread: exiting.')
# only one instance of XNovaWorld should be!
# well, there may be others, but for coordination it should be one
_singleton_XNovaWorld = None


def XNovaWorld_instance() -> XNovaWorld:
    """Factory / singleton accessor: return the shared XNovaWorld,
    creating it lazily on the first call."""
    global _singleton_XNovaWorld
    world = _singleton_XNovaWorld
    if not world:
        world = XNovaWorld()
        _singleton_XNovaWorld = world
    return world
|
k11a/snmpconverter | refs/heads/master | pyasn1/type/useful.py | 8 | #
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import datetime
from pyasn1.type import univ, char, tag
from pyasn1.compat import string, dateandtime
from pyasn1 import error
__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime']

# Re-export the "no value" sentinel type and instance from univ so users
# of this module do not need a separate import.
NoValue = univ.NoValue
noValue = univ.noValue
class ObjectDescriptor(char.GraphicString):
    # ObjectDescriptor: a GraphicString implicitly tagged with universal
    # tag number 7.
    __doc__ = char.GraphicString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    tagSet = char.GraphicString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
    )

    # Optimization for faster codec lookup
    typeId = char.GraphicString.getTypeId()
class TimeMixIn(object):
    """Shared datetime conversion logic for GeneralizedTime and UTCTime.

    Subclasses configure the accepted/produced grammar through the class
    attributes below.
    """

    # Number of digits in the year field ('%Y' vs '%y').
    _yearsDigits = 4
    # Whether a fractional-seconds suffix ('.nn' or ',nn') may appear.
    _hasSubsecond = False
    # Whether minutes/seconds may be omitted from the time of day.
    _optionalMinutes = False
    # Whether a two-digit (hours-only) timezone offset is accepted.
    _shortTZ = False

    class FixedOffset(datetime.tzinfo):
        """Fixed offset in minutes east from UTC."""

        # defaulted arguments required
        # https: // docs.python.org / 2.3 / lib / datetime - tzinfo.html
        def __init__(self, offset=0, name='UTC'):
            self.__offset = datetime.timedelta(minutes=offset)
            self.__name = name

        def utcoffset(self, dt):
            return self.__offset

        def tzname(self, dt):
            return self.__name

        def dst(self, dt):
            return datetime.timedelta(0)

    UTC = FixedOffset()

    @property
    def asDateTime(self):
        """Create :py:class:`datetime.datetime` object from a |ASN.1| object.

        Returns
        -------
        :
            new instance of :py:class:`datetime.datetime` object

        Raises
        ------
        :py:class:`~pyasn1.error.PyAsn1Error`
            on malformed time zone, sub-second or datetime text
        """
        text = str(self)
        if text.endswith('Z'):
            tzinfo = TimeMixIn.UTC
            text = text[:-1]

        elif '-' in text or '+' in text:
            if '+' in text:
                text, plusminus, tz = string.partition(text, '+')
            else:
                text, plusminus, tz = string.partition(text, '-')

            if self._shortTZ and len(tz) == 2:
                # Hours-only offset: assume zero minutes.
                tz += '00'

            if len(tz) != 4:
                raise error.PyAsn1Error('malformed time zone offset %s' % tz)

            try:
                minutes = int(tz[:2]) * 60 + int(tz[2:])
                if plusminus == '-':
                    minutes *= -1

            except ValueError:
                raise error.PyAsn1Error('unknown time specification %s' % self)

            tzinfo = TimeMixIn.FixedOffset(minutes, '?')

        else:
            # No zone designator at all: a naive datetime is produced.
            tzinfo = None

        if '.' in text or ',' in text:
            if '.' in text:
                text, _, ms = string.partition(text, '.')
            else:
                text, _, ms = string.partition(text, ',')

            if len(ms) > 6:
                # More than microsecond precision cannot be represented.
                raise error.PyAsn1Error('bad sub-second time specification %s' % self)

            try:
                # Scale by the number of fractional digits so that e.g.
                # '.5' means half a second (500000 us), not 50000 us.
                ms = int(ms) * 10 ** (6 - len(ms))

            except ValueError:
                raise error.PyAsn1Error('bad sub-second time specification %s' % self)

        else:
            ms = 0

        # Pad optional minutes/seconds so strptime always sees HHMMSS.
        if self._optionalMinutes and len(text) - self._yearsDigits == 6:
            text += '0000'
        elif len(text) - self._yearsDigits == 8:
            text += '00'

        try:
            dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')

        except ValueError:
            raise error.PyAsn1Error('malformed datetime format %s' % self)

        return dt.replace(microsecond=ms, tzinfo=tzinfo)

    @classmethod
    def fromDateTime(cls, dt):
        """Create |ASN.1| object from a :py:class:`datetime.datetime` object.

        Parameters
        ----------
        dt : :py:class:`datetime.datetime` object
            The `datetime.datetime` object to initialize the |ASN.1| object from

        Returns
        -------
        :
            new instance of |ASN.1| value
        """
        text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
        if cls._hasSubsecond:
            # Zero-pad the centiseconds so 50000 us renders as '.05',
            # not '.5' (which would read back as half a second).
            text += '.%02d' % (dt.microsecond // 10000)

        if dt.utcoffset():
            # timedelta normalizes negative offsets into (days=-1, seconds=...),
            # so .seconds is never negative; use total_seconds() to get the sign.
            seconds = int(dt.utcoffset().total_seconds())
            if seconds < 0:
                text += '-'
                seconds = -seconds
            else:
                text += '+'
            # The zone designator is HHMM -- hours and *minutes*
            # (the original emitted raw leftover seconds here).
            text += '%.2d%.2d' % (seconds // 3600, seconds % 3600 // 60)
        else:
            text += 'Z'

        return cls(text)
class GeneralizedTime(char.VisibleString, TimeMixIn):
    # GeneralizedTime: VisibleString carrying universal tag 24, combined
    # with the datetime conversion helpers from TimeMixIn.  Permits 4-digit
    # years, fractional seconds, omitted minutes/seconds and short
    # (hours-only) timezone offsets.
    __doc__ = char.VisibleString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    tagSet = char.VisibleString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
    )

    # Optimization for faster codec lookup
    # NOTE(review): typeId is taken from VideotexString although the class
    # derives from VisibleString -- looks like a copy/paste slip; confirm
    # against the codec type maps before changing.
    typeId = char.VideotexString.getTypeId()

    _yearsDigits = 4
    _hasSubsecond = True
    _optionalMinutes = True
    _shortTZ = True
class UTCTime(char.VisibleString, TimeMixIn):
    # UTCTime: VisibleString carrying universal tag 23.  Stricter grammar
    # than GeneralizedTime: 2-digit years, no fractional seconds, full
    # HHMMSS time and a full HHMM timezone offset.
    __doc__ = char.VisibleString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    tagSet = char.VisibleString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
    )

    # Optimization for faster codec lookup
    # NOTE(review): same VideotexString/VisibleString mismatch as in
    # GeneralizedTime above -- confirm before changing.
    typeId = char.VideotexString.getTypeId()

    _yearsDigits = 2
    _hasSubsecond = False
    _optionalMinutes = False
    _shortTZ = False
|
gspilio/nova | refs/heads/master | nova/tests/api/openstack/__init__.py | 143 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
SurpriseTRex/nuke-quick-import | refs/heads/master | projectbin.py | 1 | import nuke
import os
import re
from PySide import QtGui
from PySide import QtCore
from nukescripts import panels
class QuickReadBrowser(QtGui.QMainWindow):
    """Dockable browser panel that creates Nuke Read nodes.

    Shows a directory tree next to a filtered image-file list; a single
    image, or a whole numbered image sequence (when "Image Sequence Mode"
    is checked), can be imported as a Read node.
    """

    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        # Names of image sequences detected in the current folder.
        self.sequence_list = []
        # Directory of the currently open .nk script -- used as start folder.
        self.nuke_file_path = "/".join(nuke.root()["name"].value().split("/")[:-1:])
        # File-name patterns of the image formats shown in the file pane.
        self.image_filters = ["*.jpg*",
                              "*.jpeg*",
                              "*.png*",
                              "*.tga*",
                              "*.exr*",
                              "*.tif*",
                              "*.tiff*",
                              "*.psd*",
                              "*.hdri*",
                              "*.hdr*",
                              "*.cin*",
                              "*.dpx*"]
        # Declare Models.
        self.dir_model = QtGui.QFileSystemModel()
        self.file_model = QtGui.QFileSystemModel()
        self.seq_model = QtGui.QStringListModel(self.sequence_list)
        # Declare widgets.
        self.dir_view = QtGui.QTreeView()
        self.file_view = QtGui.QListView()
        self.text_edit = QtGui.QLineEdit()
        self.import_button = QtGui.QPushButton("Open")
        self.up_button = QtGui.QPushButton("Up")
        self.seq_box = QtGui.QCheckBox("Image Sequence Mode")
        self.inst_label = QtGui.QLabel("Note: Image sequence names must contain no 0-9 digits except for those which "
                                       "dictate frame numbers.")
        # Define a central widget and its layout.
        self.center_widget = QtGui.QWidget()
        self.lg = QtGui.QGridLayout()
        self.build_models()
        self.add_widgets()
        self.setup_connections()
        self.build_ui()

    def build_ui(self):
        """Apply window geometry, tooltips and initial text, then show."""
        self.resize(600, 350)
        self.setWindowTitle("Nuke 'Quick-Read' Importer")
        # Assign central widget.
        self.setCentralWidget(self.center_widget)
        self.center_widget.setLayout(self.lg)
        self.inst_label.setWordWrap(True)
        self.import_button.setToolTip("Imports the selected file as a Read node.")
        self.seq_box.setToolTip("If this is checked, import any of the files in the sequence to import the "
                                "entire sequence.")
        self.text_edit.setText(self.nuke_file_path)
        self.show()

    def build_models(self):
        """ Sets up fileSystemModels and their links to views. """
        self.dir_model.setRootPath("")
        self.file_model.setRootPath("")
        # Filters: the tree shows only directories, the list only image files.
        self.dir_model.setFilter(QtCore.QDir.AllDirs | QtCore.QDir.Dirs | QtCore.QDir.NoDotAndDotDot)
        self.file_model.setFilter(self.file_model.filter() | QtCore.QDir.Files | QtCore.QDir.NoDotAndDotDot)
        self.file_model.setNameFilters(self.image_filters)
        # Show non-matching files greyed-out rather than hiding them.
        self.file_model.setNameFilterDisables(False)
        # Associate Views/Models.
        self.dir_view.setModel(self.dir_model)
        self.file_view.setModel(self.file_model)
        # Set initial indexes to the current script's directory.
        self.dir_view.setCurrentIndex(self.dir_model.index(self.nuke_file_path))
        self.file_view.setRootIndex(self.file_model.index(self.nuke_file_path))
        # Hide all but Name column.
        self.dir_view.hideColumn(1)
        self.dir_view.hideColumn(2)
        self.dir_view.hideColumn(3)

    def add_widgets(self):
        """Place every widget into the grid layout."""
        # from row, from column, row span, column span.
        self.lg.addWidget(self.up_button, 0, 1, 1, 1)
        self.lg.addWidget(self.seq_box, 0, 2, 1, 1)
        self.lg.addWidget(self.dir_view, 1, 0, 1, 2)
        self.lg.addWidget(self.file_view, 1, 2, 1, 8)
        self.lg.addWidget(self.text_edit, 2, 0, 1, 8)
        self.lg.addWidget(self.import_button, 2, 8, 1, 2)
        self.lg.addWidget(self.inst_label, 3, 0, 2, 10)

    def setup_connections(self):
        """ Sets up input connections from different parts of the interface, to the appropriate methods. """
        self.dir_view.clicked.connect(self.update_from_tree_click)
        self.file_view.clicked.connect(self.update_from_list_click)
        self.file_view.doubleClicked.connect(self.import_to_read_node)
        self.text_edit.editingFinished.connect(self.update_from_text_entry)
        self.import_button.clicked.connect(self.import_to_read_node)
        self.up_button.clicked.connect(self.up_directory)
        self.seq_box.stateChanged.connect(self.sequence_toggle)

    def update_from_tree_click(self):
        """Sync the path field and file pane after a click in the tree."""
        # In sequence mode the file pane shows sequence names, not files,
        # so the file-model root is only moved in normal mode.
        if not self.seq_box.isChecked():
            self.text_edit.setText(self.dir_model.filePath(self.dir_view.selectedIndexes()[0]))
            self.file_view.setRootIndex(self.file_model.index(self.dir_model.filePath(self.dir_view.selectedIndexes()[0])))
        self.string_list_refresh()

    def update_from_list_click(self):
        """Mirror the clicked file's path into the path field."""
        if not self.seq_box.isChecked():
            self.text_edit.setText(self.file_model.filePath(self.file_view.selectedIndexes()[0]))
        self.string_list_refresh()

    def update_from_text_entry(self):
        # Updates both tree and list views with a typed file path.
        if not self.seq_box.isChecked():
            if self.text_edit.text():
                self.dir_view.setCurrentIndex(self.dir_model.index(self.text_edit.text()))
                self.file_view.setRootIndex(self.file_model.index(self.text_edit.text()))
        self.string_list_refresh()

    def up_directory(self):
        """ Provides a 'parent directory' button functionality. """
        parent_path = "/".join(self.file_model.filePath(self.file_view.rootIndex()).split("/")[:-1:])
        self.file_view.setRootIndex(self.file_model.index(parent_path))
        self.text_edit.setText(parent_path)
        self.dir_view.setCurrentIndex(self.dir_model.index(parent_path))
        self.string_list_refresh()

    def import_to_read_node(self):
        """ Handles final importing and directory navigation through the listView. """
        if self.seq_box.isChecked():
            file_path = self.text_edit.text()
            # If the path field still ends in a file name (has an extension),
            # strip it and append the selected sequence name instead.
            if re.search('\.....?', file_path):
                node = nuke.nodes.Read(file="/".join(file_path.split("/")[:-1:]) + "/" +
                                            self.seq_model.data(self.file_view.selectedIndexes()[0], 0))
            else:
                node = nuke.nodes.Read(file=file_path + "/" + self.seq_model.data(self.file_view.selectedIndexes()[0], 0))
            # Derive the frame range from the first/last frame file names.
            range_strip = re.compile('\D')
            first = range_strip.sub('', self.scan_folder_sequences()[1][0])
            last = range_strip.sub('', self.scan_folder_sequences()[1][-1])
            node.knob("first").setValue(int(first))
            node.knob("last").setValue(int(last))
        else:
            file_path = self.file_model.filePath(self.file_view.selectedIndexes()[0])
            if self.file_model.isDir(self.file_view.selectedIndexes()[0]):  # Handle opening a directory.
                # Converts from file_view index to dir_model index.
                dir_view_target_index = self.dir_model.index(self.dir_model.filePath(self.file_view.selectedIndexes()[0]))
                self.dir_view.setCurrentIndex(dir_view_target_index)
                self.file_view.setRootIndex(self.file_view.selectedIndexes()[0])
            else:  # Load single image.
                nuke.nodes.Read(file=file_path)

    def sequence_toggle(self):
        """ Handles the shift from showing files in folder to a list of Strings showing sequence names. """
        if self.seq_box.isChecked():
            self.string_list_refresh()
            # Change from a FileSystemModel to a StringListModel with names of sequences.
            self.file_view.setModel(self.seq_model)
        else:
            # Change back to the standard FileSystemModel.
            self.file_view.setModel(self.file_model)
            self.file_view.setRootIndex(
                self.file_model.index(
                    self.dir_model.filePath(
                        self.dir_view.selectedIndexes()[0])))

    def repl_regex(self, str):
        """ Functions as a find/replace for certain regex operations with unspecified frame number lengths. """
        # NOTE(review): the parameter shadows the builtin 'str'; it receives
        # a re.Match object whose matched digits are replaced by '#'s of
        # equal length.
        return "#" * len(str.group())

    def get_folder_contents(self, file_path):
        # Plain files only -- sub-directories are ignored.
        return [f for f in os.listdir(file_path) if os.path.isfile(os.path.join(file_path, f))]

    def scan_folder_sequences(self):
        """ Returns a list of lists. filtered is a list of only the sequences in the specified path, first_last is
        a list of individual frames of the sequence. """
        file_path = self.dir_model.filePath(self.dir_view.selectedIndexes()[0])
        files_in_folder = self.get_folder_contents(file_path)
        filtered = []
        first_last = []
        for each in files_in_folder:
            # Two or more consecutive digits are treated as a frame number.
            if re.search('[0-9]{2,}', each):
                first_last.append(each)
                r = re.sub('[0-9]{2,}', self.repl_regex, each)
                if r not in filtered:
                    filtered.append(r)
        return [filtered, first_last]

    def string_list_refresh(self):
        """ Refresh the StringListModel. """
        # NOTE(review): this builds a brand-new model object; the file_view
        # only picks it up on the next sequence_toggle() call -- looks
        # stale-prone while sequence mode stays checked; confirm.
        sequence_list = self.scan_folder_sequences()[0]
        self.seq_model = QtGui.QStringListModel(sequence_list)
#window = QuickReadBrowser()
# Register the browser as a dockable Nuke panel; the last argument is the
# panel's unique menu identifier within the Nuke session.
panels.registerWidgetAsPanel("QuickReadBrowser", "QuickRead Node Importer", "uk.co.seanjvfx.NukeQuickReadImporter")
|
txm/potato | refs/heads/master | django/http/utils.py | 200 | """
Functions that modify an HTTP request or response in some way.
"""
# This group of functions are run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.
def fix_location_header(request, response):
    """
    Ensures that we always use an absolute URI in any location header in the
    response. This is required by RFC 2616, section 14.30.

    Code constructing response objects is free to insert relative paths, as
    this function converts them to absolute paths.
    """
    # Nothing to do without a Location header or a resolvable host.
    if 'Location' not in response or not request.get_host():
        return response
    response['Location'] = request.build_absolute_uri(response['Location'])
    return response
def conditional_content_removal(request, response):
    """
    Removes the content of responses for HEAD requests, 1xx, 204 and 304
    responses. Ensures compliance with RFC 2616, section 4.3.
    """
    status = response.status_code
    # Bodyless status codes: drop the payload and zero the length.
    if status in (204, 304) or 100 <= status < 200:
        response.content = ''
        response['Content-Length'] = 0
    # HEAD responses keep their headers but must not carry a body.
    if request.method == 'HEAD':
        response.content = ''
    return response
def fix_IE_for_attach(request, response):
    """
    This function will prevent Django from serving a Content-Disposition header
    while expecting the browser to cache it (only when the browser is IE). This
    leads to IE not allowing the client to download.
    """
    agent = request.META.get('HTTP_USER_AGENT', '').upper()
    if not ('MSIE' in agent or 'CHROMEFRAME' in agent):
        # Only IE (and Chrome Frame) exhibit the broken behavior.
        return response

    if response.has_header('Content-Disposition'):
        # Cache-Control values IE cannot combine with a download.
        offending_headers = ('no-cache', 'no-store')
        try:
            del response['Pragma']
        except KeyError:
            pass
        if response.has_header('Cache-Control'):
            kept = [
                part.strip() for part in response['Cache-Control'].split(',')
                if part.strip().lower() not in offending_headers
            ]
            if kept:
                response['Cache-Control'] = ', '.join(kept)
            else:
                # Every value was offending -- drop the header entirely.
                del response['Cache-Control']
    return response
def fix_IE_for_vary(request, response):
    """
    This function will fix the bug reported at
    http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
    by clearing the Vary header whenever the mime-type is not safe
    enough for Internet Explorer to handle. Poor thing.
    """
    agent = request.META.get('HTTP_USER_AGENT', '').upper()
    if not ('MSIE' in agent or 'CHROMEFRAME' in agent):
        return response

    # These mime-types that are decreed "Vary-safe" for IE:
    safe_mime_types = ('text/html', 'text/plain', 'text/sgml')

    # Everything after ';' (such as the character set) is irrelevant here.
    mime_type, _, _ = response['Content-Type'].partition(';')
    if mime_type not in safe_mime_types:
        try:
            del response['Vary']
        except KeyError:
            pass
    return response
|
georgemarshall/django | refs/heads/master | tests/template_tests/syntax_tests/i18n/test_filters.py | 133 | from django.test import SimpleTestCase
from django.utils import translation
from ...utils import setup
class I18nFiltersTests(SimpleTestCase):
    """Template-engine tests for the i18n language_* filters."""

    # Template tag libraries registered for the test engine.
    libraries = {
        'custom': 'template_tests.templatetags.custom',
        'i18n': 'django.templatetags.i18n',
    }

    @setup({'i18n32': '{% load i18n %}{{ "hu"|language_name }} '
                      '{{ "hu"|language_name_local }} {{ "hu"|language_bidi }} '
                      '{{ "hu"|language_name_translated }}'})
    def test_i18n32(self):
        # language_name / _name_local / _bidi are locale-independent;
        # only language_name_translated follows the active language.
        output = self.engine.render_to_string('i18n32')
        self.assertEqual(output, 'Hungarian Magyar False Hungarian')

        with translation.override('cs'):
            output = self.engine.render_to_string('i18n32')
            self.assertEqual(output, 'Hungarian Magyar False maďarsky')

    @setup({'i18n33': '{% load i18n %}'
                      '{{ langcode|language_name }} {{ langcode|language_name_local }} '
                      '{{ langcode|language_bidi }} {{ langcode|language_name_translated }}'})
    def test_i18n33(self):
        # Same filters, but the language code is a context variable.
        output = self.engine.render_to_string('i18n33', {'langcode': 'nl'})
        self.assertEqual(output, 'Dutch Nederlands False Dutch')

        with translation.override('cs'):
            output = self.engine.render_to_string('i18n33', {'langcode': 'nl'})
            self.assertEqual(output, 'Dutch Nederlands False nizozemsky')

    @setup({'i18n38_2': '{% load i18n custom %}'
                        '{% get_language_info_list for langcodes|noop:"x y" as langs %}'
                        '{% for l in langs %}{{ l.code }}: {{ l.name }}/'
                        '{{ l.name_local }}/{{ l.name_translated }} '
                        'bidi={{ l.bidi }}; {% endfor %}'})
    def test_i18n38_2(self):
        # get_language_info_list fed through a filter expression.
        with translation.override('cs'):
            output = self.engine.render_to_string('i18n38_2', {'langcodes': ['it', 'fr']})
        self.assertEqual(
            output,
            'it: Italian/italiano/italsky bidi=False; '
            'fr: French/français/francouzsky bidi=False; '
        )
|
dantebarba/docker-media-server | refs/heads/master | plex/Sub-Zero.bundle/Contents/Libraries/Shared/requests_toolbelt/auth/http_proxy_digest.py | 3 | # -*- coding: utf-8 -*-
"""The module containing HTTPProxyDigestAuth."""
import re
from requests import cookies, utils
from . import _digest_auth_compat as auth
class HTTPProxyDigestAuth(auth.HTTPDigestAuth):
    """HTTP digest authentication between proxy

    :param stale_rejects: The number of rejects indicate that:
        the client may wish to simply retry the request
        with a new encrypted response, without reprompting the user for a
        new username and password. i.e., retry build_digest_header
    :type stale_rejects: int
    """
    # Matches (and is used to strip) the leading "digest " scheme token of
    # a Proxy-Authenticate challenge, case-insensitively.
    _pat = re.compile(r'digest ', flags=re.IGNORECASE)

    def __init__(self, *args, **kwargs):
        super(HTTPProxyDigestAuth, self).__init__(*args, **kwargs)
        self.stale_rejects = 0
        self.init_per_thread_state()

    @property
    def stale_rejects(self):
        # On requests >= 2.8.0 digest state lives in thread-local storage;
        # fall back to a plain instance attribute on older versions.
        thread_local = getattr(self, '_thread_local', None)
        if thread_local is None:
            return self._stale_rejects
        return thread_local.stale_rejects

    @stale_rejects.setter
    def stale_rejects(self, value):
        # Mirror of the getter: store wherever the state actually lives.
        thread_local = getattr(self, '_thread_local', None)
        if thread_local is None:
            self._stale_rejects = value
        else:
            thread_local.stale_rejects = value

    def init_per_thread_state(self):
        try:
            super(HTTPProxyDigestAuth, self).init_per_thread_state()
        except AttributeError:
            # If we're not on requests 2.8.0+ this method does not exist
            pass

    def handle_407(self, r, **kwargs):
        """Handle HTTP 407 only once, otherwise give up

        :param r: current response
        :returns: responses, along with the new response
        """
        if r.status_code == 407 and self.stale_rejects < 2:
            s_auth = r.headers.get("proxy-authenticate")
            if s_auth is None:
                raise IOError(
                    "proxy server violated RFC 7235:"
                    "407 response MUST contain header proxy-authenticate")
            elif not self._pat.match(s_auth):
                # Challenge is not digest-based -- nothing we can do.
                return r

            self.chal = utils.parse_dict_header(
                self._pat.sub('', s_auth, count=1))

            # if we present the user/passwd and still get rejected
            # https://tools.ietf.org/html/rfc2617#section-3.2.1
            if ('Proxy-Authorization' in r.request.headers and
                    'stale' in self.chal):
                if self.chal['stale'].lower() == 'true':  # try again
                    self.stale_rejects += 1
                # wrong user/passwd
                elif self.chal['stale'].lower() == 'false':
                    raise IOError("User or password is invalid")

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            cookies.extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Proxy-Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r
        else:  # give up authenticate
            return r

    def __call__(self, r):
        self.init_per_thread_state()
        # if we have nonce, then just use it, otherwise server will tell us
        if self.last_nonce:
            r.headers['Proxy-Authorization'] = self.build_digest_header(
                r.method, r.url
            )
        r.register_hook('response', self.handle_407)
        return r
|
aronsky/home-assistant | refs/heads/dev | tests/components/sensor/test_rmvtransport.py | 14 | """The tests for the rmvtransport platform."""
import datetime
from unittest.mock import patch
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
# Minimal configuration: one station with all defaults.
VALID_CONFIG_MINIMAL = {'sensor': {
    'platform': 'rmvtransport',
    'next_departure': [
        {'station': '3000010'}
    ]}}

# Configuration overriding the sensor's display name.
VALID_CONFIG_NAME = {'sensor': {
    'platform': 'rmvtransport',
    'next_departure': [
        {
            'station': '3000010',
            'name': 'My Station',
        }
    ]}}

# Configuration exercising line filtering, journey limit and time offset.
VALID_CONFIG_MISC = {'sensor': {
    'platform': 'rmvtransport',
    'next_departure': [
        {
            'station': '3000010',
            'lines': [21, 'S8'],
            'max_journeys': 2,
            'time_offset': 10
        }
    ]}}

# Configuration filtering departures by destination names.
VALID_CONFIG_DEST = {'sensor': {
    'platform': 'rmvtransport',
    'next_departure': [
        {
            'station': '3000010',
            'destinations': ['Frankfurt (Main) Flughafen Regionalbahnhof',
                             'Frankfurt (Main) Stadion']
        }
    ]}}
def get_departures_mock():
    """Mock rmvtransport departures loading."""
    # Fixture mirroring the RMVtransport library's departure payload:
    # station metadata plus a list of journey dicts.
    data = {'station': 'Frankfurt (Main) Hauptbahnhof',
            'stationId': '3000010', 'filter': '11111111111', 'journeys': [
            {'product': 'Tram', 'number': 12, 'trainId': '1123456',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 21),
             'minutes': 7, 'delay': 3, 'stops': [
                'Frankfurt (Main) Willy-Brandt-Platz',
                'Frankfurt (Main) Römer/Paulskirche',
                'Frankfurt (Main) Börneplatz',
                'Frankfurt (Main) Konstablerwache',
                'Frankfurt (Main) Bornheim Mitte',
                'Frankfurt (Main) Saalburg-/Wittelsbacherallee',
                'Frankfurt (Main) Eissporthalle/Festplatz',
                'Frankfurt (Main) Hugo-Junkers-Straße/Schleife'],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'},
            {'product': 'Bus', 'number': 21, 'trainId': '1234567',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 22),
             'minutes': 8, 'delay': 1, 'stops': [
                'Frankfurt (Main) Weser-/Münchener Straße',
                'Frankfurt (Main) Hugo-Junkers-Straße/Schleife'],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'},
            {'product': 'Bus', 'number': 12, 'trainId': '1234568',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 25),
             'minutes': 11, 'delay': 1, 'stops': [
                'Frankfurt (Main) Stadion'],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'},
            {'product': 'Bus', 'number': 21, 'trainId': '1234569',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 25),
             'minutes': 11, 'delay': 1, 'stops': [],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'},
            {'product': 'Bus', 'number': 12, 'trainId': '1234570',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 25),
             'minutes': 11, 'delay': 1, 'stops': [],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'},
            {'product': 'Bus', 'number': 21, 'trainId': '1234571',
             'direction': 'Frankfurt (Main) Hugo-Junkers-Straße/Schleife',
             'departure_time': datetime.datetime(2018, 8, 6, 14, 25),
             'minutes': 11, 'delay': 1, 'stops': [],
             'info': None, 'info_long': None,
             'icon': 'https://products/32_pic.png'}
            ]}
    return data
def get_no_departures_mock():
    """Mock no departures in results."""
    # Same station metadata as the full fixture, but an empty journey list.
    return {
        'station': 'Frankfurt (Main) Hauptbahnhof',
        'stationId': '3000010',
        'filter': '11111111111',
        'journeys': [],
    }
async def test_rmvtransport_min_config(hass):
    """Test minimal rmvtransport configuration."""
    # Patch the library call so setup uses the canned departures fixture.
    with patch('RMVtransport.RMVtransport.get_departures',
               return_value=mock_coro(get_departures_mock())):
        assert await async_setup_component(hass, 'sensor',
                                           VALID_CONFIG_MINIMAL) is True

    # State and attributes mirror the first journey of the fixture.
    state = hass.states.get('sensor.frankfurt_main_hauptbahnhof')
    assert state.state == '7'
    assert state.attributes['departure_time'] == \
        datetime.datetime(2018, 8, 6, 14, 21)
    assert state.attributes['direction'] == \
        'Frankfurt (Main) Hugo-Junkers-Straße/Schleife'
    assert state.attributes['product'] == 'Tram'
    assert state.attributes['line'] == 12
    assert state.attributes['icon'] == 'mdi:tram'
    assert state.attributes['friendly_name'] == 'Frankfurt (Main) Hauptbahnhof'
async def test_rmvtransport_name_config(hass):
    """Test custom name configuration."""
    with patch('RMVtransport.RMVtransport.get_departures',
               return_value=mock_coro(get_departures_mock())):
        assert await async_setup_component(hass, 'sensor', VALID_CONFIG_NAME)

    # The configured 'name' option overrides the station display name.
    state = hass.states.get('sensor.my_station')
    assert state.attributes['friendly_name'] == 'My Station'
async def test_rmvtransport_misc_config(hass):
    """Test misc configuration."""
    with patch('RMVtransport.RMVtransport.get_departures',
               return_value=mock_coro(get_departures_mock())):
        assert await async_setup_component(hass, 'sensor', VALID_CONFIG_MISC)

    # Line filter [21, 'S8'] selects the first line-21 journey.
    state = hass.states.get('sensor.frankfurt_main_hauptbahnhof')
    assert state.attributes['friendly_name'] == 'Frankfurt (Main) Hauptbahnhof'
    assert state.attributes['line'] == 21
async def test_rmvtransport_dest_config(hass):
    """Test destination configuration."""
    with patch('RMVtransport.RMVtransport.get_departures',
               return_value=mock_coro(get_departures_mock())):
        assert await async_setup_component(hass, 'sensor', VALID_CONFIG_DEST)

    # The destination filter matches the journey stopping at
    # 'Frankfurt (Main) Stadion' (third fixture entry, 11 minutes away).
    state = hass.states.get('sensor.frankfurt_main_hauptbahnhof')
    assert state.state == '11'
    assert state.attributes['direction'] == \
        'Frankfurt (Main) Hugo-Junkers-Straße/Schleife'
    assert state.attributes['line'] == 12
    assert state.attributes['minutes'] == 11
    assert state.attributes['departure_time'] == \
        datetime.datetime(2018, 8, 6, 14, 25)
async def test_rmvtransport_no_departures(hass):
    """Test for no departures."""
    with patch('RMVtransport.RMVtransport.get_departures',
               return_value=mock_coro(get_no_departures_mock())):
        assert await async_setup_component(hass, 'sensor',
                                           VALID_CONFIG_MINIMAL)

    # With an empty journey list no sensor entity is created.
    state = hass.states.get('sensor.frankfurt_main_hauptbahnhof')
    assert not state
|
indashnet/InDashNet.Open.UN2000 | refs/heads/master | android/external/mesa3d/src/gallium/drivers/llvmpipe/lp_tile_soa.py | 13 | #!/usr/bin/env python
CopyRight = '''
/**************************************************************************
*
* Copyright 2009 VMware, Inc.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
**************************************************************************/
/**
* @file
* Pixel format accessor functions.
*
* @author Jose Fonseca <jfonseca@vmware.com>
*/
'''
import sys
import os.path

# Make the shared gallium format helpers importable relative to this
# generator script's own location.
sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), '../../auxiliary/util'))

from u_format_pack import *
def is_format_supported(format):
    '''Determines whether we actually have the plumbing necessary to generate the
    to read/write to/from this format.'''

    # FIXME: Ideally we would support any format combination here.

    # These packed-float formats have dedicated conversion helpers.
    if format.name in ('PIPE_FORMAT_R11G11B10_FLOAT',
                       'PIPE_FORMAT_R9G9B9E5_FLOAT'):
        return True

    if format.layout != PLAIN:
        return False

    for channel in format.channels[:4]:
        if channel.type not in (VOID, UNSIGNED, SIGNED, FLOAT):
            return False
        if channel.type == FLOAT and channel.size not in (16, 32, 64):
            return False

    return format.colorspace in ('rgb', 'srgb')
def generate_format_read(format, dst_channel, dst_native_type, dst_suffix):
    '''Generate the function to read pixels from a particular format.

    Emits (via print) a C function that converts one TILE_SIZE x TILE_SIZE
    block from the linear source layout into the SoA tile layout.
    '''
    name = format.short_name()
    src_native_type = native_type(format)
    # Function signature and the per-row / per-pixel loop preamble.
    print 'static void'
    print 'lp_tile_%s_swizzle_%s(%s * restrict dst, const uint8_t * restrict src, unsigned src_stride, unsigned x0, unsigned y0)' % (name, dst_suffix, dst_native_type)
    print '{'
    print ' unsigned x, y;'
    print ' const uint8_t *src_row = src + y0*src_stride;'
    print ' for (y = 0; y < TILE_SIZE; ++y) {'
    print ' const %s *src_pixel = (const %s *)(src_row + x0*%u);' % (src_native_type, src_native_type, format.stride())
    print ' for (x = 0; x < TILE_SIZE; ++x) {'
    # Map each storage channel to the C variable name(s) it feeds
    # ('r'/'g'/'b'/'a' for color formats, 'z' for depth), per the swizzle.
    names = ['']*4
    if format.colorspace in ('rgb', 'srgb'):
        for i in range(4):
            swizzle = format.swizzles[i]
            if swizzle < 4:
                names[swizzle] += 'rgba'[i]
    elif format.colorspace == 'zs':
        swizzle = format.swizzles[0]
        if swizzle < 4:
            names[swizzle] = 'z'
        else:
            assert False
    else:
        assert False
    # Packed-float formats are decoded with dedicated helpers into float[3].
    if format.name == 'PIPE_FORMAT_R11G11B10_FLOAT':
        print ' float tmp[3];'
        print ' uint8_t r, g, b;'
        print ' r11g11b10f_to_float3(*src_pixel++, tmp);'
        for i in range(3):
            print ' %s = tmp[%d] * 0xff;' % (names[i], i)
    elif format.name == 'PIPE_FORMAT_R9G9B9E5_FLOAT':
        print ' float tmp[3];'
        print ' uint8_t r, g, b;'
        print ' rgb9e5_to_float3(*src_pixel++, tmp);'
        for i in range(3):
            print ' %s = tmp[%d] * 0xff;' % (names[i], i)
    elif format.layout == PLAIN:
        if not format.is_array():
            # Bitfield case: extract each channel by shift and mask.
            print ' %s pixel = *src_pixel++;' % src_native_type
            shift = 0;
            for i in range(4):
                src_channel = format.channels[i]
                width = src_channel.size
                if names[i]:
                    value = 'pixel'
                    mask = (1 << width) - 1
                    if shift:
                        value = '(%s >> %u)' % (value, shift)
                    # No mask needed for the topmost channel of the word.
                    if shift + width < format.block_size():
                        value = '(%s & 0x%x)' % (value, mask)
                    value = conversion_expr(src_channel, dst_channel, dst_native_type, value, clamp=False)
                    print ' %s %s = %s;' % (dst_native_type, names[i], value)
                shift += width
        else:
            # Array case: one source element per channel, read sequentially.
            for i in range(4):
                if names[i]:
                    print ' %s %s;' % (dst_native_type, names[i])
            for i in range(4):
                src_channel = format.channels[i]
                if names[i]:
                    value = '(*src_pixel++)'
                    value = conversion_expr(src_channel, dst_channel, dst_native_type, value, clamp=False)
                    print ' %s = %s;' % (names[i], value)
                elif src_channel.size:
                    # Skip channels that exist in storage but are unused.
                    print ' ++src_pixel;'
    else:
        assert False
    # Store decoded channels into the destination tile in RGBA order,
    # honoring SWIZZLE_0 / SWIZZLE_1 constants.
    for i in range(4):
        if format.colorspace in ('rgb', 'srgb'):
            swizzle = format.swizzles[i]
            if swizzle < 4:
                value = names[swizzle]
            elif swizzle == SWIZZLE_0:
                value = '0'
            elif swizzle == SWIZZLE_1:
                value = get_one(dst_channel)
            else:
                assert False
        elif format.colorspace == 'zs':
            # Depth is replicated into r/g/b; alpha is forced to one.
            if i < 3:
                value = 'z'
            else:
                value = get_one(dst_channel)
        else:
            assert False
        print ' TILE_PIXEL(dst, x, y, %u) = %s; /* %s */' % (i, value, 'rgba'[i])
    print ' }'
    print ' src_row += src_stride;'
    print ' }'
    print '}'
    print
def pack_rgba(format, src_channel, r, g, b, a):
    """Return a C expression packing the r, g, b, a sub-expressions into a
    single pixel word of the given format.
    Ex: '(b << 24) | (g << 16) | (r << 8) | (a << 0)'
    """
    assert format.colorspace in ('rgb', 'srgb')
    inv_swizzle = format.inv_swizzles()
    sources = (r, g, b, a)
    terms = []
    bit_offset = 0
    for i in range(4):
        # Pick the source expression this storage channel draws from,
        # according to the inverse swizzle (None for unused channels).
        source = None
        if inv_swizzle[i] is not None and inv_swizzle[i] < 4:
            source = sources[inv_swizzle[i]]
        if source:
            dst_channel = format.channels[i]
            dst_native_type = native_type(format)
            converted = conversion_expr(src_channel, dst_channel,
                                        dst_native_type, source, clamp=False)
            terms.append("((%s) << %d)" % (converted, bit_offset))
        # Advance past this channel's bits whether it was used or not.
        bit_offset = bit_offset + format.channels[i].size
    if not terms:
        return None
    return " | ".join(terms)
def emit_unrolled_unswizzle_code(format, src_channel):
    '''Emit code for writing a block based on unrolled loops.
    This is considerably faster than the TILE_PIXEL-based code below.
    '''
    dst_native_type = 'uint%u_t' % format.block_size()
    # Walk the tile one TILE_VECTOR_HEIGHT x TILE_VECTOR_WIDTH quad at a
    # time; inside a quad the r/g/b/a planes are TILE_C_STRIDE bytes apart.
    print ' const unsigned dstpix_stride = dst_stride / %d;' % format.stride()
    print ' %s *dstpix = (%s *) dst;' % (dst_native_type, dst_native_type)
    print ' unsigned int qx, qy, i;'
    print
    print ' for (qy = 0; qy < TILE_SIZE; qy += TILE_VECTOR_HEIGHT) {'
    print ' const unsigned py = y0 + qy;'
    print ' for (qx = 0; qx < TILE_SIZE; qx += TILE_VECTOR_WIDTH) {'
    print ' const unsigned px = x0 + qx;'
    print ' const uint8_t *r = src + 0 * TILE_C_STRIDE;'
    print ' const uint8_t *g = src + 1 * TILE_C_STRIDE;'
    print ' const uint8_t *b = src + 2 * TILE_C_STRIDE;'
    print ' const uint8_t *a = src + 3 * TILE_C_STRIDE;'
    print ' (void) r; (void) g; (void) b; (void) a; /* silence warnings */'
    # Two pixels per iteration; the lookup tables map the linear index i
    # to the x/y position within the quad.
    print ' for (i = 0; i < TILE_C_STRIDE; i += 2) {'
    print ' const uint32_t pixel0 = %s;' % pack_rgba(format, src_channel, "r[i+0]", "g[i+0]", "b[i+0]", "a[i+0]")
    print ' const uint32_t pixel1 = %s;' % pack_rgba(format, src_channel, "r[i+1]", "g[i+1]", "b[i+1]", "a[i+1]")
    print ' const unsigned offset = (py + tile_y_offset[i]) * dstpix_stride + (px + tile_x_offset[i]);'
    print ' dstpix[offset + 0] = pixel0;'
    print ' dstpix[offset + 1] = pixel1;'
    print ' }'
    print ' src += TILE_X_STRIDE;'
    print ' }'
    print ' }'
def emit_tile_pixel_unswizzle_code(format, src_channel):
    '''Emit code for writing a block based on the TILE_PIXEL macro.'''
    dst_native_type = native_type(format)
    inv_swizzle = format.inv_swizzles()
    # Generic per-pixel loop over the tile.
    print ' unsigned x, y;'
    print ' uint8_t *dst_row = dst + y0*dst_stride;'
    print ' for (y = 0; y < TILE_SIZE; ++y) {'
    print ' %s *dst_pixel = (%s *)(dst_row + x0*%u);' % (dst_native_type, dst_native_type, format.stride())
    print ' for (x = 0; x < TILE_SIZE; ++x) {'
    # Packed-float formats are encoded with dedicated helper functions.
    if format.name == 'PIPE_FORMAT_R11G11B10_FLOAT':
        print ' float tmp[3];'
        for i in range(3):
            print ' tmp[%d] = ubyte_to_float(TILE_PIXEL(src, x, y, %u));' % (i, inv_swizzle[i])
        print ' *dst_pixel++ = float3_to_r11g11b10f(tmp);'
    elif format.name == 'PIPE_FORMAT_R9G9B9E5_FLOAT':
        print ' float tmp[3];'
        for i in range(3):
            print ' tmp[%d] = ubyte_to_float(TILE_PIXEL(src, x, y, %u));' % (i, inv_swizzle[i])
        print ' *dst_pixel++ = float3_to_rgb9e5(tmp);'
    elif format.layout == PLAIN:
        if not format.is_array():
            # Bitfield case: OR each converted channel into place.
            print ' %s pixel = 0;' % dst_native_type
            shift = 0;
            for i in range(4):
                dst_channel = format.channels[i]
                width = dst_channel.size
                if inv_swizzle[i] is not None:
                    value = 'TILE_PIXEL(src, x, y, %u)' % inv_swizzle[i]
                    value = conversion_expr(src_channel, dst_channel, dst_native_type, value, clamp=False)
                    if shift:
                        value = '(%s << %u)' % (value, shift)
                    print ' pixel |= %s;' % value
                shift += width
            print ' *dst_pixel++ = pixel;'
        else:
            # Array case: write one destination element per channel.
            for i in range(4):
                dst_channel = format.channels[i]
                if inv_swizzle[i] is not None:
                    value = 'TILE_PIXEL(src, x, y, %u)' % inv_swizzle[i]
                    value = conversion_expr(src_channel, dst_channel, dst_native_type, value, clamp=False)
                    print ' *dst_pixel++ = %s;' % value
                elif dst_channel.size:
                    # Skip channels present in storage but not written.
                    print ' ++dst_pixel;'
    else:
        assert False
    print ' }'
    print ' dst_row += dst_stride;'
    print ' }'
def generate_format_write(format, src_channel, src_native_type, src_suffix):
    '''Generate the function to write pixels to a particular format.

    Emits a C function converting one tile from SoA layout back to the
    linear destination layout.
    '''
    name = format.short_name()
    print 'static void'
    print 'lp_tile_%s_unswizzle_%s(const %s * restrict src, uint8_t * restrict dst, unsigned dst_stride, unsigned x0, unsigned y0)' % (name, src_suffix, src_native_type)
    print '{'
    # Use the fast unrolled path only for simple packed unsigned rgb
    # formats up to 32 bits; everything else goes through TILE_PIXEL.
    if format.layout == PLAIN \
        and format.colorspace == 'rgb' \
        and format.block_size() <= 32 \
        and format.is_pot() \
        and not format.is_mixed() \
        and (format.channels[0].type == UNSIGNED \
             or format.channels[1].type == UNSIGNED):
        emit_unrolled_unswizzle_code(format, src_channel)
    else:
        emit_tile_pixel_unswizzle_code(format, src_channel)
    print '}'
    print
def generate_sse2():
    '''Emit hand-written SSE2 specializations of the swizzle/unswizzle
    functions for the B8G8R8A8/B8G8R8X8 unorm formats, guarded by
    PIPE_ARCH_SSE.  The generic dispatchers select these at runtime when
    the CPU supports SSE2.'''
    print '''
#if defined(PIPE_ARCH_SSE)
#include "util/u_sse.h"
static ALWAYS_INLINE void
swz4( const __m128i * restrict x,
const __m128i * restrict y,
const __m128i * restrict z,
const __m128i * restrict w,
__m128i * restrict a,
__m128i * restrict b,
__m128i * restrict c,
__m128i * restrict d)
{
__m128i i, j, k, l;
__m128i m, n, o, p;
__m128i e, f, g, h;
m = _mm_unpacklo_epi8(*x,*y);
n = _mm_unpackhi_epi8(*x,*y);
o = _mm_unpacklo_epi8(*z,*w);
p = _mm_unpackhi_epi8(*z,*w);
i = _mm_unpacklo_epi16(m,n);
j = _mm_unpackhi_epi16(m,n);
k = _mm_unpacklo_epi16(o,p);
l = _mm_unpackhi_epi16(o,p);
e = _mm_unpacklo_epi8(i,j);
f = _mm_unpackhi_epi8(i,j);
g = _mm_unpacklo_epi8(k,l);
h = _mm_unpackhi_epi8(k,l);
*a = _mm_unpacklo_epi64(e,g);
*b = _mm_unpackhi_epi64(e,g);
*c = _mm_unpacklo_epi64(f,h);
*d = _mm_unpackhi_epi64(f,h);
}
static ALWAYS_INLINE void
unswz4( const __m128i * restrict a,
const __m128i * restrict b,
const __m128i * restrict c,
const __m128i * restrict d,
__m128i * restrict x,
__m128i * restrict y,
__m128i * restrict z,
__m128i * restrict w)
{
__m128i i, j, k, l;
__m128i m, n, o, p;
i = _mm_unpacklo_epi8(*a,*b);
j = _mm_unpackhi_epi8(*a,*b);
k = _mm_unpacklo_epi8(*c,*d);
l = _mm_unpackhi_epi8(*c,*d);
m = _mm_unpacklo_epi16(i,k);
n = _mm_unpackhi_epi16(i,k);
o = _mm_unpacklo_epi16(j,l);
p = _mm_unpackhi_epi16(j,l);
*x = _mm_unpacklo_epi64(m,n);
*y = _mm_unpackhi_epi64(m,n);
*z = _mm_unpacklo_epi64(o,p);
*w = _mm_unpackhi_epi64(o,p);
}
static void
lp_tile_b8g8r8a8_unorm_swizzle_4ub_sse2(uint8_t * restrict dst,
const uint8_t * restrict src, unsigned src_stride,
unsigned x0, unsigned y0)
{
__m128i *dst128 = (__m128i *) dst;
unsigned x, y;
src += y0 * src_stride;
src += x0 * sizeof(uint32_t);
for (y = 0; y < TILE_SIZE; y += 4) {
const uint8_t *src_row = src;
for (x = 0; x < TILE_SIZE; x += 4) {
swz4((const __m128i *) (src_row + 0 * src_stride),
(const __m128i *) (src_row + 1 * src_stride),
(const __m128i *) (src_row + 2 * src_stride),
(const __m128i *) (src_row + 3 * src_stride),
dst128 + 2, /* b */
dst128 + 1, /* g */
dst128 + 0, /* r */
dst128 + 3); /* a */
dst128 += 4;
src_row += sizeof(__m128i);
}
src += 4 * src_stride;
}
}
static void
lp_tile_b8g8r8a8_unorm_unswizzle_4ub_sse2(const uint8_t * restrict src,
uint8_t * restrict dst, unsigned dst_stride,
unsigned x0, unsigned y0)
{
unsigned int x, y;
const __m128i *src128 = (const __m128i *) src;
dst += y0 * dst_stride;
dst += x0 * sizeof(uint32_t);
for (y = 0; y < TILE_SIZE; y += 4) {
const uint8_t *dst_row = dst;
for (x = 0; x < TILE_SIZE; x += 4) {
unswz4( &src128[2], /* b */
&src128[1], /* g */
&src128[0], /* r */
&src128[3], /* a */
(__m128i *) (dst_row + 0 * dst_stride),
(__m128i *) (dst_row + 1 * dst_stride),
(__m128i *) (dst_row + 2 * dst_stride),
(__m128i *) (dst_row + 3 * dst_stride));
src128 += 4;
dst_row += sizeof(__m128i);;
}
dst += 4 * dst_stride;
}
}
static void
lp_tile_b8g8r8x8_unorm_swizzle_4ub_sse2(uint8_t * restrict dst,
const uint8_t * restrict src, unsigned src_stride,
unsigned x0, unsigned y0)
{
__m128i *dst128 = (__m128i *) dst;
unsigned x, y;
src += y0 * src_stride;
src += x0 * sizeof(uint32_t);
for (y = 0; y < TILE_SIZE; y += 4) {
const uint8_t *src_row = src;
for (x = 0; x < TILE_SIZE; x += 4) {
swz4((const __m128i *) (src_row + 0 * src_stride),
(const __m128i *) (src_row + 1 * src_stride),
(const __m128i *) (src_row + 2 * src_stride),
(const __m128i *) (src_row + 3 * src_stride),
dst128 + 2, /* b */
dst128 + 1, /* g */
dst128 + 0, /* r */
dst128 + 3); /* a */
dst128 += 4;
src_row += sizeof(__m128i);
}
src += 4 * src_stride;
}
}
static void
lp_tile_b8g8r8x8_unorm_unswizzle_4ub_sse2(const uint8_t * restrict src,
uint8_t * restrict dst, unsigned dst_stride,
unsigned x0, unsigned y0)
{
unsigned int x, y;
const __m128i *src128 = (const __m128i *) src;
dst += y0 * dst_stride;
dst += x0 * sizeof(uint32_t);
for (y = 0; y < TILE_SIZE; y += 4) {
const uint8_t *dst_row = dst;
for (x = 0; x < TILE_SIZE; x += 4) {
unswz4( &src128[2], /* b */
&src128[1], /* g */
&src128[0], /* r */
&src128[3], /* a */
(__m128i *) (dst_row + 0 * dst_stride),
(__m128i *) (dst_row + 1 * dst_stride),
(__m128i *) (dst_row + 2 * dst_stride),
(__m128i *) (dst_row + 3 * dst_stride));
src128 += 4;
dst_row += sizeof(__m128i);;
}
dst += 4 * dst_stride;
}
}
#endif /* PIPE_ARCH_SSE */
'''
def generate_swizzle(formats, dst_channel, dst_native_type, dst_suffix):
    '''Generate the dispatch function to read pixels from any format.'''
    # Per-format swizzle implementations.
    for format in formats:
        if is_format_supported(format):
            generate_format_read(format, dst_channel, dst_native_type, dst_suffix)
    # Runtime dispatcher switching on the pipe_format enum.
    print 'void'
    print 'lp_tile_swizzle_%s(enum pipe_format format, %s *dst, const void *src, unsigned src_stride, unsigned x, unsigned y)' % (dst_suffix, dst_native_type)
    print '{'
    print ' void (*func)(%s * restrict dst, const uint8_t * restrict src, unsigned src_stride, unsigned x0, unsigned y0);' % dst_native_type
    print '#ifdef DEBUG'
    print ' lp_tile_swizzle_count += 1;'
    print '#endif'
    print ' switch(format) {'
    for format in formats:
        if is_format_supported(format):
            print ' case %s:' % format.name
            func_name = 'lp_tile_%s_swizzle_%s' % (format.short_name(), dst_suffix)
            # Prefer the hand-written SSE2 version for BGRA/BGRX at runtime.
            if format.name == 'PIPE_FORMAT_B8G8R8A8_UNORM' or format.name == 'PIPE_FORMAT_B8G8R8X8_UNORM':
                print '#ifdef PIPE_ARCH_SSE'
                print ' func = util_cpu_caps.has_sse2 ? %s_sse2 : %s;' % (func_name, func_name)
                print '#else'
                print ' func = %s;' % (func_name,)
                print '#endif'
            else:
                print ' func = %s;' % (func_name,)
            print ' break;'
    print ' default:'
    print ' debug_printf("%s: unsupported format %s\\n", __FUNCTION__, util_format_name(format));'
    print ' return;'
    print ' }'
    print ' func(dst, (const uint8_t *)src, src_stride, x, y);'
    print '}'
    print
def generate_unswizzle(formats, src_channel, src_native_type, src_suffix):
    '''Generate the dispatch function to write pixels to any format.'''
    # Per-format unswizzle implementations.
    for format in formats:
        if is_format_supported(format):
            generate_format_write(format, src_channel, src_native_type, src_suffix)
    # Runtime dispatcher switching on the pipe_format enum.
    print 'void'
    print 'lp_tile_unswizzle_%s(enum pipe_format format, const %s *src, void *dst, unsigned dst_stride, unsigned x, unsigned y)' % (src_suffix, src_native_type)
    print '{'
    print ' void (*func)(const %s * restrict src, uint8_t * restrict dst, unsigned dst_stride, unsigned x0, unsigned y0);' % src_native_type
    print '#ifdef DEBUG'
    print ' lp_tile_unswizzle_count += 1;'
    print '#endif'
    print ' switch(format) {'
    for format in formats:
        if is_format_supported(format):
            print ' case %s:' % format.name
            func_name = 'lp_tile_%s_unswizzle_%s' % (format.short_name(), src_suffix)
            # Prefer the hand-written SSE2 version for BGRA/BGRX at runtime.
            if format.name == 'PIPE_FORMAT_B8G8R8A8_UNORM' or format.name == 'PIPE_FORMAT_B8G8R8X8_UNORM':
                print '#ifdef PIPE_ARCH_SSE'
                print ' func = util_cpu_caps.has_sse2 ? %s_sse2 : %s;' % (func_name, func_name)
                print '#else'
                print ' func = %s;' % (func_name,)
                print '#endif'
            else:
                print ' func = %s;' % (func_name,)
            print ' break;'
    print ' default:'
    print ' debug_printf("%s: unsupported format %s\\n", __FUNCTION__, util_format_name(format));'
    print ' return;'
    print ' }'
    print ' func(src, (uint8_t *)dst, dst_stride, x, y);'
    print '}'
    print
def main():
    '''Parse the format CSV file(s) named on the command line and emit the
    generated C source (lp_tile_soa.c) on stdout.'''
    formats = []
    for arg in sys.argv[1:]:
        formats.extend(parse(arg))
    print '/* This file is autogenerated by lp_tile_soa.py from u_format.csv. Do not edit directly. */'
    print
    # This will print the copyright message on the top of this file
    print CopyRight.strip()
    print
    print '#include "pipe/p_compiler.h"'
    print '#include "util/u_math.h"'
    print '#include "util/u_format.h"'
    print '#include "util/u_format_r11g11b10f.h"'
    print '#include "util/u_format_rgb9e5.h"'
    print '#include "util/u_half.h"'
    print '#include "util/u_cpu_detect.h"'
    print '#include "lp_tile_soa.h"'
    print
    print '#ifdef DEBUG'
    print 'unsigned lp_tile_unswizzle_count = 0;'
    print 'unsigned lp_tile_swizzle_count = 0;'
    print '#endif'
    print
    # Lookup tables describing the tiled memory layout.
    print 'const unsigned char'
    print 'tile_offset[TILE_VECTOR_HEIGHT][TILE_VECTOR_WIDTH] = {'
    print ' { 0, 1, 4, 5},'
    print ' { 2, 3, 6, 7},'
    print ' { 8, 9, 12, 13},'
    print ' { 10, 11, 14, 15}'
    print '};'
    print
    print '/* Note: these lookup tables could be replaced with some'
    print ' * bit-twiddling code, but this is a little faster.'
    print ' */'
    print 'static unsigned tile_x_offset[TILE_VECTOR_WIDTH * TILE_VECTOR_HEIGHT] = {'
    print ' 0, 1, 0, 1, 2, 3, 2, 3,'
    print ' 0, 1, 0, 1, 2, 3, 2, 3'
    print '};'
    print
    print 'static unsigned tile_y_offset[TILE_VECTOR_WIDTH * TILE_VECTOR_HEIGHT] = {'
    print ' 0, 0, 1, 1, 0, 0, 1, 1,'
    print ' 2, 2, 3, 3, 2, 2, 3, 3'
    print '};'
    print
    generate_sse2()
    # All generated functions convert to/from 8-bit unsigned normalized rgba.
    channel = Channel(UNSIGNED, True, False, 8)
    native_type = 'uint8_t'
    suffix = '4ub'
    generate_swizzle(formats, channel, native_type, suffix)
    generate_unswizzle(formats, channel, native_type, suffix)
# Script entry point: writes the generated C source to stdout.
if __name__ == '__main__':
    main()
|
jasonabele/gnuradio | refs/heads/rfx2200 | gnuradio-core/src/python/gnuradio/blks2impl/dqpsk2.py | 9 | #
# Copyright 2009,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# See gnuradio-examples/python/digital for examples
"""
differential QPSK modulation and demodulation.
"""
from gnuradio import gr, gru, modulation_utils2
from math import pi, sqrt
import psk
import cmath
from pprint import pprint
# default values (used in __init__ and add_options)
_def_samples_per_symbol = 2
_def_excess_bw = 0.35        # RRC excess bandwidth (roll-off factor)
_def_gray_code = True
_def_verbose = False
_def_log = False
_def_freq_alpha = 0.010      # frequency-lock loop alpha gain
_def_phase_alpha = 0.01      # Costas loop alpha gain
_def_timing_alpha = 0.100    # symbol-timing loop alpha gain
_def_timing_beta = 0.010     # symbol-timing loop beta gain
_def_timing_max_dev = 1.5    # max symbol-timing rate deviation
# /////////////////////////////////////////////////////////////////////////////
# DQPSK modulator
# /////////////////////////////////////////////////////////////////////////////
class dqpsk2_mod(gr.hier_block2):

    def __init__(self,
                 samples_per_symbol=_def_samples_per_symbol,
                 excess_bw=_def_excess_bw,
                 gray_code=_def_gray_code,
                 verbose=_def_verbose,
                 log=_def_log):
        """
        Hierarchical block for RRC-filtered DQPSK modulation.

        The input is a byte stream (unsigned char) and the
        output is the complex modulated signal at baseband.

        @param samples_per_symbol: samples per symbol >= 2
        @type samples_per_symbol: integer
        @param excess_bw: Root-raised cosine filter excess bandwidth
        @type excess_bw: float
        @param gray_code: Tell modulator to Gray code the bits
        @type gray_code: bool
        @param verbose: Print information about modulator?
        @type verbose: bool
        @param log: Print modulation data to files?
        @type log: bool
        """
        gr.hier_block2.__init__(self, "dqpsk2_mod",
                                gr.io_signature(1, 1, gr.sizeof_char), # Input signature
                                gr.io_signature(1, 1, gr.sizeof_gr_complex)) # Output signature
        self._samples_per_symbol = samples_per_symbol
        self._excess_bw = excess_bw
        self._gray_code = gray_code
        if samples_per_symbol < 2:
            raise TypeError, ("sbp must be >= 2, is %f" % samples_per_symbol)
        # NOTE: ntaps is recomputed below once nfilts is chosen.
        ntaps = 11 * samples_per_symbol
        arity = pow(2,self.bits_per_symbol())
        # turn bytes into k-bit vectors
        self.bytes2chunks = \
            gr.packed_to_unpacked_bb(self.bits_per_symbol(), gr.GR_MSB_FIRST)
        if self._gray_code:
            self.symbol_mapper = gr.map_bb(psk.binary_to_gray[arity])
        else:
            self.symbol_mapper = gr.map_bb(psk.binary_to_ungray[arity])
        self.diffenc = gr.diff_encoder_bb(arity)
        # Rotate the constellation by 45 degrees (multiply by e^{j*pi/4}).
        rot = .707 + .707j
        rotated_const = map(lambda pt: pt * rot, psk.constellation[arity])
        self.chunks2symbols = gr.chunks_to_symbols_bc(rotated_const)
        # pulse shaping filter
        nfilts = 32
        ntaps = 11 * int(nfilts * self._samples_per_symbol) # make nfilts filters of ntaps each
        self.rrc_taps = gr.firdes.root_raised_cosine(
            nfilts, # gain
            nfilts, # sampling rate based on 32 filters in resampler
            1.0, # symbol rate
            self._excess_bw, # excess bandwidth (roll-off factor)
            ntaps)
        self.rrc_filter = gr.pfb_arb_resampler_ccf(self._samples_per_symbol, self.rrc_taps)
        if verbose:
            self._print_verbage()
        if log:
            self._setup_logging()
        # Connect & Initialize base class
        self.connect(self, self.bytes2chunks, self.symbol_mapper, self.diffenc,
                     self.chunks2symbols, self.rrc_filter, self)

    def samples_per_symbol(self):
        # Accessor for the configured samples-per-symbol value.
        return self._samples_per_symbol

    def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
        return 2
    bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM

    def _print_verbage(self):
        # Print a summary of the modulator configuration to stdout.
        print "\nModulator:"
        print "bits per symbol: %d" % self.bits_per_symbol()
        print "Gray code: %s" % self._gray_code
        print "RRS roll-off factor: %f" % self._excess_bw

    def _setup_logging(self):
        # Tap every stage of the chain into a file sink for offline analysis.
        print "Modulation logging turned on."
        self.connect(self.bytes2chunks,
                     gr.file_sink(gr.sizeof_char, "tx_bytes2chunks.dat"))
        self.connect(self.symbol_mapper,
                     gr.file_sink(gr.sizeof_char, "tx_graycoder.dat"))
        self.connect(self.diffenc,
                     gr.file_sink(gr.sizeof_char, "tx_diffenc.dat"))
        self.connect(self.chunks2symbols,
                     gr.file_sink(gr.sizeof_gr_complex, "tx_chunks2symbols.dat"))
        self.connect(self.rrc_filter,
                     gr.file_sink(gr.sizeof_gr_complex, "tx_rrc_filter.dat"))

    def add_options(parser):
        """
        Adds QPSK modulation-specific options to the standard parser
        """
        parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
                          help="set RRC excess bandwith factor [default=%default] (PSK)")
        parser.add_option("", "--no-gray-code", dest="gray_code",
                          action="store_false", default=_def_gray_code,
                          help="disable gray coding on modulated bits (PSK)")
    add_options=staticmethod(add_options)

    def extract_kwargs_from_options(options):
        """
        Given command line options, create dictionary suitable for passing to __init__
        """
        return modulation_utils2.extract_kwargs_from_options(dqpsk2_mod.__init__,
                                                             ('self',), options)
    extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
# /////////////////////////////////////////////////////////////////////////////
# DQPSK demodulator
#
# Differentially coherent detection of differentially encoded qpsk
# /////////////////////////////////////////////////////////////////////////////
class dqpsk2_demod(gr.hier_block2):

    def __init__(self,
                 samples_per_symbol=_def_samples_per_symbol,
                 excess_bw=_def_excess_bw,
                 freq_alpha=_def_freq_alpha,
                 phase_alpha=_def_phase_alpha,
                 timing_alpha=_def_timing_alpha,
                 timing_max_dev=_def_timing_max_dev,
                 gray_code=_def_gray_code,
                 verbose=_def_verbose,
                 log=_def_log,
                 sync_out=False):
        """
        Hierarchical block for RRC-filtered DQPSK demodulation

        The input is the complex modulated signal at baseband.
        The output is a stream of bits packed 1 bit per byte (LSB)

        @param samples_per_symbol: samples per symbol >= 2
        @type samples_per_symbol: float
        @param excess_bw: Root-raised cosine filter excess bandwidth
        @type excess_bw: float
        @param freq_alpha: loop filter gain for frequency recovery
        @type freq_alpha: float
        @param phase_alpha: loop filter gain
        @type phase_alpha: float
        @param timing_alpha: timing loop alpha gain
        @type timing_alpha: float
        @param timing_max_dev: timing loop maximum rate deviations
        @type timing_max_dev: float
        @param gray_code: Tell modulator to Gray code the bits
        @type gray_code: bool
        @param verbose: Print information about modulator?
        @type verbose: bool
        @param log: Print modulation data to files?
        @type log: bool
        @param sync_out: Output a sync signal on :1?
        @type sync_out: bool
        """
        # Choose between a bits-only output or bits plus a sync tap on :1.
        if sync_out: io_sig_out = gr.io_signaturev(2, 2, (gr.sizeof_char, gr.sizeof_gr_complex))
        else: io_sig_out = gr.io_signature(1, 1, gr.sizeof_char)
        gr.hier_block2.__init__(self, "dqpsk2_demod",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
                                io_sig_out) # Output signature
        self._samples_per_symbol = samples_per_symbol
        self._excess_bw = excess_bw
        self._freq_alpha = freq_alpha
        # Standard critically-damped loop relation: beta = alpha^2 / 4.
        self._freq_beta = 0.25*self._freq_alpha**2
        self._phase_alpha = phase_alpha
        self._timing_alpha = timing_alpha
        self._timing_beta = _def_timing_beta
        self._timing_max_dev=timing_max_dev
        self._gray_code = gray_code
        if samples_per_symbol < 2:
            raise TypeError, "sbp must be >= 2, is %d" % samples_per_symbol
        arity = pow(2,self.bits_per_symbol())
        # Automatic gain control
        self.agc = gr.agc2_cc(0.6e-1, 1e-3, 1, 1, 100)
        #self.agc = gr.feedforward_agc_cc(16, 2.0)
        # Frequency correction
        self.freq_recov = gr.fll_band_edge_cc(self._samples_per_symbol, self._excess_bw,
                                              11*int(self._samples_per_symbol),
                                              self._freq_alpha, self._freq_beta)
        # symbol timing recovery with RRC data filter
        nfilts = 32
        ntaps = 11 * int(samples_per_symbol*nfilts)
        taps = gr.firdes.root_raised_cosine(nfilts, nfilts,
                                            1.0/float(self._samples_per_symbol),
                                            self._excess_bw, ntaps)
        self.time_recov = gr.pfb_clock_sync_ccf(self._samples_per_symbol,
                                                self._timing_alpha,
                                                taps, nfilts, nfilts/2, self._timing_max_dev)
        self.time_recov.set_beta(self._timing_beta)
        # Perform phase / fine frequency correction
        self._phase_beta = 0.25 * self._phase_alpha * self._phase_alpha
        # Allow a frequency swing of +/- half of the sample rate
        fmin = -0.5
        fmax = 0.5
        self.phase_recov = gr.costas_loop_cc(self._phase_alpha,
                                             self._phase_beta,
                                             fmax, fmin, arity)
        # Perform Differential decoding on the constellation
        self.diffdec = gr.diff_phasor_cc()
        # find closest constellation point
        rot = 1
        rotated_const = map(lambda pt: pt * rot, psk.constellation[arity])
        self.slicer = gr.constellation_decoder_cb(rotated_const, range(arity))
        if self._gray_code:
            self.symbol_mapper = gr.map_bb(psk.gray_to_binary[arity])
        else:
            self.symbol_mapper = gr.map_bb(psk.ungray_to_binary[arity])
        # unpack the k bit vector into a stream of bits
        self.unpack = gr.unpack_k_bits_bb(self.bits_per_symbol())
        if verbose:
            self._print_verbage()
        if log:
            self._setup_logging()
        # Connect
        self.connect(self, self.agc,
                     self.freq_recov, self.time_recov, self.phase_recov,
                     self.diffdec, self.slicer, self.symbol_mapper, self.unpack, self)
        if sync_out: self.connect(self.time_recov, (self, 1))

    def samples_per_symbol(self):
        # Accessor for the configured samples-per-symbol value.
        return self._samples_per_symbol

    def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
        return 2
    bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM

    def _print_verbage(self):
        # Print a summary of the demodulator configuration to stdout.
        print "\nDemodulator:"
        print "bits per symbol: %d" % self.bits_per_symbol()
        print "Gray code: %s" % self._gray_code
        print "RRC roll-off factor: %.2f" % self._excess_bw
        print "FLL gain: %.2f" % self._freq_alpha
        print "Timing alpha gain: %.2f" % self._timing_alpha
        print "Timing beta gain: %.2f" % self._timing_beta
        print "Timing max dev: %.2f" % self._timing_max_dev
        print "Phase track alpha: %.2e" % self._phase_alpha
        print "Phase track beta: %.2e" % self._phase_beta

    def _setup_logging(self):
        # Tap every stage of the chain into a file sink for offline analysis.
        print "Modulation logging turned on."
        self.connect(self.agc,
                     gr.file_sink(gr.sizeof_gr_complex, "rx_agc.dat"))
        self.connect(self.freq_recov,
                     gr.file_sink(gr.sizeof_gr_complex, "rx_freq_recov.dat"))
        self.connect(self.time_recov,
                     gr.file_sink(gr.sizeof_gr_complex, "rx_time_recov.dat"))
        self.connect(self.phase_recov,
                     gr.file_sink(gr.sizeof_gr_complex, "rx_phase_recov.dat"))
        self.connect(self.diffdec,
                     gr.file_sink(gr.sizeof_gr_complex, "rx_diffdec.dat"))
        self.connect(self.slicer,
                     gr.file_sink(gr.sizeof_char, "rx_slicer.dat"))
        self.connect(self.symbol_mapper,
                     gr.file_sink(gr.sizeof_char, "rx_gray_decoder.dat"))
        self.connect(self.unpack,
                     gr.file_sink(gr.sizeof_char, "rx_unpack.dat"))

    def add_options(parser):
        """
        Adds DQPSK demodulation-specific options to the standard parser
        """
        parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
                          help="set RRC excess bandwith factor [default=%default] (PSK)")
        parser.add_option("", "--no-gray-code", dest="gray_code",
                          action="store_false", default=_def_gray_code,
                          help="disable gray coding on modulated bits (PSK)")
        parser.add_option("", "--freq-alpha", type="float", default=_def_freq_alpha,
                          help="set frequency lock loop alpha gain value [default=%default] (PSK)")
        parser.add_option("", "--phase-alpha", type="float", default=_def_phase_alpha,
                          help="set phase tracking loop alpha value [default=%default] (PSK)")
        parser.add_option("", "--timing-alpha", type="float", default=_def_timing_alpha,
                          help="set timing symbol sync loop gain alpha value [default=%default] (GMSK/PSK)")
        parser.add_option("", "--timing-beta", type="float", default=_def_timing_beta,
                          help="set timing symbol sync loop gain beta value [default=%default] (GMSK/PSK)")
        parser.add_option("", "--timing-max-dev", type="float", default=_def_timing_max_dev,
                          help="set timing symbol sync loop maximum deviation [default=%default] (GMSK/PSK)")
    add_options=staticmethod(add_options)

    def extract_kwargs_from_options(options):
        """
        Given command line options, create dictionary suitable for passing to __init__
        """
        return modulation_utils2.extract_kwargs_from_options(
            dqpsk2_demod.__init__, ('self',), options)
    extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
#
# Add these to the mod/demod registry so generic transmit/receive tools can
# instantiate them by the name 'dqpsk2'.
#
modulation_utils2.add_type_1_mod('dqpsk2', dqpsk2_mod)
modulation_utils2.add_type_1_demod('dqpsk2', dqpsk2_demod)
|
mlskit/astromlskit | refs/heads/master | DOE/doefront1.py | 3 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'fullfactorial.ui'
#
# Created: Wed Apr 08 07:09:29 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 compatibility shim: older (API-1) builds expose QString.fromUtf8;
# API-2 builds use plain Python unicode strings, so fall back to identity.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
# Translation helper: pass the UTF-8 encoding argument only on PyQt4 builds
# that still require it.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    """UI builder for the "Full Factorial" DOE form (originally generated
    by pyuic4 from fullfactorial.ui)."""

    def setupUi(self, Form):
        """Create and position all widgets on *Form*."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(240, 182)
        # "DOE Name" group box holding the name line edit.
        self.groupBox = QtGui.QGroupBox(Form)
        self.groupBox.setGeometry(QtCore.QRect(10, 10, 221, 61))
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.lineEdit = QtGui.QLineEdit(self.groupBox)
        self.lineEdit.setGeometry(QtCore.QRect(40, 20, 141, 20))
        self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
        # Action buttons, created in the same order as the generated code.
        for attr, rect in (("pushButton_3", (40, 140, 161, 23)),
                           ("pushButton_2", (40, 110, 161, 23)),
                           ("pushButton", (40, 80, 161, 23))):
            button = QtGui.QPushButton(Form)
            button.setGeometry(QtCore.QRect(*rect))
            button.setObjectName(_fromUtf8(attr))
            setattr(self, attr, button)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Apply the user-visible (translatable) strings."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.groupBox.setTitle(_translate("Form", "DOE Name", None))
        self.lineEdit.setText(_translate("Form", "Full Factorial", None))
        self.pushButton_3.setText(_translate("Form", "Start", None))
        self.pushButton_2.setText(_translate("Form", "Output Folder", None))
        self.pushButton.setText(_translate("Form", "Input Levels File", None))
|
sirpercival/kivy | refs/heads/master | kivy/input/postproc/retaintouch.py | 80 | '''
Retain Touch
============
Reuse touch to counter lost finger behavior
'''
__all__ = ('InputPostprocRetainTouch', )
from kivy.config import Config
from kivy.vector import Vector
import time
class InputPostprocRetainTouch(object):
    '''Post-processor that delays the 'end' event of a touch so the touch
    can be resumed if a new contact appears nearby shortly afterwards.
    This works around hardware/setups that briefly lose a finger.

    Configured via the Kivy config file::

        [postproc]
        retain_time = 100
        retain_distance = 50

    ``retain_distance`` is in the 0-1000 range and ``retain_time`` is in
    milliseconds.
    '''

    def __init__(self):
        # Convert config values to seconds and normalized (0-1) units.
        self.timeout = Config.getint('postproc', 'retain_time') / 1000.0
        self.distance = Config.getint('postproc', 'retain_distance') / 1000.0
        self._available = []  # recently-ended touches eligible for reuse
        self._links = {}      # new-touch uid -> retained touch it continues

    def process(self, events):
        # A zero retain_time disables the module entirely.
        if self.timeout == 0:
            return events

        now = time.time()
        for event_type, touch in events[:]:
            if not touch.is_touch:
                continue

            if event_type == 'end':
                # Hold back the 'end': park the (possibly linked) touch so
                # a nearby 'begin' can resume it before the timeout fires.
                events.remove((event_type, touch))
                retained = self._links.pop(touch.uid, None)
                if retained is None:
                    retained = touch
                retained.ud.__pp_retain_time__ = now
                self._available.append(retained)

            elif event_type == 'update':
                linked = self._links.get(touch.uid)
                if linked is not None:
                    # Forward movement through the retained touch instead.
                    linked.x = touch.x
                    linked.y = touch.y
                    linked.sx = touch.sx
                    linked.sy = touch.sy
                    events.remove((event_type, touch))
                    events.append((event_type, linked))

            elif event_type == 'begin':
                # Resume the closest parked touch of the same class that
                # lies within the configured distance, if any.
                best = None
                best_dist = float('inf')
                for candidate in self._available:
                    if candidate.__class__ != touch.__class__:
                        continue
                    dist = Vector(candidate.spos).distance(touch.spos)
                    if dist > self.distance or dist >= best_dist:
                        continue
                    best, best_dist = candidate, dist
                if best is not None:
                    self._links[touch.uid] = best
                    self._available.remove(best)
                    events.remove((event_type, touch))

        # Flush parked touches whose grace period has expired.
        for parked in self._available[:]:
            if now - parked.ud.__pp_retain_time__ > self.timeout:
                self._available.remove(parked)
                events.append(('end', parked))
        return events
|
veatch/elasticsearch-py | refs/heads/master | elasticsearch/client/cat.py | 4 | from .utils import NamespacedClient, query_params, _make_path
class CatClient(NamespacedClient):
    """Namespaced client exposing the Elasticsearch ``_cat`` (compact,
    human-readable tabular) APIs.

    Each method issues a GET against the corresponding ``/_cat`` endpoint.
    ``transport.perform_request`` returns a ``(status, body)`` tuple; only
    the body is returned to the caller.
    """

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def aliases(self, name=None, params=None):
        """
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-alias.html>`_

        :arg name: A comma-separated list of alias names to return
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'aliases', name), params=params)
        return data

    @query_params('bytes', 'h', 'help', 'local', 'master_timeout', 'v')
    def allocation(self, node_id=None, params=None):
        """
        Allocation provides a snapshot of how shards have located around the
        cluster and the state of disk usage.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-allocation.html>`_

        :arg node_id: A comma-separated list of node IDs or names to limit the
            returned information
        :arg bytes: The unit in which to display byte values, valid choices are:
            'b', 'k', 'm', 'g'
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'allocation', node_id), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def count(self, index=None, params=None):
        """
        Count provides quick access to the document count of the entire cluster,
        or individual indices.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-count.html>`_

        :arg index: A comma-separated list of index names to limit the returned
            information
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'count', index), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'ts', 'v')
    def health(self, params=None):
        """
        health is a terse, one-line representation of the same information from
        :meth:`~elasticsearch.client.cluster.ClusterClient.health` API
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-health.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg ts: Set to false to disable timestamping, default True
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/health',
            params=params)
        return data

    @query_params('help')
    def help(self, params=None):
        """
        A simple help for the cat api.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat.html>`_

        :arg help: Return help information, default False
        """
        _, data = self.transport.perform_request('GET', '/_cat', params=params)
        return data

    @query_params('bytes', 'h', 'help', 'local', 'master_timeout', 'pri', 'v')
    def indices(self, index=None, params=None):
        """
        The indices command provides a cross-section of each index.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html>`_

        :arg index: A comma-separated list of index names to limit the returned
            information
        :arg bytes: The unit in which to display byte values, valid choices are:
            'b', 'k', 'm', 'g'
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg pri: Set to true to return stats only for primary shards, default
            False
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'indices', index), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def master(self, params=None):
        """
        Displays the master's node ID, bound IP address, and node name.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-master.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/master',
            params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def nodes(self, params=None):
        """
        The nodes command shows the cluster topology.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodes.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/nodes',
            params=params)
        return data

    @query_params('bytes', 'h', 'help', 'master_timeout', 'v')
    def recovery(self, index=None, params=None):
        """
        recovery is a view of shard replication.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-recovery.html>`_

        :arg index: A comma-separated list of index names to limit the returned
            information
        :arg bytes: The unit in which to display byte values, valid choices are:
            'b', 'k', 'm', 'g'
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'recovery', index), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def shards(self, index=None, params=None):
        """
        The shards command is the detailed view of what nodes contain which shards.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-shards.html>`_

        :arg index: A comma-separated list of index names to limit the returned
            information
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'shards', index), params=params)
        return data

    @query_params('h', 'help', 'v')
    def segments(self, index=None, params=None):
        """
        The segments command is the detailed view of Lucene segments per index.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-segments.html>`_

        :arg index: A comma-separated list of index names to limit the returned
            information
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'segments', index), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def pending_tasks(self, params=None):
        """
        pending_tasks provides the same information as the
        :meth:`~elasticsearch.client.cluster.ClusterClient.pending_tasks` API
        in a convenient tabular format.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-pending-tasks.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/pending_tasks',
            params=params)
        return data

    @query_params('full_id', 'h', 'help', 'local', 'master_timeout', 'v')
    def thread_pool(self, params=None):
        """
        Get information about thread pools.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-thread-pool.html>`_

        :arg full_id: Enables displaying the complete node ids, default False
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/thread_pool',
            params=params)
        return data

    @query_params('bytes', 'h', 'help', 'local', 'master_timeout', 'v')
    def fielddata(self, fields=None, params=None):
        """
        Shows information about currently loaded fielddata on a per-node basis.
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-fielddata.html>`_

        :arg fields: A comma-separated list of fields to return the fielddata
            size
        :arg bytes: The unit in which to display byte values, valid choices are:
            'b', 'k', 'm', 'g'
        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', _make_path('_cat',
            'fielddata', fields), params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def plugins(self, params=None):
        """
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-plugins.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/plugins',
            params=params)
        return data

    @query_params('h', 'help', 'local', 'master_timeout', 'v')
    def nodeattrs(self, params=None):
        """
        `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodeattrs.html>`_

        :arg h: Comma-separated list of column names to display
        :arg help: Return help information, default False
        :arg local: Return local information, do not retrieve the state from
            master node (default: false)
        :arg master_timeout: Explicit operation timeout for connection to master
            node
        :arg v: Verbose mode. Display column headers, default True
        """
        _, data = self.transport.perform_request('GET', '/_cat/nodeattrs',
            params=params)
        return data
|
hammerlab/varlens | refs/heads/master | varlens/reads_util.py | 2 | # Copyright (c) 2015. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
from .read_source import ReadSource
from . import util
# Boolean pysam read (AlignedSegment) attributes that can be filtered on
# directly (positively or, with a "not_" prefix, negatively).
BOOLEAN_PROPERTIES = """
is_paired is_proper_pair is_qcfail is_read1 is_read2 is_reverse is_secondary
is_unmapped mate_is_reverse mate_is_unmapped is_duplicate
""".split()

# String-valued read attributes: support equality and substring filters.
STRING_PROPERTIES = """
cigarstring query_alignment_sequence query_name
""".split()

# Integer-valued read attributes: support equality, min and max filters.
INT_PROPERTIES = """
inferred_length mapping_quality query_alignment_length query_alignment_start
query_length reference_length reference_start template_length
""".split()

# name -> (type, help, filter function)
READ_FILTERS = collections.OrderedDict()
for prop in BOOLEAN_PROPERTIES:
    READ_FILTERS[prop] = (
        bool,
        "Only reads where %s is True" % prop,
        # functools.partial binds prop at definition time, avoiding the
        # late-binding closure pitfall of a bare lambda in a loop.
        functools.partial(
            (lambda field_name, parsed_value, read:
                bool(getattr(read, field_name))),
            prop)
    )
    READ_FILTERS["not_" + prop] = (
        bool,
        "Only reads where %s is False" % prop,
        functools.partial(
            (lambda field_name, parsed_value, read:
                not getattr(read, field_name)),
            prop)
    )
def field_contains(field_name, parsed_value, read):
    """Filter predicate: True when the read attribute named ``field_name``
    is non-None and contains ``parsed_value`` (substring/membership test).
    """
    attribute_value = getattr(read, field_name)
    if attribute_value is None:
        return False
    return parsed_value in attribute_value
# String-valued attributes get exact-match and substring ("_contains")
# filters; functools.partial binds prop at definition time to avoid the
# late-binding closure pitfall of a bare lambda in a loop.
for prop in STRING_PROPERTIES:
    READ_FILTERS["%s" % prop] = (
        str,
        "Only reads with the specified %s" % prop,
        functools.partial(
            (lambda field_name, parsed_value, read:
                getattr(read, field_name) == parsed_value),
            prop)
    )
    READ_FILTERS["%s_contains" % prop] = (
        str,
        "Only reads where %s contains the given string" % prop,
        functools.partial(field_contains, prop))

# Integer-valued attributes get exact-match, "min_" (>=) and "max_" (<=)
# filters.
for prop in INT_PROPERTIES:
    READ_FILTERS["%s" % prop] = (
        int,
        "Only reads with the specified %s" % prop,
        functools.partial(
            (lambda field_name, parsed_value, read:
                getattr(read, field_name) == parsed_value),
            prop)
    )
    READ_FILTERS["min_%s" % prop] = (
        int,
        "Only reads where %s >=N" % prop,
        functools.partial(
            (lambda field_name, parsed_value, read:
                getattr(read, field_name) >= parsed_value),
            prop)
    )
    READ_FILTERS["max_%s" % prop] = (
        int,
        "Only reads where %s <=N" % prop,
        functools.partial(
            (lambda field_name, parsed_value, read:
                getattr(read, field_name) <= parsed_value),
            prop)
    )
def add_args(parser, positional=False):
    """
    Extends a commandline argument parser with arguments for specifying
    read sources.

    :param parser: an argparse.ArgumentParser to extend in place.
    :param positional: if True, "reads" is a positional argument rather
        than the --reads option.
    """
    group = parser.add_argument_group("read loading")
    group.add_argument("reads" if positional else "--reads",
        nargs="+", default=[],
        help="Paths to bam files. Any number of paths may be specified.")

    group.add_argument(
        "--read-source-name",
        nargs="+",
        help="Names for each read source. The number of names specified "
        "must match the number of bam files. If not specified, filenames are "
        "used for names.")

    # Add filters
    group = parser.add_argument_group(
        "read filtering",
        "A number of read filters are available. See the pysam "
        "documentation (http://pysam.readthedocs.org/en/latest/api.html) "
        "for details on what these fields mean. When multiple filter "
        "options are specified, reads must match *all* filters.")
    # One option per registered filter; argparse settings depend on the
    # filter's value type (bool -> flag, int -> typed, str -> plain).
    for (name, (kind, message, function)) in READ_FILTERS.items():
        extra = {}
        if kind is bool:
            extra["action"] = "store_true"
            extra["default"] = None
        elif kind is int:
            extra["type"] = int
            extra["metavar"] = "N"
        elif kind is str:
            extra["metavar"] = "STRING"
        group.add_argument("--" + name.replace("_", "-"),
            help=message,
            **extra)
def load_from_args(args):
    """
    Given parsed commandline arguments, returns a list of ReadSource objects,
    or None if no read sources were specified.
    """
    if not args.reads:
        return None

    if args.read_source_name:
        read_source_names = util.expand(
            args.read_source_name,
            'read_source_name',
            'read source',
            len(args.reads))
    else:
        # Default names: the filenames with any shared prefix removed.
        read_source_names = util.drop_prefix(args.reads)

    # Collect filters whose corresponding flag was given; a filter entry is
    # (type, help, function) so info[-1] is the filter function, which we
    # bind to the parsed option value.
    filters = []
    for (name, info) in READ_FILTERS.items():
        value = getattr(args, name)
        if value is not None:
            filters.append(functools.partial(info[-1], value))

    return [
        load_bam(filename, name, filters)
        for (filename, name)
        in zip(args.reads, read_source_names)
    ]
def load_bam(filename, name=None, filters=None):
    """Create a ReadSource for a BAM file.

    :param filename: path to the BAM file.
    :param name: display name for the source; defaults to the filename.
    :param filters: optional list of read-filter callables. The previous
        signature used a mutable default ([]), a classic Python pitfall in
        which one list instance is shared across all calls; a None sentinel
        is backward compatible and avoids that.
    :return: a ReadSource wrapping the file.
    """
    if not name:
        name = filename
    return ReadSource(name, filename, filters if filters is not None else [])
def flatten_header(header):
    """Yield (group, row_index, key, value) tuples from a BAM-style header
    mapping of group name -> list of rows. Dict rows contribute one tuple
    per key/value pair; bare rows become a key with an empty value.
    """
    for group, rows in header.items():
        for index, row in enumerate(rows):
            if isinstance(row, dict):
                pairs = row.items()
            else:
                pairs = [(row, "")]
            for key, value in pairs:
                yield (str(group), index, str(key), str(value))
|
javierhuerta/unach-photo-server | refs/heads/master | unach_photo_server/apps.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class UnachPhotoServerConfig(AppConfig):
    """Django application configuration for the ``unach_photo_server`` app."""
    name = 'unach_photo_server'
|
jwinzer/OpenSlides | refs/heads/master | server/openslides/assignments/migrations/0001_initial.py | 9 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-02 01:22
from __future__ import unicode_literals
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import openslides.utils.models
class Migration(migrations.Migration):
    """Initial (auto-generated) migration for the assignments app.

    Creates the Assignment, AssignmentOption, AssignmentPoll,
    AssignmentRelatedUser and AssignmentVote tables plus the foreign keys
    and M2M relations between them. Auto-generated by Django; do not edit
    the operations by hand.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("core", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="Assignment",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("title", models.CharField(max_length=100)),
                ("description", models.TextField(blank=True)),
                ("open_posts", models.PositiveSmallIntegerField()),
                (
                    "poll_description_default",
                    models.CharField(blank=True, max_length=79),
                ),
                (
                    "phase",
                    models.IntegerField(
                        choices=[
                            (0, "Searching for candidates"),
                            (1, "Voting"),
                            (2, "Finished"),
                        ],
                        default=0,
                    ),
                ),
            ],
            options={
                "verbose_name": "Election",
                "default_permissions": (),
                "permissions": (
                    ("can_see", "Can see elections"),
                    ("can_nominate_other", "Can nominate another participant"),
                    ("can_nominate_self", "Can nominate oneself"),
                    ("can_manage", "Can manage elections"),
                ),
                "ordering": ("title",),
            },
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="AssignmentOption",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "candidate",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={"default_permissions": ()},
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="AssignmentPoll",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "votesvalid",
                    openslides.utils.models.MinMaxIntegerField(blank=True, null=True),
                ),
                (
                    "votesinvalid",
                    openslides.utils.models.MinMaxIntegerField(blank=True, null=True),
                ),
                (
                    "votescast",
                    openslides.utils.models.MinMaxIntegerField(blank=True, null=True),
                ),
                ("published", models.BooleanField(default=False)),
                ("yesnoabstain", models.BooleanField(default=False)),
                ("description", models.CharField(blank=True, max_length=79)),
                (
                    "assignment",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="polls",
                        to="assignments.Assignment",
                    ),
                ),
            ],
            options={"default_permissions": ()},
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="AssignmentRelatedUser",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("elected", models.BooleanField(default=False)),
                (
                    "assignment",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="assignment_related_users",
                        to="assignments.Assignment",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={"default_permissions": ()},
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="AssignmentVote",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("weight", models.IntegerField(default=1, null=True)),
                ("value", models.CharField(max_length=255, null=True)),
                (
                    "option",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="votes",
                        to="assignments.AssignmentOption",
                    ),
                ),
            ],
            options={"default_permissions": ()},
            bases=(openslides.utils.models.RESTModelMixin, models.Model),
        ),
        migrations.AddField(
            model_name="assignmentoption",
            name="poll",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="options",
                to="assignments.AssignmentPoll",
            ),
        ),
        migrations.AddField(
            model_name="assignment",
            name="related_users",
            field=models.ManyToManyField(
                through="assignments.AssignmentRelatedUser", to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="assignment",
            name="tags",
            field=models.ManyToManyField(blank=True, to="core.Tag"),
        ),
        migrations.AlterUniqueTogether(
            name="assignmentrelateduser", unique_together=set([("assignment", "user")])
        ),
    ]
|
gfreed/android_external_chromium-org | refs/heads/android-4.4 | net/tools/quic/benchmark/run_client.py | 165 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import csv
import datetime
import json
import os
import shlex
import subprocess
import sys
from optparse import OptionParser
"""Start a client to fetch web pages either using wget or using quic_client.
If --use_wget is set, it uses wget.
Usage: This invocation
run_client.py --quic_binary_dir=../../../../out/Debug \
--address=127.0.0.1 --port=5000 --infile=test_urls.json \
--delay_file=delay.csv --packets_file=packets.csv
fetches pages listed in test_urls.json from a quic server running at
127.0.0.1 on port 5000 using quic binary ../../../../out/Debug/quic_client
and stores the delay in delay.csv and the max received packet number (for
QUIC) in packets.csv.
If --use_wget is present, it will fetch the URLs using wget and ignores
the flags --address, --port, --quic_binary_dir, etc.
"""
def Timestamp(datetm=None):
    """Convert a datetime to microseconds since the Unix epoch.

    Args:
        datetm: the date and time to convert to a timestamp.
            If not set, the current UTC time is used.

    Returns:
        The timestamp in microseconds as an integer.
    """
    when = datetm or datetime.datetime.utcnow()
    delta = when - datetime.datetime.utcfromtimestamp(0)
    # Integer arithmetic (rather than total_seconds()) keeps full
    # microsecond precision.
    micros_per_second = 1000000
    return (delta.days * 86400 * micros_per_second
            + delta.seconds * micros_per_second
            + delta.microseconds)
class PageloadExperiment:
    """Fetches sets of URLs with either wget or quic_client and records
    page-load delay (and, for QUIC, the max received packet number).
    """

    def __init__(self, use_wget, quic_binary_dir, quic_server_address,
                 quic_server_port):
        """Initialize PageloadExperiment.

        Args:
            use_wget: Whether to use wget.
            quic_binary_dir: Directory for quic_binary.
            quic_server_address: IP address of quic server.
            quic_server_port: Port of the quic server.

        Raises:
            IOError: if quic_client is required but missing from
                quic_binary_dir.
        """
        self.use_wget = use_wget
        self.quic_binary_dir = quic_binary_dir
        self.quic_server_address = quic_server_address
        self.quic_server_port = quic_server_port
        if not use_wget and not os.path.isfile(quic_binary_dir + '/quic_client'):
            raise IOError('There is no quic_client in the given dir: %s.'
                          % quic_binary_dir)

    @classmethod
    def ReadPages(cls, json_file):
        """Return the list of URLs from the json_file.

        One entry of the list may contain a html link and multiple resources.
        """
        page_list = []
        with open(json_file) as f:
            data = json.load(f)
            for page in data['pages']:
                url = page['url']
                if 'resources' in page:
                    resources = page['resources']
                else:
                    resources = None
                if not resources:
                    page_list.append([url])
                else:
                    urls = [url]
                    # For url http://x.com/z/y.html, url_dir is http://x.com/z
                    url_dir = url.rsplit('/', 1)[0]
                    for resource in resources:
                        # Resources are given relative to the page's
                        # directory; build absolute URLs.
                        urls.append(url_dir + '/' + resource)
                    page_list.append(urls)
        return page_list

    def DownloadOnePage(self, urls):
        """Download a page emulated by a list of urls.

        Args:
            urls: list of URLs to fetch.

        Returns:
            A tuple (page download time, max packet number).
        """
        if self.use_wget:
            cmd = 'wget -O -'
        else:
            cmd = '%s/quic_client --port=%s --address=%s' % (
                self.quic_binary_dir, self.quic_server_port, self.quic_server_address)
        cmd_in_list = shlex.split(cmd)
        cmd_in_list.extend(urls)
        # Time the whole subprocess run, in microseconds.
        start_time = Timestamp()
        ps_proc = subprocess.Popen(cmd_in_list,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        _std_out, std_err = ps_proc.communicate()
        end_time = Timestamp()
        delta_time = end_time - start_time
        # quic_client (debug build) logs "Client: Got packet <N>" lines on
        # stderr; track the largest packet number seen. Not applicable to
        # wget, where max_packets stays 0.
        max_packets = 0
        if not self.use_wget:
            for line in std_err.splitlines():
                if line.find('Client: Got packet') >= 0:
                    elems = line.split()
                    packet_num = int(elems[4])
                    max_packets = max(max_packets, packet_num)
        return delta_time, max_packets

    def RunExperiment(self, infile, delay_file, packets_file=None, num_it=1):
        """Run the pageload experiment.

        Args:
            infile: Input json file describing the page list.
            delay_file: Output file storing delay in csv format.
            packets_file: Output file storing max packet number in csv format.
            num_it: Number of iterations to run in this experiment.
        """
        page_list = self.ReadPages(infile)
        # CSV header: fetch mode followed by the basename of each page URL.
        header = [urls[0].rsplit('/', 1)[1] for urls in page_list]
        header0 = 'wget' if self.use_wget else 'quic'
        header = [header0] + header

        plt_list = []
        packets_list = []
        for i in range(num_it):
            plt_one_row = [str(i)]
            packets_one_row = [str(i)]
            for urls in page_list:
                time_micros, num_packets = self.DownloadOnePage(urls)
                time_secs = time_micros / 1000000.0
                plt_one_row.append('%6.3f' % time_secs)
                packets_one_row.append('%5d' % num_packets)
            plt_list.append(plt_one_row)
            packets_list.append(packets_one_row)

        with open(delay_file, 'w') as f:
            csv_writer = csv.writer(f, delimiter=',')
            csv_writer.writerow(header)
            for one_row in plt_list:
                csv_writer.writerow(one_row)

        if packets_file:
            with open(packets_file, 'w') as f:
                csv_writer = csv.writer(f, delimiter=',')
                csv_writer.writerow(header)
                for one_row in packets_list:
                    csv_writer.writerow(one_row)
def main():
    """Parse command-line flags, run the pageload experiment, and write the
    delay/packets CSV outputs.
    """
    parser = OptionParser()
    parser.add_option('--use_wget', dest='use_wget', action='store_true',
                      default=False)
    # Note that only debug version generates the log containing packets
    # information.
    parser.add_option('--quic_binary_dir', dest='quic_binary_dir',
                      default='../../../../out/Debug')
    # For whatever server address you specify, you need to run the
    # quic_server on that machine and populate it with the cache containing
    # the URLs requested in the --infile.
    parser.add_option('--address', dest='quic_server_address',
                      default='127.0.0.1')
    parser.add_option('--port', dest='quic_server_port',
                      default='5002')
    parser.add_option('--delay_file', dest='delay_file', default='delay.csv')
    parser.add_option('--packets_file', dest='packets_file',
                      default='packets.csv')
    parser.add_option('--infile', dest='infile', default='test_urls.json')
    (options, _) = parser.parse_args()

    exp = PageloadExperiment(options.use_wget, options.quic_binary_dir,
                             options.quic_server_address,
                             options.quic_server_port)
    exp.RunExperiment(options.infile, options.delay_file, options.packets_file)
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    sys.exit(main())
|
tlakshman26/cinder-new-branch | refs/heads/master | cinder/tests/unit/api/contrib/test_volume_encryption_metadata.py | 23 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import webob
from cinder.api.contrib import volume_encryption_metadata
from cinder import context
from cinder import db
from cinder import test
from cinder.tests.unit.api import fakes
def return_volume_type_encryption_metadata(context, volume_type_id):
    # Stub used in place of db.sqlalchemy.api.volume_type_encryption_get:
    # ignores its arguments and returns the canned encryption metadata.
    return stub_volume_type_encryption()
def stub_volume_type_encryption():
    """Return a canned volume-type encryption metadata dict for the tests."""
    return {
        'cipher': 'cipher',
        'key_size': 256,
        'provider': 'nova.volume.encryptors.base.VolumeEncryptor',
        'volume_type_id': 'volume_type',
        'control_location': 'front-end',
    }
class VolumeEncryptionMetadataTest(test.TestCase):
    """Tests for the volume encryption metadata API extension.

    Exercises the ``/v2/<tenant>/volumes/<id>/encryption`` index and show
    endpoints with the DB-level volume-type encryption lookup stubbed to
    return the canned metadata defined at module level.
    """

    @staticmethod
    def _create_volume(context,
                       display_name='test_volume',
                       display_description='this is a test volume',
                       status='creating',
                       availability_zone='fake_az',
                       host='fake_host',
                       size=1,
                       encryption_key_id='fake_key'):
        """Create a volume object."""
        volume = {
            'size': size,
            'user_id': 'fake',
            'project_id': 'fake',
            'status': status,
            'display_name': display_name,
            'display_description': display_description,
            'attach_status': 'detached',
            'availability_zone': availability_zone,
            'host': host,
            'encryption_key_id': encryption_key_id,
        }
        # Only the id is returned; the tests address volumes by id.
        return db.volume_create(context, volume)['id']

    def setUp(self):
        """Build the controller, stub the encryption lookup and create
        one volume (destroyed again on cleanup) to run requests against.
        """
        super(VolumeEncryptionMetadataTest, self).setUp()
        self.controller = (volume_encryption_metadata.
                           VolumeEncryptionMetadataController())
        # Replace the real DB lookup with the canned module-level stub.
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       return_volume_type_encryption_metadata)
        self.ctxt = context.RequestContext('fake', 'fake')
        self.volume_id = self._create_volume(self.ctxt)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(),
                        self.volume_id)

    def test_index(self):
        """GET index returns the full encryption metadata dict."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        res_dict = json.loads(res.body)
        expected = {
            "encryption_key_id": "fake_key",
            "control_location": "front-end",
            "cipher": "cipher",
            "provider": "nova.volume.encryptors.base.VolumeEncryptor",
            "key_size": 256,
        }
        self.assertEqual(expected, res_dict)

    def test_index_bad_tenant_id(self):
        """A tenant id in the URL that mismatches the context yields 400."""
        req = webob.Request.blank('/v2/%s/volumes/%s/encryption'
                                  % ('bad-tenant-id', self.volume_id))
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(400, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'badRequest': {'code': 400,
                                   'message': 'Malformed request url'}}
        self.assertEqual(expected, res_dict)

    def test_index_bad_volume_id(self):
        """An unknown volume id yields 404 with a VolumeNotFound message."""
        bad_volume_id = 'bad_volume_id'
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % bad_volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(404, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'itemNotFound': {'code': 404,
                                     'message': 'VolumeNotFound: Volume '
                                                '%s could not be found.'
                                                % bad_volume_id}}
        self.assertEqual(expected, res_dict)

    def test_show_key(self):
        """GET show of encryption_key_id returns the raw key id string."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('fake_key', res.body)

    def test_show_control(self):
        """GET show of control_location returns the raw value."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'control_location' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('front-end', res.body)

    def test_show_provider(self):
        """GET show of provider returns the raw provider class path."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'provider' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('nova.volume.encryptors.base.VolumeEncryptor',
                         res.body)

    def test_show_bad_tenant_id(self):
        """Show with a mismatching tenant id in the URL yields 400."""
        req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
                                  'encryption_key_id' % ('bad-tenant-id',
                                                         self.volume_id))
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(400, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'badRequest': {'code': 400,
                                   'message': 'Malformed request url'}}
        self.assertEqual(expected, res_dict)

    def test_show_bad_volume_id(self):
        """Show with an unknown volume id yields 404."""
        bad_volume_id = 'bad_volume_id'
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % bad_volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(404, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'itemNotFound': {'code': 404,
                                     'message': 'VolumeNotFound: Volume '
                                                '%s could not be found.'
                                                % bad_volume_id}}
        self.assertEqual(expected, res_dict)

    def test_retrieve_key_admin(self):
        """An admin context can also read the encryption key id."""
        ctxt = context.RequestContext('fake', 'fake', is_admin=True)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('fake_key', res.body)

    def test_show_volume_not_encrypted_type(self):
        """Show returns an empty body when the volume type is unencrypted."""
        # Re-stub the lookup to report "no encryption" for this test only.
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       lambda *args, **kwargs: None)
        volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual(0, len(res.body))

    def test_index_volume_not_encrypted_type(self):
        """Index returns only a null encryption_key_id for unencrypted types."""
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       lambda *args, **kwargs: None)
        volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        res_dict = json.loads(res.body)
        expected = {
            'encryption_key_id': None
        }
        self.assertEqual(expected, res_dict)
|
fmyzjs/horizon-hacker | refs/heads/master | horizon/test/test_dashboards/dogs/puppies/urls.py | 46 | from django.conf.urls.defaults import patterns, url
from .views import IndexView
# Route the panel's root URL ('') to IndexView under the name 'index'.
urlpatterns = patterns('',
    url(r'^$', IndexView.as_view(), name='index'),
)
|
choonho/qemu-beagle | refs/heads/master | scripts/qemu-gdb.py | 12 | #!/usr/bin/python
# GDB debugging support
#
# Copyright 2012 Red Hat, Inc. and/or its affiliates
#
# Authors:
# Avi Kivity <avi@redhat.com>
#
# This work is licensed under the terms of the GNU GPL, version 2. See
# the COPYING file in the top-level directory.
#
# Contributions after 2012-01-13 are licensed under the terms of the
# GNU GPL, version 2 or (at your option) any later version.
import gdb
def isnull(ptr):
    """Return True when *ptr* compares equal to a NULL of its own type."""
    null_of_same_type = gdb.Value(0).cast(ptr.type)
    return ptr == null_of_same_type
def int128(p):
    """Reassemble a 128-bit Int128 struct value from its lo/hi halves."""
    low_half = long(p['lo'])
    high_half = long(p['hi'])
    return (high_half << 64) + low_half
def get_fs_base():
    '''Fetch %fs base value using arch_prctl(ARCH_GET_FS)'''
    # %rsp - 120 is scratch space according to the SystemV ABI
    # Save whatever is in the scratch slot before clobbering it.
    old = gdb.parse_and_eval('*(uint64_t*)($rsp - 120)')
    # 0x1003 is ARCH_GET_FS; the syscall writes the fs base into the slot.
    gdb.execute('call arch_prctl(0x1003, $rsp - 120)', False, True)
    fs_base = gdb.parse_and_eval('*(uint64_t*)($rsp - 120)')
    # Restore the scratch word so the inferior's stack is left unchanged.
    gdb.execute('set *(uint64_t*)($rsp - 120) = %s' % old, False, True)
    return fs_base
def get_glibc_pointer_guard():
    '''Fetch glibc pointer guard value'''
    # glibc stores the guard in the TCB at offset 0x30 from the %fs base
    # (x86-64); it is what PTR_MANGLE() xors pointers with.
    fs_base = get_fs_base()
    return gdb.parse_and_eval('*(uint64_t*)((uint64_t)%s + 0x30)' % fs_base)
def glibc_ptr_demangle(val, pointer_guard):
    '''Undo effect of glibc's PTR_MANGLE()'''
    # PTR_MANGLE rotates left by 0x11 bits after xor-ing the guard, so
    # reverse it: rotate right 0x11, then xor with the pointer guard.
    return gdb.parse_and_eval('(((uint64_t)%s >> 0x11) | ((uint64_t)%s << (64 - 0x11))) ^ (uint64_t)%s' % (val, val, pointer_guard))
def bt_jmpbuf(jmpbuf):
    '''Backtrace a jmpbuf'''
    # glibc jmp_buf slot indices on x86-64.
    JB_RBX = 0
    JB_RBP = 1
    JB_R12 = 2
    JB_R13 = 3
    JB_R14 = 4
    JB_R15 = 5
    JB_RSP = 6
    JB_PC = 7
    # Save the live register state so it can be restored after the bt.
    old_rbx = gdb.parse_and_eval('(uint64_t)$rbx')
    old_rbp = gdb.parse_and_eval('(uint64_t)$rbp')
    old_rsp = gdb.parse_and_eval('(uint64_t)$rsp')
    old_r12 = gdb.parse_and_eval('(uint64_t)$r12')
    old_r13 = gdb.parse_and_eval('(uint64_t)$r13')
    old_r14 = gdb.parse_and_eval('(uint64_t)$r14')
    old_r15 = gdb.parse_and_eval('(uint64_t)$r15')
    old_rip = gdb.parse_and_eval('(uint64_t)$rip')
    pointer_guard = get_glibc_pointer_guard()
    # Load the saved registers from the jmp_buf; glibc PTR_MANGLE()s
    # rbp, rsp and the PC, so those three must be demangled first.
    gdb.execute('set $rbx = %s' % jmpbuf[JB_RBX])
    gdb.execute('set $rbp = %s' % glibc_ptr_demangle(jmpbuf[JB_RBP], pointer_guard))
    gdb.execute('set $rsp = %s' % glibc_ptr_demangle(jmpbuf[JB_RSP], pointer_guard))
    gdb.execute('set $r12 = %s' % jmpbuf[JB_R12])
    gdb.execute('set $r13 = %s' % jmpbuf[JB_R13])
    gdb.execute('set $r14 = %s' % jmpbuf[JB_R14])
    gdb.execute('set $r15 = %s' % jmpbuf[JB_R15])
    gdb.execute('set $rip = %s' % glibc_ptr_demangle(jmpbuf[JB_PC], pointer_guard))
    # Backtrace from the jmp_buf's context.
    gdb.execute('bt')
    # Restore the original register state.
    gdb.execute('set $rbx = %s' % old_rbx)
    gdb.execute('set $rbp = %s' % old_rbp)
    gdb.execute('set $rsp = %s' % old_rsp)
    gdb.execute('set $r12 = %s' % old_r12)
    gdb.execute('set $r13 = %s' % old_r13)
    gdb.execute('set $r14 = %s' % old_r14)
    gdb.execute('set $r15 = %s' % old_r15)
    gdb.execute('set $rip = %s' % old_rip)
class QemuCommand(gdb.Command):
    '''Prefix for QEMU debug support commands'''
    def __init__(self):
        # The trailing True registers 'qemu' as a prefix command, so
        # subcommands like 'qemu coroutine' and 'qemu mtree' attach to it.
        gdb.Command.__init__(self, 'qemu', gdb.COMMAND_DATA,
                             gdb.COMPLETE_NONE, True)
class CoroutineCommand(gdb.Command):
    '''Display coroutine backtrace'''
    def __init__(self):
        gdb.Command.__init__(self, 'qemu coroutine', gdb.COMMAND_DATA,
                             gdb.COMPLETE_NONE)
    def invoke(self, arg, from_tty):
        """Backtrace the CoroutineUContext whose address is given as the
        single argument."""
        argv = gdb.string_to_argv(arg)
        if len(argv) != 1:
            gdb.write('usage: qemu coroutine <coroutine-pointer>\n')
            return
        # Cast the raw address to CoroutineUContext* and walk its jmp_buf.
        coroutine_pointer = gdb.parse_and_eval(argv[0]).cast(gdb.lookup_type('CoroutineUContext').pointer())
        bt_jmpbuf(coroutine_pointer['env']['__jmpbuf'])
class MtreeCommand(gdb.Command):
    '''Display the memory tree hierarchy'''
    def __init__(self):
        gdb.Command.__init__(self, 'qemu mtree', gdb.COMMAND_DATA,
                             gdb.COMPLETE_NONE)
        # FIFO of MemoryRegion pointers still to be printed as tree roots.
        self.queue = []
    def invoke(self, arg, from_tty):
        # Addresses already printed; prevents reprinting aliased regions.
        self.seen = set()
        self.queue_root('address_space_memory')
        self.queue_root('address_space_io')
        self.process_queue()
    def queue_root(self, varname):
        """Queue the root MemoryRegion of the named AddressSpace global."""
        ptr = gdb.parse_and_eval(varname)['root']
        self.queue.append(ptr)
    def process_queue(self):
        """Drain the queue, printing each region tree exactly once."""
        while self.queue:
            ptr = self.queue.pop(0)
            if long(ptr) in self.seen:
                continue
            self.print_item(ptr)
    def print_item(self, ptr, offset = gdb.Value(0), level = 0):
        """Print one MemoryRegion and, recursively, its subregions.

        *offset* is the accumulated base address of the parent; *level*
        controls indentation depth.
        """
        self.seen.add(long(ptr))
        addr = ptr['addr']
        addr += offset
        size = int128(ptr['size'])
        alias = ptr['alias']
        # Classify the region for the human-readable suffix.
        klass = ''
        if not isnull(alias):
            klass = ' (alias)'
        elif not isnull(ptr['ops']):
            klass = ' (I/O)'
        elif bool(ptr['ram']):
            klass = ' (RAM)'
        gdb.write('%s%016x-%016x %s%s (@ %s)\n'
                  % (' ' * level,
                     long(addr),
                     long(addr + (size - 1)),
                     ptr['name'].string(),
                     klass,
                     ptr,
                     ),
                  gdb.STDOUT)
        if not isnull(alias):
            gdb.write('%s alias: %s@%016x (@ %s)\n' %
                      (' ' * level,
                       alias['name'].string(),
                       ptr['alias_offset'],
                       alias,
                       ),
                      gdb.STDOUT)
            # The aliased region is printed later as its own root.
            self.queue.append(alias)
        subregion = ptr['subregions']['tqh_first']
        level += 1
        # Walk the QTAILQ of subregions, recursing with our base address.
        while not isnull(subregion):
            self.print_item(subregion, addr, level)
            subregion = subregion['subregions_link']['tqe_next']
QemuCommand()
CoroutineCommand()
MtreeCommand()
|
jbedorf/tensorflow | refs/heads/master | tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py | 10 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `ShuffleAndRepeatFusion` optimization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.ops import optimization
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class ShuffleAndRepeatFusionTest(test_base.DatasetTestBase):
  """Verifies that `shuffle(...).repeat(...)` is fused into ShuffleAndRepeat."""

  def testShuffleAndRepeatFusion(self):
    """Fused pipeline still yields two full permutations of [0, 10)."""
    # assert_next fails the test if the fused op is not the next dataset op.
    dataset = dataset_ops.Dataset.range(10).apply(
        optimization.assert_next(["ShuffleAndRepeat"])).shuffle(10).repeat(2)
    options = dataset_ops.Options()
    options.experimental_optimization.apply_default_optimizations = False
    options.experimental_optimization.shuffle_and_repeat_fusion = True
    dataset = dataset.with_options(options)
    get_next = self.getNext(dataset)

    for _ in range(2):
      results = []
      for _ in range(10):
        results.append(self.evaluate(get_next()))
      # Each repetition must be a permutation of [0, 10).
      # (list(range(10)) replaces the redundant [x for x in range(10)].)
      self.assertAllEqual(list(range(10)), sorted(results))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
|
rew4332/tensorflow | refs/heads/rew4332-patch-1 | tensorflow/contrib/learn/python/learn/dataframe/transforms/difference.py | 4 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `Transform` that performs subtraction on two `Series`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.dataframe import series
from tensorflow.contrib.learn.python.learn.dataframe import transform
from tensorflow.python.framework import ops
from tensorflow.python.ops import sparse_ops
def _negate_sparse(sparse_tensor):
  """Return a `SparseTensor` with the same sparsity pattern as
  *sparse_tensor* but with every stored value negated."""
  return ops.SparseTensor(indices=sparse_tensor.indices,
                          values=-sparse_tensor.values,
                          shape=sparse_tensor.shape)
@series.Series.register_binary_op("__sub__")
class Difference(transform.Transform):
  """Subtracts one 'Series` from another."""

  def __init__(self):
    super(Difference, self).__init__()

  @property
  def name(self):
    return "difference"

  @property
  def input_valency(self):
    # Binary op: exactly two input Series.
    return 2

  @property
  def _output_names(self):
    return "output",

  def _apply_transform(self, input_tensors, **kwargs):
    """Subtract the second input tensor from the first.

    Handles all four dense/sparse pairings: dense-dense uses plain `-`,
    mixed and sparse-sparse go through `sparse_ops.sparse_add` with the
    second operand negated.
    """
    pair_sparsity = (isinstance(input_tensors[0], ops.SparseTensor),
                     isinstance(input_tensors[1], ops.SparseTensor))
    if pair_sparsity == (False, False):
      result = input_tensors[0] - input_tensors[1]
    # note tf.sparse_add accepts the mixed cases,
    # so long as at least one input is sparse.
    elif not pair_sparsity[1]:
      result = sparse_ops.sparse_add(input_tensors[0], - input_tensors[1])
    else:
      result = sparse_ops.sparse_add(input_tensors[0],
                                     _negate_sparse(input_tensors[1]))
    # pylint: disable=not-callable
    return self.return_type(result)
|
LaurentClaessens/phystricks | refs/heads/master | testing/demonstration/phystricksVNJWooDeKdcy.py | 1 | # -*- coding: utf8 -*-
from __future__ import division
from phystricks import *
def VNJWooDeKdcy():
    """Draw two segments meeting at O and mark the angle AOB as 33 degrees.

    Produces the phystricks picture "VNJWooDeKdcy" (figure-less output).
    """
    pspict,fig = SinglePicture("VNJWooDeKdcy")
    pspict.dilatation(0.8)
    A=Point(0,0)
    O=Point(3,2)
    # B is chosen so that angle AOB measures 33 degrees.
    B=Point(1.9266624057974,-0.751559074504527)
    s1=Segment(A,O)
    s2=Segment(B,O)
    angle=AngleAOB(A,O,B)
    angle.put_mark(text="\SI{33}{\degree}",pspict=pspict)
    pspict.DrawGraphs(s1,s2,angle)
    # Emit only the picture, not a wrapping LaTeX figure environment.
    fig.no_figure()
    fig.conclude()
    fig.write_the_file()
|
biomodels/BIOMD0000000017 | refs/heads/master | BIOMD0000000017/model.py | 1 | import os
# Locate the SBML model file shipped alongside this module and read it in.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000017.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Return True when *module_name* can be imported, False otherwise."""
    try:
        __import__(module_name)
        return True
    except ImportError:
        return False
# Parse the bundled model only when python-libsbml is available, so that
# importing this module never hard-fails on the optional dependency.
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
roca-nation/microblog | refs/heads/master | profile.py | 28 | #!flask/bin/python
from werkzeug.contrib.profiler import ProfilerMiddleware
from app import app
# Wrap the app in werkzeug's profiler (reporting the 30 slowest calls per
# request) and start the development server with the debugger enabled.
app.config['PROFILE'] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
app.run(debug=True)
|
google-research/google-research | refs/heads/master | aloe/aloe/common/plot_2d.py | 1 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
import matplotlib
# Select the non-interactive Agg backend before pyplot is imported, so the
# module works on headless machines.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
def plot_heatmap(pdf_func, out_name, size=3):
  """Evaluate pdf_func on a 100x100 grid over [-size, size]^2 and save
  the resulting heatmap image to out_name."""
  resolution = 100
  axis_points = np.linspace(-size, size, resolution)
  grid_x, grid_y = np.meshgrid(axis_points, axis_points)
  # One (x, y) row per grid cell, shape (resolution**2, 2).
  coords = np.column_stack((grid_x.ravel(), grid_y.ravel()))
  scores = pdf_func(coords)
  heat = np.reshape(scores, (resolution, resolution))
  plt.imshow(heat)
  plt.axis('equal')
  plt.axis('off')
  plt.savefig(out_name, bbox_inches='tight')
  plt.close()
def plot_samples(samples, out_name, lim=None, axis=True):
  """Scatter-plot 2D samples and save the figure to out_name.

  When lim is given, both axes are clipped to [-lim, lim]; when axis is
  False, the axes are hidden entirely.
  """
  xs = samples[:, 0]
  ys = samples[:, 1]
  plt.scatter(xs, ys, marker='.')
  plt.axis('equal')
  if lim is not None:
    plt.xlim(-lim, lim)
    plt.ylim(-lim, lim)
  if not axis:
    plt.axis('off')
  plt.savefig(out_name, bbox_inches='tight')
  plt.close()
def plot_joint(dataset, samples, out_name):
  """Overlay training data (red x) and sampled points (blue dot).

  Saves two figures: out_name with a legend, and a legend-free copy whose
  filename is prefixed with 'none-' in the same directory.
  """
  # Symmetric axis limit large enough to cover the dataset's extremes.
  x = np.max(dataset)
  y = np.max(-dataset)
  z = np.ceil(max((x, y)))
  plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
  plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
  plt.legend(['training data', 'ADE sampled'])
  plt.axis('equal')
  plt.xlim(-z, z)
  plt.ylim(-z, z)
  plt.savefig(out_name, bbox_inches='tight')
  plt.close()
  # Second figure: same scatter, no legend, saved as 'none-<basename>'.
  fname = out_name.split('/')[-1]
  out_name = '/'.join(out_name.split('/')[:-1]) + '/none-' + fname
  plt.figure(figsize=(8, 8))
  plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
  plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
  plt.axis('equal')
  plt.xlim(-z, z)
  plt.ylim(-z, z)
  plt.savefig(out_name, bbox_inches='tight')
  plt.close()
|
cloudbase/nova-virtualbox | refs/heads/virtualbox_driver | nova/api/openstack/compute/plugins/v3/quota_sets.py | 1 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import strutils
import six.moves.urllib.parse as urlparse
import webob
from nova.api.openstack.compute.schemas.v3 import quota_sets
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
import nova.context
from nova import exception
from nova.i18n import _
from nova import objects
from nova import quota
# URL alias of this extension and the global quota engine singleton.
ALIAS = "os-quota-sets"
QUOTAS = quota.QUOTAS

# Per-action policy authorizers, e.g. 'compute:v3:os-quota-sets:update'.
authorize_update = extensions.extension_authorizer('compute',
                                                   'v3:%s:update' % ALIAS)
authorize_show = extensions.extension_authorizer('compute',
                                                 'v3:%s:show' % ALIAS)
authorize_delete = extensions.extension_authorizer('compute',
                                                   'v3:%s:delete' % ALIAS)
authorize_detail = extensions.extension_authorizer('compute',
                                                   'v3:%s:detail' % ALIAS)
class QuotaSetsController(wsgi.Controller):
    """API controller for showing, updating and deleting quota sets."""

    def _format_quota_set(self, project_id, quota_set):
        """Convert the quota object to a result dict."""
        if project_id:
            result = dict(id=str(project_id))
        else:
            result = {}

        # Only expose resources the quota engine actually knows about.
        for resource in QUOTAS.resources:
            if resource in quota_set:
                result[resource] = quota_set[resource]

        return dict(quota_set=result)

    def _validate_quota_limit(self, resource, limit, minimum, maximum):
        """Raise HTTPBadRequest unless minimum <= limit <= maximum.

        -1 is treated as "unlimited" on all three values.
        """
        # NOTE: -1 is a flag value for unlimited
        if limit < -1:
            msg = (_("Quota limit %(limit)s for %(resource)s "
                     "must be -1 or greater.") %
                   {'limit': limit, 'resource': resource})
            raise webob.exc.HTTPBadRequest(explanation=msg)

        def conv_inf(value):
            # Map the -1 "unlimited" sentinel to +inf so plain numeric
            # comparison handles it correctly.
            return float("inf") if value == -1 else value

        if conv_inf(limit) < conv_inf(minimum):
            msg = (_("Quota limit %(limit)s for %(resource)s must "
                     "be greater than or equal to already used and "
                     "reserved %(minimum)s.") %
                   {'limit': limit, 'resource': resource, 'minimum': minimum})
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if conv_inf(limit) > conv_inf(maximum):
            msg = (_("Quota limit %(limit)s for %(resource)s must be "
                     "less than or equal to %(maximum)s.") %
                   {'limit': limit, 'resource': resource, 'maximum': maximum})
            raise webob.exc.HTTPBadRequest(explanation=msg)

    def _get_quotas(self, context, id, user_id=None, usages=False):
        """Fetch per-user or per-project quotas.

        With usages=False the result is flattened to {resource: limit}.
        """
        if user_id:
            values = QUOTAS.get_user_quotas(context, id, user_id,
                                            usages=usages)
        else:
            values = QUOTAS.get_project_quotas(context, id, usages=usages)

        if usages:
            return values
        else:
            return {k: v['limit'] for k, v in values.items()}

    @extensions.expected_errors(403)
    def show(self, req, id):
        """Show quota limits for a project (optionally for one user)."""
        context = req.environ['nova.context']
        authorize_show(context)
        params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
        user_id = params.get('user_id', [None])[0]
        try:
            nova.context.authorize_project_context(context, id)
            return self._format_quota_set(id,
                                          self._get_quotas(context, id, user_id=user_id))
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()

    @extensions.expected_errors(403)
    def detail(self, req, id):
        """Show quota limits together with in_use/reserved usages."""
        context = req.environ['nova.context']
        authorize_detail(context)
        user_id = req.GET.get('user_id', None)
        try:
            nova.context.authorize_project_context(context, id)
            return self._format_quota_set(id, self._get_quotas(context, id,
                                                               user_id=user_id,
                                                               usages=True))
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()

    @extensions.expected_errors((400, 403))
    @validation.schema(quota_sets.update)
    def update(self, req, id, body):
        """Update quota limits; 'force' skips the usage-bounds validation."""
        context = req.environ['nova.context']
        authorize_update(context)
        project_id = id
        params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
        user_id = params.get('user_id', [None])[0]

        quota_set = body['quota_set']
        force_update = strutils.bool_from_string(quota_set.get('force',
                                                               'False'))

        try:
            settable_quotas = QUOTAS.get_settable_quotas(context, project_id,
                                                         user_id=user_id)
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()

        for key, value in body['quota_set'].iteritems():
            if key == 'force' or (not value and value != 0):
                continue
            # validate whether already used and reserved exceeds the new
            # quota, this check will be ignored if admin want to force
            # update
            value = int(value)
            if not force_update:
                minimum = settable_quotas[key]['minimum']
                maximum = settable_quotas[key]['maximum']
                self._validate_quota_limit(key, value, minimum, maximum)
            try:
                objects.Quotas.create_limit(context, project_id,
                                            key, value, user_id=user_id)
            except exception.QuotaExists:
                objects.Quotas.update_limit(context, project_id,
                                            key, value, user_id=user_id)
            except exception.AdminRequired:
                raise webob.exc.HTTPForbidden()
        # Note(gmann): Removed 'id' from update's response to make it same
        # as V2. If needed it can be added with microversion.
        return self._format_quota_set(None, self._get_quotas(context, id,
                                                             user_id=user_id))

    @extensions.expected_errors(())
    def defaults(self, req, id):
        """Show the default quota limits."""
        context = req.environ['nova.context']
        authorize_show(context)
        values = QUOTAS.get_defaults(context)
        return self._format_quota_set(id, values)

    # TODO(oomichi): Here should be 204(No Content) instead of 202 by v2.1
    # +microversions because the resource quota-set has been deleted completely
    # when returning a response.
    @extensions.expected_errors(403)
    @wsgi.response(202)
    def delete(self, req, id):
        """Reset quotas for a project (or one of its users) to defaults."""
        context = req.environ['nova.context']
        authorize_delete(context)
        params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
        user_id = params.get('user_id', [None])[0]
        try:
            nova.context.authorize_project_context(context, id)
            if user_id:
                QUOTAS.destroy_all_by_project_and_user(context,
                                                       id, user_id)
            else:
                QUOTAS.destroy_all_by_project(context, id)
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()
raise webob.exc.HTTPForbidden()
class QuotaSets(extensions.V3APIExtensionBase):
    """Quotas management support."""

    name = "Quotas"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Expose os-quota-sets with defaults/detail GET member actions."""
        member_actions = {'defaults': 'GET', 'detail': 'GET'}
        resource = extensions.ResourceExtension(ALIAS,
                                                QuotaSetsController(),
                                                member_actions=member_actions)
        return [resource]

    def get_controller_extensions(self):
        """This extension contributes no controller extensions."""
        return []
|
sgelb/impositioner | refs/heads/master | impositioner/cli.py | 1 | #!/usr/bin/env python
"""
Main entry point for command-line program, invoke as `impositioner'
"""
import math
import textwrap
from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter
from sys import exit
from typing import Dict, List, Optional

from pdfrw import PdfReader

from . import core
from . import __version__
def parse_arguments() -> Namespace:
    """Build the impositioner CLI parser and parse sys.argv.

    Returns the parsed argparse Namespace (input PDF, pages per sheet,
    output format/unit, binding side, centering, signature length,
    divider and verbosity flags).
    """
    parser = ArgumentParser(
        prog="impositioner",
        formatter_class=RawDescriptionHelpFormatter,
        description=textwrap.dedent(
            """
            Impose PDF file for booklet printing
            """
        ),
        epilog=textwrap.dedent(
            """
            Examples:
            Print 4 pages on an A4 sheet for creating an A6 booklet:
            $ %(prog)s -n 4 -f a4 input.pdf
            Create booklet with binding on right side and signatures of 20 pages:
            $ %(prog)s -b right -s 20 input.pdf
            Create booklet with custom output format. Center each page before
            combining:
            $ %(prog)s -f 209.5x209.5 -c input.pdf
            """
        ),
    )
    # positional argument
    parser.add_argument("PDF", action="store", help="PDF file")
    # optional arguments
    parser.add_argument(
        "-n",
        dest="nup",
        metavar="N",
        action="store",
        type=int,
        default="2",
        help="Pages per sheet (default: 2)",
    )
    parser.add_argument(
        "-f",
        dest="paperformat",
        action="store",
        type=str.lower,
        metavar="FORMAT",
        help="Output paper sheet format. Must be standard"
        " paper format (A4, letter, ...) or custom"
        " WIDTHxHEIGHT (default: auto)",
    )
    parser.add_argument(
        "-u",
        dest="unit",
        action="store",
        default="mm",
        choices=["cm", "inch", "mm"],
        help="Unit if using -f with custom format" " (default: mm)",
    )
    parser.add_argument(
        "-b",
        dest="binding",
        action="store",
        type=str.lower,
        choices=["left", "top", "right", "bottom"],
        default="left",
        help="Side of binding (default: left)",
    )
    parser.add_argument(
        "-c",
        dest="center_subpage",
        action="store_true",
        help="Center each page when resizing. Has no effect if"
        " output format is multiple of input format (default:"
        " center combinated pages)",
    )
    parser.add_argument(
        "-s",
        dest="signature_length",
        action="store",
        type=int,
        default=-1,
        # -1 means "auto", computed later from the page count.
        help="Signature length. Set to 0 to disable " "signatures (default: auto)",
    )
    parser.add_argument(
        "-d",
        dest="divider",
        action="store_true",
        default=False,
        help="Insert blank sheets between signature stacks to"
        " ease separation after printing",
    )
    parser.add_argument(
        "-v", dest="verbose", action="store_true", default=False, help="Verbose output"
    )
    parser.add_argument(
        "--version", action="version", version="%(prog)s {}".format(__version__)
    )

    return parser.parse_args()
def main() -> None:
    """Command-line entry point: impose the input PDF into booklet sheets.

    Parses and validates CLI arguments, pads the input to a whole number of
    signatures, imposes/merges pages, optionally inserts divider sheets and
    resizes, then writes the imposed PDF next to the input file.
    """
    args: Namespace = parse_arguments()

    # validate cli arguments
    infile: str = core.validate_infile(args.PDF)
    signature_length: int = core.validate_signature_length(args.signature_length)
    papersize: Dict[str, List[int]] = core.validate_papersize(
        args.paperformat, args.unit
    )
    pages_per_sheet: int = core.validate_pages_per_sheet(args.nup)

    # read pdf file
    inpages: List = PdfReader(infile).pages
    page_count: int = len(inpages)

    # calculate signature length, if not set manually through cli argument
    if signature_length == 0:
        # signatures are disabled, just pad to multiple of 4
        signature_length = page_count + core.reverse_remainder(page_count, 4)
    if signature_length < 0:
        # calculate signature length
        signature_length = core.calculate_signature_length(page_count)
    signature_count: int = math.ceil(page_count / signature_length)

    # pad with blank pages
    blank_pages_count: int = signature_length * signature_count - page_count
    if blank_pages_count:
        inpages.extend([core.create_blank_copy(inpages[0])] * blank_pages_count)

    # calculate output size of single page for centering content
    # (None means "do not center"; annotation fixed to Optional to match)
    output_size: Optional[List[int]] = None
    if papersize and args.center_subpage:
        output_size = core.calculate_scaled_sub_page_size(pages_per_sheet, papersize)

    # impose and merge pages, creating sheets
    sheets: List = core.impose_and_merge(
        inpages, signature_length, pages_per_sheet, output_size, args.binding
    )

    # add divider pages
    if args.divider:
        sheets = core.add_divider(sheets, signature_length)

    # resize result
    if papersize:
        sheets = core.resize(sheets, papersize)

    # print infos
    if args.verbose:
        for line in textwrap.wrap(
            "Standard paper formats: {}".format(
                ", ".join(sorted(core.paperformats.keys()))
            ),
            80,
        ):
            print(line)
        print("Total input page: {:>3}".format(page_count))
        print("Total output page: {:>3}".format(len(sheets)))
        input_size = inpages[0].MediaBox[2:]
        # distinct name: do not clobber output_size (the centering size above)
        sheet_size = sheets[0].MediaBox[2:]
        divider_count = 2 * signature_count - 2 if args.divider else 0
        print("Input size: {}x{}".format(input_size[0], input_size[1]))
        print("Output size: {}x{}".format(sheet_size[0], sheet_size[1]))
        print("Signature length: {:>3}".format(signature_length))
        print("Signature count: {:>3}".format(signature_count))
        print("Divider pages: {:>3}".format(divider_count))

    # save imposed pdf
    core.save_pdf(infile, sheets)
    print("Imposed PDF file saved to {}".format(core.create_filename(infile)))
if __name__ == "__main__":
    # main() returns None, which exit() reports as success (status 0).
    exit(main())
|
kubov/rp | refs/heads/master | scripts/extract_archive_from_universal_bin.py | 46 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# extract_archives_from_universal_bin.py - It extracts the binaries embeded into a Universal Binary Mach-o
# Copyright (C) 2012 Axel "0vercl0k" Souchet - http://www.twitter.com/0vercl0k
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
from struct import unpack
# http://hohle.net/scrap_post.php?post=197
def u(r):
    """Decode 4 bytes as a big-endian unsigned 32-bit integer."""
    (value,) = unpack('>I', r)
    return value
def dump_archive(f, offset, size, name):
    """Copy *size* bytes starting at *offset* from the open file *f*
    into a new file called *name*.

    Note: moves f's file position; the caller is expected to save and
    restore it around this call.
    """
    f.seek(offset)
    data = f.read(size)
    # 'with' guarantees the output handle is closed even if write() raises;
    # the original left it open on error.
    with open(name, 'wb') as hfile:
        hfile.write(data)
def main(argc, argv):
    """Validate the universal (fat) Mach-O binary given as argv[1] and dump
    each embedded architecture slice to dumpz/bin<i>.

    Returns -1 on usage/format error, 1 on success (Python 2 script).
    """
    if argc != 2:
        print './dump <bin>'
        return -1
    f = open(argv[1], 'rb')
    magic = f.read(4)
    # A universal binary starts with the big-endian magic 0xcafebabe.
    if u(magic) != 0xcafebabe:
        print "Your file doesn't seem to be a universal binary: %#.8x" % u(magic)
        return -1
    nb_archive = u(f.read(4))
    # Each fat_arch header: cputype, cpusubtype, offset, size, align.
    for i in range(nb_archive):
        f.read(4) #cputype
        f.read(4) #cpusubtype
        offset = u(f.read(4))
        size = u(f.read(4))
        print 'Dumping %#.8x bytes @%#.8x ' % (size, offset)
        # Remember the header position before seeking away to dump the slice.
        b = f.tell()
        # NOTE(review): assumes the 'dumpz' directory already exists -- confirm.
        dump_archive(f, offset, size, 'dumpz/bin%d' % i)
        f.seek(b)
        f.read(4) #alignement
    print 'eof'
    return 1
# Script entry point: exit with main()'s return value.
if __name__ == '__main__':
    sys.exit(main(len(sys.argv), sys.argv))
frouty/odoogoeen | refs/heads/prod | addons/point_of_sale/wizard/pos_sales_user.py | 55 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
class pos_sale_user(osv.osv_memory):
    """Transient wizard collecting a date range and salespeople for the
    'Sale by User' point-of-sale report."""
    _name = 'pos.sale.user'
    _description = 'Sale by User'
    # Wizard fields: mandatory date range plus the users to report on.
    _columns = {
        'date_start': fields.date('Date Start', required=True),
        'date_end': fields.date('Date End', required=True),
        'user_id': fields.many2many('res.users', 'sale_user_rel', 'user_id', 'uid', 'Salesperson'),
    }

    def print_report(self, cr, uid, ids, context=None):
        """
        To get the date and print the report
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return : return report
        """
        if context is None:
            context = {}
        datas = {'ids': context.get('active_ids', [])}
        # Read the wizard values of the first record and pass them to the
        # report engine as the form data.
        res = self.read(cr, uid, ids, ['date_start', 'date_end', 'user_id'], context=context)
        res = res and res[0] or {}
        datas['form'] = res
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'pos.sales.user',
            'datas': datas,
        }
# Instantiating the model registers it with the OpenERP ORM
# (pre-v8 declaration style).
pos_sale_user()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Eric-Gaudiello/tensorflow_dev | refs/heads/master | tensorflow_home/tensorflow_venv/lib/python3.4/site-packages/numpy/core/tests/test_indexing.py | 33 | from __future__ import division, absolute_import, print_function
import sys
import warnings
import functools
import numpy as np
from numpy.core.multiarray_tests import array_indexing
from itertools import product
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_raises,
assert_array_equal, assert_warns
)
try:
cdll = np.ctypeslib.load_library('multiarray', np.core.multiarray.__file__)
_HAS_CTYPE = True
except ImportError:
_HAS_CTYPE = False
class TestIndexing(TestCase):
    """Regression tests for basic ndarray indexing: None, (), Ellipsis,
    integer, boolean and fancy indices. Many cases pin the exact semantics
    introduced by the numpy 1.9 indexing rewrite (views vs. copies,
    bounds-check timing, deprecation behaviour)."""
    def test_none_index(self):
        # `None` index adds newaxis
        a = np.array([1, 2, 3])
        assert_equal(a[None], a[np.newaxis])
        assert_equal(a[None].ndim, a.ndim + 1)
    def test_empty_tuple_index(self):
        # Empty tuple index creates a view
        a = np.array([1, 2, 3])
        assert_equal(a[()], a)
        assert_(a[()].base is a)
        a = np.array(0)
        assert_(isinstance(a[()], np.int_))
        # Regression, it needs to fall through integer and fancy indexing
        # cases, so need the with statement to ignore the non-integer error.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', '', DeprecationWarning)
            a = np.array([1.])
            assert_(isinstance(a[0.], np.float_))
            a = np.array([np.array(1)], dtype=object)
            assert_(isinstance(a[0.], np.ndarray))
    def test_same_kind_index_casting(self):
        # Indexes should be cast with same-kind and not safe, even if
        # that is somewhat unsafe. So test various different code paths.
        index = np.arange(5)
        u_index = index.astype(np.uintp)
        arr = np.arange(10)
        assert_array_equal(arr[index], arr[u_index])
        arr[u_index] = np.arange(5)
        assert_array_equal(arr, np.arange(10))
        arr = np.arange(10).reshape(5, 2)
        assert_array_equal(arr[index], arr[u_index])
        arr[u_index] = np.arange(5)[:,None]
        assert_array_equal(arr, np.arange(5)[:,None].repeat(2, axis=1))
        arr = np.arange(25).reshape(5, 5)
        assert_array_equal(arr[u_index, u_index], arr[index, index])
    def test_empty_fancy_index(self):
        # Empty list index creates an empty array
        # with the same dtype (but with weird shape)
        a = np.array([1, 2, 3])
        assert_equal(a[[]], [])
        assert_equal(a[[]].dtype, a.dtype)
        # NOTE(review): `b` below is never used in the next two asserts —
        # they likely were meant to index with `a[b]` to exercise an empty
        # intp-typed index array; verify against upstream numpy.
        b = np.array([], dtype=np.intp)
        assert_equal(a[[]], [])
        assert_equal(a[[]].dtype, a.dtype)
        b = np.array([])
        assert_raises(IndexError, a.__getitem__, b)
    def test_ellipsis_index(self):
        # Ellipsis index does not create a view
        a = np.array([[1, 2, 3],
                      [4, 5, 6],
                      [7, 8, 9]])
        assert_equal(a[...], a)
        assert_(a[...].base is a)  # `a[...]` was `a` in numpy <1.9.)
        # Slicing with ellipsis can skip an
        # arbitrary number of dimensions
        assert_equal(a[0, ...], a[0])
        assert_equal(a[0, ...], a[0,:])
        assert_equal(a[..., 0], a[:, 0])
        # Slicing with ellipsis always results
        # in an array, not a scalar
        assert_equal(a[0, ..., 1], np.array(2))
        # Assignment with `(Ellipsis,)` on 0-d arrays
        b = np.array(1)
        b[(Ellipsis,)] = 2
        assert_equal(b, 2)
    def test_single_int_index(self):
        # Single integer index selects one row
        a = np.array([[1, 2, 3],
                      [4, 5, 6],
                      [7, 8, 9]])
        assert_equal(a[0], [1, 2, 3])
        assert_equal(a[-1], [7, 8, 9])
        # Index out of bounds produces IndexError
        assert_raises(IndexError, a.__getitem__, 1 << 30)
        # Index overflow produces IndexError
        assert_raises(IndexError, a.__getitem__, 1 << 64)
    def test_single_bool_index(self):
        # Single boolean index
        a = np.array([[1, 2, 3],
                      [4, 5, 6],
                      [7, 8, 9]])
        # Python boolean converts to integer
        # These are being deprecated (and test in test_deprecations)
        #assert_equal(a[True], a[1])
        #assert_equal(a[False], a[0])
        # Same with NumPy boolean scalar
        # Before DEPRECATE, this is an error (as always, but telling about
        # future change):
        assert_raises(IndexError, a.__getitem__, np.array(True))
        assert_raises(IndexError, a.__getitem__, np.array(False))
        # After DEPRECATE, this behaviour can be enabled:
        #assert_equal(a[np.array(True)], a[None])
        #assert_equal(a[np.array(False), a[None][0:0]])
    def test_boolean_indexing_onedim(self):
        # Indexing a 2-dimensional array with
        # boolean array of length one
        a = np.array([[ 0.,  0.,  0.]])
        b = np.array([ True], dtype=bool)
        assert_equal(a[b], a)
        # boolean assignment
        a[b] = 1.
        assert_equal(a, [[1., 1., 1.]])
    def test_boolean_assignment_value_mismatch(self):
        # A boolean assignment should fail when the shape of the values
        # cannot be broadcast to the subscription. (see also gh-3458)
        a = np.arange(4)
        def f(a, v):
            a[a > -1] = v
        assert_raises(ValueError, f, a, [])
        assert_raises(ValueError, f, a, [1, 2, 3])
        assert_raises(ValueError, f, a[:1], [1, 2, 3])
    def test_boolean_indexing_twodim(self):
        # Indexing a 2-dimensional array with
        # 2-dimensional boolean array
        a = np.array([[1, 2, 3],
                      [4, 5, 6],
                      [7, 8, 9]])
        b = np.array([[ True, False,  True],
                      [False,  True, False],
                      [ True, False,  True]])
        assert_equal(a[b], [1, 3, 5, 7, 9])
        assert_equal(a[b[1]], [[4, 5, 6]])
        assert_equal(a[b[0]], a[b[2]])
        # boolean assignment
        a[b] = 0
        assert_equal(a, [[0, 2, 0],
                         [4, 0, 6],
                         [0, 8, 0]])
    def test_reverse_strides_and_subspace_bufferinit(self):
        # This tests that the strides are not reversed for simple and
        # subspace fancy indexing.
        a = np.ones(5)
        b = np.zeros(5, dtype=np.intp)[::-1]
        c = np.arange(5)[::-1]
        a[b] = c
        # If the strides are not reversed, the 0 in the arange comes last.
        assert_equal(a[0], 0)
        # This also tests that the subspace buffer is initialized:
        a = np.ones((5, 2))
        c = np.arange(10).reshape(5, 2)[::-1]
        a[b, :] = c
        assert_equal(a[0], [0, 1])
    def test_reversed_strides_result_allocation(self):
        # Test a bug when calculating the output strides for a result array
        # when the subspace size was 1 (and test other cases as well)
        a = np.arange(10)[:, None]
        i = np.arange(10)[::-1]
        assert_array_equal(a[i], a[i.copy('C')])
        # NOTE(review): the array below is created but never used — upstream
        # numpy repeats the reversed-index comparison on this 2-d array;
        # likely lines were lost. TODO confirm against upstream.
        a = np.arange(20).reshape(-1, 2)
    def test_uncontiguous_subspace_assignment(self):
        # During development there was a bug activating a skip logic
        # based on ndim instead of size.
        a = np.full((3, 4, 2), -1)
        b = np.full((3, 4, 2), -1)
        a[[0, 1]] = np.arange(2 * 4 * 2).reshape(2, 4, 2).T
        b[[0, 1]] = np.arange(2 * 4 * 2).reshape(2, 4, 2).T.copy()
        assert_equal(a, b)
    def test_too_many_fancy_indices_special_case(self):
        # Just documents behaviour, this is a small limitation.
        a = np.ones((1,) * 32)  # 32 is NPY_MAXDIMS
        assert_raises(IndexError, a.__getitem__, (np.array([0]),) * 32)
    def test_scalar_array_bool(self):
        # Numpy bools can be used as boolean index (python ones as of yet not)
        a = np.array(1)
        assert_equal(a[np.bool_(True)], a[np.array(True)])
        assert_equal(a[np.bool_(False)], a[np.array(False)])
        # After deprecating bools as integers:
        #a = np.array([0,1,2])
        #assert_equal(a[True, :], a[None, :])
        #assert_equal(a[:, True], a[:, None])
        #
        #assert_(not np.may_share_memory(a, a[True, :]))
    def test_everything_returns_views(self):
        # Before `...` would return a itself.
        a = np.arange(5)
        assert_(a is not a[()])
        assert_(a is not a[...])
        assert_(a is not a[:])
    def test_broaderrors_indexing(self):
        # Mismatched fancy-index shapes must raise IndexError, not ValueError.
        a = np.zeros((5, 5))
        assert_raises(IndexError, a.__getitem__, ([0, 1], [0, 1, 2]))
        assert_raises(IndexError, a.__setitem__, ([0, 1], [0, 1, 2]), 0)
    def test_trivial_fancy_out_of_bounds(self):
        # Out-of-bounds values anywhere in a fancy index are detected,
        # both first and last positions, for get and set.
        a = np.zeros(5)
        ind = np.ones(20, dtype=np.intp)
        ind[-1] = 10
        assert_raises(IndexError, a.__getitem__, ind)
        assert_raises(IndexError, a.__setitem__, ind, 0)
        ind = np.ones(20, dtype=np.intp)
        ind[0] = 11
        assert_raises(IndexError, a.__getitem__, ind)
        assert_raises(IndexError, a.__setitem__, ind, 0)
    def test_nonbaseclass_values(self):
        class SubClass(np.ndarray):
            def __array_finalize__(self, old):
                # Have array finalize do funny things
                self.fill(99)
        a = np.zeros((5, 5))
        s = a.copy().view(type=SubClass)
        s.fill(1)
        a[[0, 1, 2, 3, 4], :] = s
        assert_((a == 1).all())
        # Subspace is last, so transposing might want to finalize
        a[:, [0, 1, 2, 3, 4]] = s
        assert_((a == 1).all())
        a.fill(0)
        a[...] = s
        assert_((a == 1).all())
    def test_subclass_writeable(self):
        # Indexing a record-array subclass must keep results writeable.
        d = np.rec.array([('NGC1001', 11), ('NGC1002', 1.), ('NGC1003', 1.)],
                         dtype=[('target', 'S20'), ('V_mag', '>f4')])
        ind = np.array([False,  True,  True], dtype=bool)
        assert_(d[ind].flags.writeable)
        ind = np.array([0, 1])
        assert_(d[ind].flags.writeable)
        assert_(d[...].flags.writeable)
        assert_(d[0].flags.writeable)
    def test_memory_order(self):
        # This is not necessary to preserve. Memory layouts for
        # more complex indices are not as simple.
        a = np.arange(10)
        b = np.arange(10).reshape(5,2).T
        assert_(a[b].flags.f_contiguous)
        # Takes a different implementation branch:
        a = a.reshape(-1, 1)
        assert_(a[b, 0].flags.f_contiguous)
    def test_scalar_return_type(self):
        # Full scalar indices should return scalars and object
        # arrays should not call PyArray_Return on their items
        class Zero(object):
            # The most basic valid indexing
            def __index__(self):
                return 0
        z = Zero()
        class ArrayLike(object):
            # Simple array, should behave like the array
            def __array__(self):
                return np.array(0)
        a = np.zeros(())
        assert_(isinstance(a[()], np.float_))
        a = np.zeros(1)
        assert_(isinstance(a[z], np.float_))
        a = np.zeros((1, 1))
        assert_(isinstance(a[z, np.array(0)], np.float_))
        assert_(isinstance(a[z, ArrayLike()], np.float_))
        # And object arrays do not call it too often:
        b = np.array(0)
        a = np.array(0, dtype=object)
        a[()] = b
        assert_(isinstance(a[()], np.ndarray))
        a = np.array([b, None])
        assert_(isinstance(a[z], np.ndarray))
        a = np.array([[b, None]])
        assert_(isinstance(a[z, np.array(0)], np.ndarray))
        assert_(isinstance(a[z, ArrayLike()], np.ndarray))
    def test_small_regressions(self):
        # Reference count of intp for index checks
        a = np.array([0])
        refcount = sys.getrefcount(np.dtype(np.intp))
        # item setting always checks indices in separate function:
        a[np.array([0], dtype=np.intp)] = 1
        a[np.array([0], dtype=np.uint8)] = 1
        assert_raises(IndexError, a.__setitem__,
                      np.array([1], dtype=np.intp), 1)
        assert_raises(IndexError, a.__setitem__,
                      np.array([1], dtype=np.uint8), 1)
        assert_equal(sys.getrefcount(np.dtype(np.intp)), refcount)
    def test_unaligned(self):
        # Slicing off one int8 misaligns the S8 view relative to 8-byte
        # boundaries; all paths below must cope with unaligned data.
        v = (np.zeros(64, dtype=np.int8) + ord('a'))[1:-7]
        d = v.view(np.dtype("S8"))
        # unaligned source
        x = (np.zeros(16, dtype=np.int8) + ord('a'))[1:-7]
        x = x.view(np.dtype("S8"))
        x[...] = np.array("b" * 8, dtype="S")
        b = np.arange(d.size)
        #trivial
        assert_equal(d[b], d)
        d[b] = x
        # nontrivial
        # unaligned index array
        b = np.zeros(d.size + 1).view(np.int8)[1:-(np.intp(0).itemsize - 1)]
        b = b.view(np.intp)[:d.size]
        b[...] = np.arange(d.size)
        assert_equal(d[b.astype(np.int16)], d)
        d[b.astype(np.int16)] = x
        # boolean
        d[b % 2 == 0]
        d[b % 2 == 0] = x[::2]
    def test_tuple_subclass(self):
        arr = np.ones((5, 5))
        # A tuple subclass should also be an nd-index
        class TupleSubclass(tuple):
            pass
        index = ([1], [1])
        index = TupleSubclass(index)
        assert_(arr[index].shape == (1,))
        # Unlike the non nd-index:
        assert_(arr[index,].shape != (1,))
    def test_broken_sequence_not_nd_index(self):
        # See gh-5063:
        # If we have an object which claims to be a sequence, but fails
        # on item getting, this should not be converted to an nd-index (tuple)
        # If this object happens to be a valid index otherwise, it should work
        # This object here is very dubious and probably bad though:
        class SequenceLike(object):
            def __index__(self):
                return 0
            def __len__(self):
                return 1
            def __getitem__(self, item):
                raise IndexError('Not possible')
        arr = np.arange(10)
        assert_array_equal(arr[SequenceLike()], arr[SequenceLike(),])
        # also test that field indexing does not segfault
        # for a similar reason, by indexing a structured array
        arr = np.zeros((1,), dtype=[('f1', 'i8'), ('f2', 'i8')])
        assert_array_equal(arr[SequenceLike()], arr[SequenceLike(),])
    def test_indexing_array_weird_strides(self):
        # See also gh-6221
        # the shapes used here come from the issue and create the correct
        # size for the iterator buffering size.
        x = np.ones(10)
        x2 = np.ones((10, 2))
        ind = np.arange(10)[:, None, None, None]
        ind = np.broadcast_to(ind, (10, 55, 4, 4))
        # single advanced index case
        assert_array_equal(x[ind], x[ind.copy()])
        # higher dimensional advanced index
        zind = np.zeros(4, dtype=np.intp)
        assert_array_equal(x2[ind, zind], x2[ind.copy(), zind])
class TestFieldIndexing(TestCase):
    def test_scalar_return_type(self):
        # Accessing a field of a structured array must yield an ndarray,
        # never a scalar -- even when the array itself is 0-dimensional.
        rec = np.zeros((), [('a','f8')])
        single_field = rec['a']
        multi_field = rec[['a']]
        assert_(isinstance(single_field, np.ndarray))
        assert_(isinstance(multi_field, np.ndarray))
class TestBroadcastedAssignments(TestCase):
    """Tests for broadcasting rules of `a[index] = value`: which leading
    ones may be prepended, and which mismatches raise (or, during the
    deprecation period, warn)."""
    def assign(self, a, ind, val):
        # Helper so assignment failures can be asserted via assert_raises.
        a[ind] = val
        return a
    def test_prepending_ones(self):
        a = np.zeros((3, 2))
        a[...] = np.ones((1, 3, 2))
        # Fancy with subspace with and without transpose
        a[[0, 1, 2], :] = np.ones((1, 3, 2))
        a[:, [0, 1]] = np.ones((1, 3, 2))
        # Fancy without subspace (with broadcasting)
        a[[[0], [1], [2]], [0, 1]] = np.ones((1, 3, 2))
    def test_prepend_not_one(self):
        assign = self.assign
        s_ = np.s_
        a = np.zeros(5)
        # Too large and not only ones.
        assert_raises(ValueError, assign, a, s_[...], np.ones((2, 1)))
        # NOTE(review): these expect DeprecationWarning; in later numpy
        # versions the same operations raise ValueError outright.
        with warnings.catch_warnings():
            # Will be a ValueError as well.
            warnings.simplefilter("error", DeprecationWarning)
            assert_raises(DeprecationWarning, assign, a, s_[[1, 2, 3],],
                          np.ones((2, 1)))
            assert_raises(DeprecationWarning, assign, a, s_[[[1], [2]],],
                          np.ones((2,2,1)))
    def test_simple_broadcasting_errors(self):
        assign = self.assign
        s_ = np.s_
        a = np.zeros((5, 1))
        assert_raises(ValueError, assign, a, s_[...], np.zeros((5, 2)))
        assert_raises(ValueError, assign, a, s_[...], np.zeros((5, 0)))
        assert_raises(ValueError, assign, a, s_[:, [0]], np.zeros((5, 2)))
        assert_raises(ValueError, assign, a, s_[:, [0]], np.zeros((5, 0)))
        assert_raises(ValueError, assign, a, s_[[0], :], np.zeros((2, 1)))
    def test_index_is_larger(self):
        # Simple case of fancy index broadcasting of the index.
        a = np.zeros((5, 5))
        a[[[0], [1], [2]], [0, 1, 2]] = [2, 3, 4]
        assert_((a[:3, :3] == [2, 3, 4]).all())
    def test_broadcast_subspace(self):
        a = np.zeros((100, 100))
        v = np.arange(100)[:,None]
        b = np.arange(100)[::-1]
        a[b] = v
        assert_((a[::-1] == v).all())
class TestSubclasses(TestCase):
    """Indexing must preserve ndarray subclasses and call
    `__array_finalize__` on fully-filled results."""
    def test_basic(self):
        class SubClass(np.ndarray):
            pass
        s = np.arange(5).view(SubClass)
        assert_(isinstance(s[:3], SubClass))
        assert_(s[:3].base is s)
        assert_(isinstance(s[[0, 1, 2]], SubClass))
        assert_(isinstance(s[s > 0], SubClass))
    def test_matrix_fancy(self):
        # The matrix class messes with the shape. While this is always
        # weird (getitem is not used, it does not have setitem nor knows
        # about fancy indexing), this tests gh-3110
        m = np.matrix([[1, 2], [3, 4]])
        assert_(isinstance(m[[0,1,0], :], np.matrix))
        # gh-3110. Note the transpose currently because matrices do *not*
        # support dimension fixing for fancy indexing correctly.
        x = np.asmatrix(np.arange(50).reshape(5,10))
        assert_equal(x[:2, np.array(-1)], x[:2, -1].T)
    def test_finalize_gets_full_info(self):
        # Array finalize should be called on the filled array.
        class SubClass(np.ndarray):
            def __array_finalize__(self, old):
                # Snapshot the data visible at finalize time; if finalize
                # ran before filling, this would not equal the result.
                self.finalize_status = np.array(self)
                self.old = old
        s = np.arange(10).view(SubClass)
        new_s = s[:3]
        assert_array_equal(new_s.finalize_status, new_s)
        assert_array_equal(new_s.old, s)
        new_s = s[[0,1,2,3]]
        assert_array_equal(new_s.finalize_status, new_s)
        assert_array_equal(new_s.old, s)
        new_s = s[s > 0]
        assert_array_equal(new_s.finalize_status, new_s)
        assert_array_equal(new_s.old, s)
class TestFancingIndexingCast(TestCase):
    # NOTE(review): class name typo ("Fancing" for "Fancy") matches the
    # upstream file of this vintage; renaming would change the public
    # test-discovery name, so it is kept.
    def test_boolean_index_cast_assign(self):
        # Setup the boolean index and float arrays.
        shape = (8, 63)
        bool_index = np.zeros(shape).astype(bool)
        bool_index[0, 1] = True
        zero_array = np.zeros(shape)
        # Assigning float is fine.
        zero_array[bool_index] = np.array([1])
        assert_equal(zero_array[0, 1], 1)
        # Fancy indexing works, although we get a cast warning.
        assert_warns(np.ComplexWarning,
                     zero_array.__setitem__, ([0], [1]), np.array([2 + 1j]))
        assert_equal(zero_array[0, 1], 2)  # No complex part
        # Cast complex to float, throwing away the imaginary portion.
        assert_warns(np.ComplexWarning,
                     zero_array.__setitem__, bool_index, np.array([1j]))
        assert_equal(zero_array[0, 1], 0)
class TestFancyIndexingEquivalence(TestCase):
    """Fancy-index assignment must behave exactly like the equivalent
    slice assignment, including the object/copyto special case and
    unsafe same-kind casting."""
    def test_object_assign(self):
        # Check that the field and object special case using copyto is active.
        # The right hand side cannot be converted to an array here.
        a = np.arange(5, dtype=object)
        b = a.copy()
        a[:3] = [1, (1,2), 3]
        b[[0, 1, 2]] = [1, (1,2), 3]
        assert_array_equal(a, b)
        # test same for subspace fancy indexing
        b = np.arange(5, dtype=object)[None, :]
        b[[0], :3] = [[1, (1,2), 3]]
        assert_array_equal(a, b[0])
        # Check that swapping of axes works.
        # There was a bug that made the later assignment throw a ValueError
        # do to an incorrectly transposed temporary right hand side (gh-5714)
        b = b.T
        b[:3, [0]] = [[1], [(1,2)], [3]]
        assert_array_equal(a, b[:, 0])
        # Another test for the memory order of the subspace
        arr = np.ones((3, 4, 5), dtype=object)
        # Equivalent slicing assignment for comparison
        cmp_arr = arr.copy()
        cmp_arr[:1, ...] = [[[1], [2], [3], [4]]]
        arr[[0], ...] = [[[1], [2], [3], [4]]]
        assert_array_equal(arr, cmp_arr)
        arr = arr.copy('F')
        arr[[0], ...] = [[[1], [2], [3], [4]]]
        assert_array_equal(arr, cmp_arr)
    def test_cast_equivalence(self):
        # Yes, normal slicing uses unsafe casting.
        a = np.arange(5)
        b = a.copy()
        a[:3] = np.array(['2', '-3', '-1'])
        b[[0, 2, 1]] = np.array(['2', '-1', '-3'])
        assert_array_equal(a, b)
        # test the same for subspace fancy indexing
        b = np.arange(5)[None, :]
        b[[0], :3] = np.array([['2', '-3', '-1']])
        assert_array_equal(a, b[0])
class TestMultiIndexingAutomated(TestCase):
    """
    These test use code to mimic the C-Code indexing for selection.

    NOTE: * This still lacks tests for complex item setting.
          * If you change behavior of indexing, you might want to modify
            these tests to try more combinations.
          * Behavior was written to match numpy version 1.8. (though a
            first version matched 1.7.)
          * Only tuple indices are supported by the mimicking code.
            (and tested as of writing this)
          * Error types should match most of the time as long as there
            is only one error. For multiple errors, what gets raised
            will usually not be the same one. They are *not* tested.
    """
    def setUp(self):
        # A small non-trivially-shaped arange and a 0-sized companion;
        # the index pools below are combined by test_multidim.
        self.a = np.arange(np.prod([3, 1, 5, 6])).reshape(3, 1, 5, 6)
        self.b = np.empty((3, 0, 5, 6))
        self.complex_indices = ['skip', Ellipsis,
            0,
            # Boolean indices, up to 3-d for some special cases of eating up
            # dimensions, also need to test all False
            np.array(False),
            np.array([True, False, False]),
            np.array([[True, False], [False, True]]),
            np.array([[[False, False], [False, False]]]),
            # Some slices:
            slice(-5, 5, 2),
            slice(1, 1, 100),
            slice(4, -1, -2),
            slice(None, None, -3),
            # Some Fancy indexes:
            np.empty((0, 1, 1), dtype=np.intp),  # empty and can be broadcast
            np.array([0, 1, -2]),
            np.array([[2], [0], [1]]),
            np.array([[0, -1], [0, 1]], dtype=np.dtype('intp').newbyteorder()),
            np.array([2, -1], dtype=np.int8),
            np.zeros([1]*31, dtype=int),  # trigger too large array.
            np.array([0., 1.])]  # invalid datatype
        # Some simpler indices that still cover a bit more
        self.simple_indices = [Ellipsis, None, -1, [1], np.array([True]), 'skip']
        # Very simple ones to fill the rest:
        self.fill_indices = [slice(None, None), 0]
    def _get_multi_index(self, arr, indices):
        """Mimic multi dimensional indexing.

        Parameters
        ----------
        arr : ndarray
            Array to be indexed.
        indices : tuple of index objects

        Returns
        -------
        out : ndarray
            An array equivalent to the indexing operation (but always a copy).
            `arr[indices]` should be identical.
        no_copy : bool
            Whether the indexing operation requires a copy. If this is `True`,
            `np.may_share_memory(arr, arr[indicies])` should be `True` (with
            some exceptions for scalars and possibly 0-d arrays).

        Notes
        -----
        While the function may mostly match the errors of normal indexing this
        is generally not the case.
        """
        in_indices = list(indices)
        indices = []
        # if False, this is a fancy or boolean index
        no_copy = True
        # number of fancy/scalar indexes that are not consecutive
        num_fancy = 0
        # number of dimensions indexed by a "fancy" index
        fancy_dim = 0
        # NOTE: This is a funny twist (and probably OK to change).
        # The boolean array has illegal indexes, but this is
        # allowed if the broadcast fancy-indices are 0-sized.
        # This variable is to catch that case.
        error_unless_broadcast_to_empty = False
        # We need to handle Ellipsis and make arrays from indices, also
        # check if this is fancy indexing (set no_copy).
        ndim = 0
        ellipsis_pos = None  # define here mostly to replace all but first.
        for i, indx in enumerate(in_indices):
            if indx is None:
                continue
            if isinstance(indx, np.ndarray) and indx.dtype == bool:
                no_copy = False
                if indx.ndim == 0:
                    raise IndexError
                # boolean indices can have higher dimensions
                ndim += indx.ndim
                fancy_dim += indx.ndim
                continue
            if indx is Ellipsis:
                if ellipsis_pos is None:
                    ellipsis_pos = i
                    continue  # do not increment ndim counter
                raise IndexError
            if isinstance(indx, slice):
                ndim += 1
                continue
            if not isinstance(indx, np.ndarray):
                # This could be open for changes in numpy.
                # numpy should maybe raise an error if casting to intp
                # is not safe. It rejects np.array([1., 2.]) but not
                # [1., 2.] as index (same for ie. np.take).
                # (Note the importance of empty lists if changing this here)
                indx = np.array(indx, dtype=np.intp)
                in_indices[i] = indx
            elif indx.dtype.kind != 'b' and indx.dtype.kind != 'i':
                raise IndexError('arrays used as indices must be of integer (or boolean) type')
            if indx.ndim != 0:
                no_copy = False
            ndim += 1
            fancy_dim += 1
        if arr.ndim - ndim < 0:
            # we can't take more dimensions then we have, not even for 0-d arrays.
            # since a[()] makes sense, but not a[(),]. We will raise an error
            # later on, unless a broadcasting error occurs first.
            raise IndexError
        if ndim == 0 and None not in in_indices:
            # Well we have no indexes or one Ellipsis. This is legal.
            return arr.copy(), no_copy
        if ellipsis_pos is not None:
            in_indices[ellipsis_pos:ellipsis_pos+1] = [slice(None, None)] * (arr.ndim - ndim)
        for ax, indx in enumerate(in_indices):
            if isinstance(indx, slice):
                # convert to an index array
                indx = np.arange(*indx.indices(arr.shape[ax]))
                indices.append(['s', indx])
                continue
            elif indx is None:
                # this is like taking a slice with one element from a new axis:
                indices.append(['n', np.array([0], dtype=np.intp)])
                arr = arr.reshape((arr.shape[:ax] + (1,) + arr.shape[ax:]))
                continue
            if isinstance(indx, np.ndarray) and indx.dtype == bool:
                if indx.shape != arr.shape[ax:ax+indx.ndim]:
                    raise IndexError
                try:
                    flat_indx = np.ravel_multi_index(np.nonzero(indx),
                                    arr.shape[ax:ax+indx.ndim], mode='raise')
                except:
                    error_unless_broadcast_to_empty = True
                    # fill with 0s instead, and raise error later
                    flat_indx = np.array([0]*indx.sum(), dtype=np.intp)
                # concatenate axis into a single one:
                if indx.ndim != 0:
                    arr = arr.reshape((arr.shape[:ax]
                                  + (np.prod(arr.shape[ax:ax+indx.ndim]),)
                                  + arr.shape[ax+indx.ndim:]))
                    indx = flat_indx
                else:
                    # This could be changed, a 0-d boolean index can
                    # make sense (even outside the 0-d indexed array case)
                    # Note that originally this is could be interpreted as
                    # integer in the full integer special case.
                    raise IndexError
            else:
                # If the index is a singleton, the bounds check is done
                # before the broadcasting. This used to be different in <1.9
                if indx.ndim == 0:
                    if indx >= arr.shape[ax] or indx < -arr.shape[ax]:
                        raise IndexError
            if indx.ndim == 0:
                # The index is a scalar. This used to be two fold, but if fancy
                # indexing was active, the check was done later, possibly
                # after broadcasting it away (1.7. or earlier). Now it is always
                # done.
                if indx >= arr.shape[ax] or indx < - arr.shape[ax]:
                    raise IndexError
            if len(indices) > 0 and indices[-1][0] == 'f' and ax != ellipsis_pos:
                # NOTE: There could still have been a 0-sized Ellipsis
                # between them. Checked that with ellipsis_pos.
                indices[-1].append(indx)
            else:
                # We have a fancy index that is not after an existing one.
                # NOTE: A 0-d array triggers this as well, while
                # one may expect it to not trigger it, since a scalar
                # would not be considered fancy indexing.
                num_fancy += 1
                indices.append(['f', indx])
        if num_fancy > 1 and not no_copy:
            # We have to flush the fancy indexes left
            new_indices = indices[:]
            axes = list(range(arr.ndim))
            fancy_axes = []
            new_indices.insert(0, ['f'])
            ni = 0
            ai = 0
            for indx in indices:
                ni += 1
                if indx[0] == 'f':
                    new_indices[0].extend(indx[1:])
                    del new_indices[ni]
                    ni -= 1
                    for ax in range(ai, ai + len(indx[1:])):
                        fancy_axes.append(ax)
                        axes.remove(ax)
                ai += len(indx) - 1  # axis we are at
            indices = new_indices
            # and now we need to transpose arr:
            arr = arr.transpose(*(fancy_axes + axes))
        # We only have one 'f' index now and arr is transposed accordingly.
        # Now handle newaxis by reshaping...
        ax = 0
        for indx in indices:
            if indx[0] == 'f':
                if len(indx) == 1:
                    continue
                # First of all, reshape arr to combine fancy axes into one:
                orig_shape = arr.shape
                orig_slice = orig_shape[ax:ax + len(indx[1:])]
                arr = arr.reshape((arr.shape[:ax]
                                    + (np.prod(orig_slice).astype(int),)
                                    + arr.shape[ax + len(indx[1:]):]))
                # Check if broadcasting works
                if len(indx[1:]) != 1:
                    res = np.broadcast(*indx[1:])  # raises ValueError...
                else:
                    res = indx[1]
                # unfortunately the indices might be out of bounds. So check
                # that first, and use mode='wrap' then. However only if
                # there are any indices...
                if res.size != 0:
                    if error_unless_broadcast_to_empty:
                        raise IndexError
                    for _indx, _size in zip(indx[1:], orig_slice):
                        if _indx.size == 0:
                            continue
                        if np.any(_indx >= _size) or np.any(_indx < -_size):
                            raise IndexError
                if len(indx[1:]) == len(orig_slice):
                    if np.product(orig_slice) == 0:
                        # Work around for a crash or IndexError with 'wrap'
                        # in some 0-sized cases.
                        try:
                            mi = np.ravel_multi_index(indx[1:], orig_slice, mode='raise')
                        except:
                            # This happens with 0-sized orig_slice (sometimes?)
                            # here it is a ValueError, but indexing gives a:
                            raise IndexError('invalid index into 0-sized')
                    else:
                        mi = np.ravel_multi_index(indx[1:], orig_slice, mode='wrap')
                else:
                    # Maybe never happens...
                    raise ValueError
                arr = arr.take(mi.ravel(), axis=ax)
                arr = arr.reshape((arr.shape[:ax]
                                    + mi.shape
                                    + arr.shape[ax+1:]))
                ax += mi.ndim
                continue
            # If we are here, we have a 1D array for take:
            arr = arr.take(indx[1], axis=ax)
            ax += 1
        return arr, no_copy
    def _check_multi_index(self, arr, index):
        """Check a multi index item getting and simple setting.

        Parameters
        ----------
        arr : ndarray
            Array to be indexed, must be a reshaped arange.
        index : tuple of indexing objects
            Index being tested.
        """
        # Test item getting
        try:
            mimic_get, no_copy = self._get_multi_index(arr, index)
        except Exception:
            # The mimic rejected the index: real indexing must also raise,
            # and must not leak a reference to `arr` while doing so.
            prev_refcount = sys.getrefcount(arr)
            assert_raises(Exception, arr.__getitem__, index)
            assert_raises(Exception, arr.__setitem__, index, 0)
            assert_equal(prev_refcount, sys.getrefcount(arr))
            return
        self._compare_index_result(arr, index, mimic_get, no_copy)
    def _check_single_index(self, arr, index):
        """Check a single index item getting and simple setting.

        Parameters
        ----------
        arr : ndarray
            Array to be indexed, must be an arange.
        index : indexing object
            Index being tested. Must be a single index and not a tuple
            of indexing objects (see also `_check_multi_index`).
        """
        try:
            mimic_get, no_copy = self._get_multi_index(arr, (index,))
        except Exception:
            # See _check_multi_index: errors must match and not leak refs.
            prev_refcount = sys.getrefcount(arr)
            assert_raises(Exception, arr.__getitem__, index)
            assert_raises(Exception, arr.__setitem__, index, 0)
            assert_equal(prev_refcount, sys.getrefcount(arr))
            return
        self._compare_index_result(arr, index, mimic_get, no_copy)
    def _compare_index_result(self, arr, index, mimic_get, no_copy):
        """Compare mimicked result to indexing result.
        """
        arr = arr.copy()
        indexed_arr = arr[index]
        assert_array_equal(indexed_arr, mimic_get)
        # Check if we got a view, unless its a 0-sized or 0-d array.
        # (then its not a view, and that does not matter)
        if indexed_arr.size != 0 and indexed_arr.ndim != 0:
            assert_(np.may_share_memory(indexed_arr, arr) == no_copy)
            # Check reference count of the original array
            if no_copy:
                # refcount increases by one:
                assert_equal(sys.getrefcount(arr), 3)
            else:
                assert_equal(sys.getrefcount(arr), 2)
        # Test non-broadcast setitem:
        b = arr.copy()
        b[index] = mimic_get + 1000
        if b.size == 0:
            return  # nothing to compare here...
        if no_copy and indexed_arr.ndim != 0:
            # change indexed_arr in-place to manipulate original:
            indexed_arr += 1000
            assert_array_equal(arr, b)
            return
        # Use the fact that the array is originally an arange:
        arr.flat[indexed_arr.ravel()] += 1000
        assert_array_equal(arr, b)
    def test_boolean(self):
        a = np.array(5)
        assert_equal(a[np.array(True)], 5)
        a[np.array(True)] = 1
        assert_equal(a, 1)
        # NOTE: This is different from normal broadcasting, as
        # arr[boolean_array] works like in a multi index. Which means
        # it is aligned to the left. This is probably correct for
        # consistency with arr[boolean_array,] also no broadcasting
        # is done at all
        self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool),))
        self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool)[..., 0],))
        self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool)[None, ...],))
    def test_multidim(self):
        # Automatically test combinations with complex indexes on 2nd (or 1st)
        # spot and the simple ones in one other spot.
        with warnings.catch_warnings():
            # This is so that np.array(True) is not accepted in a full integer
            # index, when running the file separately.
            warnings.filterwarnings('error', '', DeprecationWarning)
            warnings.filterwarnings('error', '', np.VisibleDeprecationWarning)
            def isskip(idx):
                return isinstance(idx, str) and idx == "skip"
            for simple_pos in [0, 2, 3]:
                tocheck = [self.fill_indices, self.complex_indices,
                           self.fill_indices, self.fill_indices]
                tocheck[simple_pos] = self.simple_indices
                for index in product(*tocheck):
                    index = tuple(i for i in index if not isskip(i))
                    self._check_multi_index(self.a, index)
                    self._check_multi_index(self.b, index)
        # Check very simple item getting:
        self._check_multi_index(self.a, (0, 0, 0, 0))
        self._check_multi_index(self.b, (0, 0, 0, 0))
        # Also check (simple cases of) too many indices:
        assert_raises(IndexError, self.a.__getitem__, (0, 0, 0, 0, 0))
        assert_raises(IndexError, self.a.__setitem__, (0, 0, 0, 0, 0), 0)
        assert_raises(IndexError, self.a.__getitem__, (0, 0, [1], 0, 0))
        assert_raises(IndexError, self.a.__setitem__, (0, 0, [1], 0, 0), 0)
    def test_1d(self):
        a = np.arange(10)
        with warnings.catch_warnings():
            warnings.filterwarnings('error', '', np.VisibleDeprecationWarning)
            for index in self.complex_indices:
                self._check_single_index(a, index)
class TestCApiAccess(TestCase):
    """Exercises PySequence_GetItem/SetItem-style access through the
    compiled `array_indexing` helper (mode 0 = get, mode 1 = set)."""
    def test_getitem(self):
        subscript = functools.partial(array_indexing, 0)
        # 0-d arrays don't work:
        assert_raises(IndexError, subscript, np.ones(()), 0)
        # Out of bound values:
        assert_raises(IndexError, subscript, np.ones(10), 11)
        assert_raises(IndexError, subscript, np.ones(10), -11)
        assert_raises(IndexError, subscript, np.ones((10, 10)), 11)
        assert_raises(IndexError, subscript, np.ones((10, 10)), -11)
        a = np.arange(10)
        assert_array_equal(a[4], subscript(a, 4))
        a = a.reshape(5, 2)
        assert_array_equal(a[-4], subscript(a, -4))
    def test_setitem(self):
        assign = functools.partial(array_indexing, 1)
        # Deletion is impossible:
        assert_raises(ValueError, assign, np.ones(10), 0)
        # 0-d arrays don't work:
        assert_raises(IndexError, assign, np.ones(()), 0, 0)
        # Out of bound values:
        assert_raises(IndexError, assign, np.ones(10), 11, 0)
        assert_raises(IndexError, assign, np.ones(10), -11, 0)
        assert_raises(IndexError, assign, np.ones((10, 10)), 11, 0)
        assert_raises(IndexError, assign, np.ones((10, 10)), -11, 0)
        a = np.arange(10)
        assign(a, 4, 10)
        assert_(a[4] == 10)
        a = a.reshape(5, 2)
        assign(a, 4, 10)
        assert_array_equal(a[-1], [10, 10])
# Allow running this test file directly, outside the numpy test runner.
if __name__ == "__main__":
    run_module_suite()
|
jalexvig/tensorflow | refs/heads/master | tensorflow/contrib/data/python/kernel_tests/interleave_dataset_op_test.py | 11 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import math
import threading
import time
from six.moves import zip_longest
from tensorflow.contrib.data.python.ops import interleave_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
class ParallelInterleaveDatasetTest(test.TestCase):
def setUp(self):
  """Builds the parallel_interleave test graph and coordination primitives."""
  # Placeholders so each test can feed its own pipeline configuration.
  self.input_values = array_ops.placeholder(dtypes.int64, shape=[None])
  self.cycle_length = array_ops.placeholder(dtypes.int64, shape=[])
  self.block_length = array_ops.placeholder(dtypes.int64, shape=[])
  self.sloppy = array_ops.placeholder(dtypes.bool, shape=[])
  self.buffer_output_elements = array_ops.placeholder(dtypes.int64, shape=[])
  self.prefetch_input_elements = array_ops.placeholder(dtypes.int64, shape=[])

  # When set, the next map_py_fn invocation raises this error (used by the
  # error-propagation tests); it is consumed (reset to None) when raised.
  self.error = None
  self.repeat_count = 2

  # Set up threading events used to sequence when items are produced that
  # are subsequently interleaved. These events allow us to deterministically
  # simulate slowdowns and force sloppiness.
  self.read_coordination_events = {}
  self.write_coordination_events = {}
  # input values [4, 5, 6] are the common case for the tests; set defaults
  for i in range(4, 7):
    self.read_coordination_events[i] = threading.Semaphore(0)
    self.write_coordination_events[i] = threading.Event()

  def map_py_fn(x):
    # Block until the test allows element x to be produced, then signal the
    # test (via the read semaphore) that production happened.
    self.write_coordination_events[x].wait()
    self.write_coordination_events[x].clear()
    self.read_coordination_events[x].release()
    if self.error:
      err = self.error
      self.error = None
      raise err  # pylint: disable=raising-bad-type
    return x * x

  def map_fn(x):
    return script_ops.py_func(map_py_fn, [x], x.dtype)

  def interleave_fn(x):
    # A dataset of x copies of x, each squared by the coordinated map.
    dataset = dataset_ops.Dataset.from_tensors(x)
    dataset = dataset.repeat(x)
    return dataset.map(map_fn)

  self.dataset = (
      dataset_ops.Dataset.from_tensor_slices(self.input_values)
      .repeat(self.repeat_count).apply(
          interleave_ops.parallel_interleave(interleave_fn, self.cycle_length,
                                             self.block_length, self.sloppy,
                                             self.buffer_output_elements,
                                             self.prefetch_input_elements)))
  self.iterator = self.dataset.make_initializable_iterator()
  self.init_op = self.iterator.initializer
  self.next_element = self.iterator.get_next()
def _interleave(self, lists, cycle_length, block_length):
"""Python implementation of interleave used for testing."""
num_open = 0
# `all_iterators` acts as a queue of iterators over each element of `lists`.
all_iterators = [iter(l) for l in lists]
# `open_iterators` are the iterators whose elements are currently being
# interleaved.
open_iterators = []
for i in range(cycle_length):
if all_iterators:
open_iterators.append(all_iterators.pop(0))
num_open += 1
else:
open_iterators.append(None)
while num_open or all_iterators:
for i in range(cycle_length):
if open_iterators[i] is None:
if all_iterators:
open_iterators[i] = all_iterators.pop(0)
num_open += 1
else:
continue
for _ in range(block_length):
try:
yield next(open_iterators[i])
except StopIteration:
open_iterators[i] = None
num_open -= 1
break
def testPythonImplementation(self):
  """Sanity-checks the reference `_interleave` implementation itself."""
  input_lists = [[4, 4, 4, 4], [5, 5, 5, 5, 5], [6, 6, 6, 6, 6, 6],
                 [4, 4, 4, 4], [5, 5, 5, 5, 5], [6, 6, 6, 6, 6, 6]]

  # Cycle length 1 acts like `Dataset.flat_map()`.
  expected_elements = itertools.chain(*input_lists)
  # Fix: use zip_longest (as the sibling tests do) instead of zip, so a
  # missing or extra element in either sequence is reported instead of
  # being silently truncated away.
  for index, (expected, produced) in enumerate(
      zip_longest(expected_elements, self._interleave(input_lists, 1, 1))):
    self.assertEqual(expected, produced, "Values differ at %s. %s != %s" %
                     (index, expected, produced))

  # Cycle length > 1.
  expected_elements = [
      4, 5, 4, 5, 4, 5, 4, 5, 5, 6, 6, 4, 6, 4, 6, 4, 6, 4, 6, 5, 6, 5, 6, 5,
      6, 5, 6, 5, 6, 6
  ]
  for index, (expected, produced) in enumerate(
      zip_longest(expected_elements, self._interleave(input_lists, 2, 1))):
    self.assertEqual(expected, produced, "Values differ at %s. %s != %s" %
                     (index, expected, produced))
def testPythonImplementationBlockLength(self):
  """Checks `_interleave` ordering when block_length is 2."""
  sources = [[4] * 4, [5] * 5, [6] * 6] * 2
  want = [
      4, 4, 5, 5, 4, 4, 5, 5, 5, 6, 6, 4, 4, 6, 6, 4, 4, 6, 6, 5, 5, 6, 6, 5,
      5, 6, 6, 5, 6, 6
  ]
  got = self._interleave(sources, 2, 2)
  for position, (expected, produced) in enumerate(zip_longest(want, got)):
    self.assertEqual(expected, produced, "Values differ at %s. %s != %s" %
                     (position, expected, produced))
def testPythonImplementationEmptyLists(self):
  """Checks `_interleave` when some of the interleaved sources are empty."""
  sources = [[4, 4, 4, 4], [], [6, 6, 6, 6, 6, 6], [4, 4, 4, 4], [],
             [6, 6, 6, 6, 6, 6]]
  want = [4, 4, 6, 4, 6, 4, 6, 6, 4, 6, 4, 6, 4, 4, 6, 6, 6, 6, 6, 6]
  got = self._interleave(sources, 2, 1)
  for position, (expected, produced) in enumerate(zip_longest(want, got)):
    self.assertEqual(expected, produced, "Values differ at %s. %s != %s" %
                     (position, expected, produced))
def _clear_coordination_events(self):
  """Resets the per-value read/write coordination primitives to unsignaled."""
  for value in range(4, 7):
    self.read_coordination_events[value] = threading.Semaphore(0)
    self.write_coordination_events[value].clear()
def _allow_all_map_threads(self):
  """Unblocks every map function, whichever value it is about to produce."""
  for value in range(4, 7):
    self.write_coordination_events[value].set()
def _testSingleThreaded(self, sloppy=False, prefetch_input_elements=0):
  """Runs the pipeline with cycle/block length 1 and checks element order.

  Args:
    sloppy: whether to enable sloppy (order-relaxed) interleaving.
    prefetch_input_elements: how many input elements to prefetch.
  """
  # cycle_length=1,block_length=1 acts like `Dataset.interleave()` and
  # `Dataset.flat_map()` and is single-threaded. No synchronization required.
  with self.test_session() as sess:
    self._clear_coordination_events()
    sess.run(
        self.init_op,
        feed_dict={
            self.input_values: [4, 5, 6],
            self.cycle_length: 1,
            self.block_length: 1,
            self.sloppy: sloppy,
            self.buffer_output_elements: 1,
            self.prefetch_input_elements: prefetch_input_elements,
        })
    for expected_element in self._interleave(
        [[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 1, 1):
      # Unblock exactly the element we expect next; the map fn squares it.
      self.write_coordination_events[expected_element].set()
      self.assertEqual(expected_element * expected_element,
                       sess.run(self.next_element))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(self.next_element)
# Parameterizations of _testSingleThreaded across sloppiness and prefetching.

def testSingleThreaded(self):
  self._testSingleThreaded()

def testSingleThreadedSloppy(self):
  self._testSingleThreaded(sloppy=True)

def testSingleThreadedPrefetch1Itr(self):
  self._testSingleThreaded(prefetch_input_elements=1)

def testSingleThreadedPrefetch1ItrSloppy(self):
  self._testSingleThreaded(prefetch_input_elements=1, sloppy=True)
def testSingleThreadedRagged(self):
# Tests a sequence with wildly different elements per iterator.
with self.test_session() as sess:
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [3, 7, 4],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: False,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
# Add coordination values for 3 and 7
self.read_coordination_events[3] = threading.Semaphore(0)
self.write_coordination_events[3] = threading.Event()
self.read_coordination_events[7] = threading.Semaphore(0)
self.write_coordination_events[7] = threading.Event()
for expected_element in self._interleave(
[[3] * 3, [7] * 7, [4] * 4] * self.repeat_count, 2, 1):
self.write_coordination_events[expected_element].set()
output = sess.run(self.next_element)
self.assertEqual(expected_element * expected_element, output)
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def _testTwoThreadsNoContention(self, sloppy=False):
# num_threads > 1.
# Explicit coordination should result in `Dataset.interleave()` behavior
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 2,
1)):
self.write_coordination_events[expected_element].set()
if done_first_event: # First event starts the worker threads.
self.read_coordination_events[expected_element].acquire()
actual_element = sess.run(self.next_element)
if not done_first_event:
self.read_coordination_events[expected_element].acquire()
done_first_event = True
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testTwoThreadsNoContention(self):
self._testTwoThreadsNoContention()
def testTwoThreadsNoContentionSloppy(self):
self._testTwoThreadsNoContention(sloppy=True)
def _testTwoThreadsNoContentionWithRaces(self, sloppy=False):
"""Tests where all the workers race in producing elements.
Note: this is in contrast with the previous test which carefully sequences
the execution of the map functions.
Args:
sloppy: Whether to be sloppy or not.
"""
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 2,
1)):
if done_first_event: # First event starts the worker threads.
self._allow_all_map_threads()
self.read_coordination_events[expected_element].acquire()
else:
self.write_coordination_events[expected_element].set()
time.sleep(0.5) # Sleep to consistently "avoid" the race condition.
actual_element = sess.run(self.next_element)
if not done_first_event:
done_first_event = True
self.assertTrue(
self.read_coordination_events[expected_element].acquire(False))
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testTwoThreadsNoContentionWithRaces(self):
self._testTwoThreadsNoContentionWithRaces()
def testTwoThreadsNoContentionWithRacesSloppy(self):
self._testTwoThreadsNoContentionWithRaces(sloppy=True)
def _testTwoThreadsNoContentionBlockLength(self, sloppy=False):
# num_threads > 1.
# Explicit coordination should result in `Dataset.interleave()` behavior
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 2,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 2,
2)):
self.write_coordination_events[expected_element].set()
if done_first_event: # First event starts the worker threads.
self.read_coordination_events[expected_element].acquire()
actual_element = sess.run(self.next_element)
if not done_first_event:
done_first_event = True
self.read_coordination_events[expected_element].acquire()
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testTwoThreadsNoContentionBlockLength(self):
self._testTwoThreadsNoContentionBlockLength()
def testTwoThreadsNoContentionBlockLengthSloppy(self):
self._testTwoThreadsNoContentionBlockLength(sloppy=True)
def _testTwoThreadsNoContentionWithRacesAndBlocking(self, sloppy=False):
"""Tests where all the workers race in producing elements.
Note: this is in contrast with the previous test which carefully sequences
the execution of the map functions.
Args:
sloppy: Whether to be sloppy or not.
"""
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 2,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 2,
2)):
if done_first_event: # First event starts the worker threads.
self._allow_all_map_threads()
self.read_coordination_events[expected_element].acquire()
else:
self.write_coordination_events[expected_element].set()
time.sleep(0.5) # Sleep to consistently "avoid" the race condition.
actual_element = sess.run(self.next_element)
if not done_first_event:
done_first_event = True
self.assertTrue(
self.read_coordination_events[expected_element].acquire(False))
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testTwoThreadsNoContentionWithRacesAndBlocking(self):
self._testTwoThreadsNoContentionWithRacesAndBlocking()
def testTwoThreadsNoContentionWithRacesAndBlockingSloppy(self):
self._testTwoThreadsNoContentionWithRacesAndBlocking(sloppy=True)
def _testEmptyInput(self, sloppy=False):
with self.test_session() as sess:
# Empty input.
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [],
self.cycle_length: 2,
self.block_length: 3,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testEmptyInput(self):
self._testEmptyInput()
def testEmptyInputSloppy(self):
self._testEmptyInput(sloppy=True)
def _testNonEmptyInputIntoEmptyOutputs(self, sloppy=False):
# Non-empty input leading to empty output.
with self.test_session() as sess:
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [0, 0, 0],
self.cycle_length: 2,
self.block_length: 3,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testNonEmptyInputIntoEmptyOutputs(self):
self._testNonEmptyInputIntoEmptyOutputs()
def testNonEmptyInputIntoEmptyOutputsSloppy(self):
self._testNonEmptyInputIntoEmptyOutputs(sloppy=True)
def _testPartiallyEmptyOutputs(self, sloppy=False, prefetch_input_elements=1):
race_indices = {2, 8, 14} # Sequence points when sloppy mode has race conds
# Mixture of non-empty and empty interleaved datasets.
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 0, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: prefetch_input_elements,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [], [6] * 6] * self.repeat_count, 2, 1)):
self.write_coordination_events[expected_element].set()
# First event starts the worker threads. Additionally, when running the
# sloppy case with prefetch_input_elements=0, we get stuck if we wait
# for the read coordination event for certain event orderings in the
# presence of finishing iterators.
if done_first_event and not (sloppy and (i in race_indices)):
self.read_coordination_events[expected_element].acquire()
actual_element = sess.run(self.next_element)
if not done_first_event or (sloppy and (i in race_indices)):
done_first_event = True
self.read_coordination_events[expected_element].acquire()
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
def testPartiallyEmptyOutputs(self):
self._testPartiallyEmptyOutputs()
def testPartiallyEmptyOutputsSloppy(self):
self._testPartiallyEmptyOutputs(sloppy=True, prefetch_input_elements=0)
def testDelayedOutputSloppy(self):
# Explicitly control the sequence of events to ensure we correctly avoid
# head-of-line blocking.
with self.test_session() as sess:
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: True,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
mis_ordering = [
4, 4, 5, 4, 5, 5, 4, 5, 6, 6, 6, 5, 4, 4, 6, 6, 4, 4, 6, 5, 6, 6, 6,
6, 5, 5, 5, 5, 6, 6
]
for element in mis_ordering:
self.write_coordination_events[element].set()
self.assertEqual(element * element, sess.run(self.next_element))
self.assertTrue(self.read_coordination_events[element].acquire(False))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testBlockLengthWithContentionSloppy(self):
with self.test_session() as sess:
self._clear_coordination_events()
done_first_event = False
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: True,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 1,
})
# Test against a generating sequence that differs from the uncontended
# case, in order to prove sloppy correctness.
for i, expected_element in enumerate(
self._interleave(
[[4] * 4, [5] * 5, [6] * 6] * self.repeat_count,
cycle_length=2,
block_length=3)):
self.write_coordination_events[expected_element].set()
if done_first_event: # First event starts the worker threads.
self.read_coordination_events[expected_element].acquire()
actual_element = sess.run(self.next_element)
if not done_first_event:
self.read_coordination_events[expected_element].acquire()
done_first_event = True
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def _testEarlyExit(self, sloppy=False):
# Exiting without consuming all input should not block
with self.test_session() as sess:
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 3,
self.block_length: 2,
self.sloppy: sloppy,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
for i in range(4, 7):
self.write_coordination_events[i].set()
elem = sess.run(self.next_element) # Start all workers
# Allow the one successful worker to progress beyond the py_func again.
elem = int(math.sqrt(elem))
self.write_coordination_events[elem].set()
self.read_coordination_events[elem].acquire()
# Allow the prefetch to succeed
for i in range(4, 7):
self.read_coordination_events[i].acquire()
self.write_coordination_events[i].set()
def testEarlyExit(self):
self._testEarlyExit()
def testEarlyExitSloppy(self):
self._testEarlyExit(sloppy=True)
def _testTooManyReaders(self, sloppy=False):
def interleave_fn(x):
dataset = dataset_ops.Dataset.from_tensors(x)
dataset = dataset.repeat(math_ops.cast(x, dtype=dtypes.int64))
return dataset
dataset = dataset_ops.Dataset.from_tensor_slices([4, 5, 6])
dataset = dataset.repeat(self.repeat_count)
dataset = dataset.apply(
interleave_ops.parallel_interleave(
interleave_fn, cycle_length=16, block_length=2, sloppy=sloppy))
iterator = dataset.make_one_shot_iterator()
with self.test_session() as sess:
output_values = []
for _ in range(30):
output_values.append(sess.run(iterator.get_next()))
expected_values = self._interleave(
[[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 1, 2)
self.assertItemsEqual(output_values, expected_values)
def testTooManyReaders(self):
self._testTooManyReaders()
def testTooManyReadersSloppy(self):
self._testTooManyReaders(sloppy=True)
def testSparse(self):
def _map_fn(i):
return sparse_tensor.SparseTensor(
indices=[[0, 0], [1, 1]], values=(i * [1, -1]), dense_shape=[2, 2])
def _interleave_fn(x):
return dataset_ops.Dataset.from_tensor_slices(
sparse_ops.sparse_to_dense(x.indices, x.dense_shape, x.values))
dataset = dataset_ops.Dataset.range(10).map(_map_fn)
iterator = dataset.apply(
interleave_ops.parallel_interleave(
_interleave_fn, cycle_length=1)).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(10):
for j in range(2):
expected = [i, 0] if j % 2 == 0 else [0, -i]
self.assertAllEqual(expected, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testErrorsInOutputFn(self):
with self.test_session() as sess:
self._clear_coordination_events()
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: False,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
except_on_element_indices = set([3])
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5] * 5, [6] * 6] * self.repeat_count, 2,
1)):
if i in except_on_element_indices:
self.error = ValueError()
self.write_coordination_events[expected_element].set()
with self.assertRaises(errors.InvalidArgumentError):
sess.run(self.next_element)
else:
self.write_coordination_events[expected_element].set()
actual_element = sess.run(self.next_element)
self.assertEqual(expected_element * expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testErrorsInInputFn(self):
def map_py_fn(x):
if x == 5:
raise ValueError()
return x
def map_fn(x):
return script_ops.py_func(map_py_fn, [x], x.dtype)
def interleave_fn(x):
dataset = dataset_ops.Dataset.from_tensors(x)
dataset = dataset.repeat(x)
return dataset
self.dataset = (
dataset_ops.Dataset.from_tensor_slices(self.input_values).map(map_fn)
.repeat(self.repeat_count).apply(
interleave_ops.parallel_interleave(interleave_fn, self.cycle_length,
self.block_length, self.sloppy,
self.buffer_output_elements,
self.prefetch_input_elements)))
self.iterator = self.dataset.make_initializable_iterator()
self.init_op = self.iterator.initializer
self.next_element = self.iterator.get_next()
with self.test_session() as sess:
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: False,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5], [6] * 6] * self.repeat_count, 2, 1)):
if expected_element == 5:
with self.assertRaises(errors.InvalidArgumentError):
sess.run(self.next_element)
else:
actual_element = sess.run(self.next_element)
self.assertEqual(expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
def testErrorsInInterleaveFn(self):
def map_py_fn(x):
if x == 5:
raise ValueError()
return x
def interleave_fn(x):
dataset = dataset_ops.Dataset.from_tensors(x)
y = script_ops.py_func(map_py_fn, [x], x.dtype)
dataset = dataset.repeat(y)
return dataset
self.dataset = (
dataset_ops.Dataset.from_tensor_slices(self.input_values)
.repeat(self.repeat_count).apply(
interleave_ops.parallel_interleave(interleave_fn, self.cycle_length,
self.block_length, self.sloppy,
self.buffer_output_elements,
self.prefetch_input_elements)))
self.iterator = self.dataset.make_initializable_iterator()
self.init_op = self.iterator.initializer
self.next_element = self.iterator.get_next()
with self.test_session() as sess:
sess.run(
self.init_op,
feed_dict={
self.input_values: [4, 5, 6],
self.cycle_length: 2,
self.block_length: 1,
self.sloppy: False,
self.buffer_output_elements: 1,
self.prefetch_input_elements: 0,
})
for i, expected_element in enumerate(
self._interleave([[4] * 4, [5], [6] * 6] * self.repeat_count, 2, 1)):
if expected_element == 5:
with self.assertRaises(errors.InvalidArgumentError):
sess.run(self.next_element)
else:
actual_element = sess.run(self.next_element)
self.assertEqual(expected_element, actual_element,
"At index %s: %s expected, got: %s" %
(i, expected_element, actual_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.next_element)
if __name__ == "__main__":
test.main()
|
richardcs/ansible | refs/heads/devel | lib/ansible/modules/clustering/consul_kv.py | 35 | #!/usr/bin/python
#
# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
# (c) 2018 Genome Research Ltd.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: consul_kv
short_description: Manipulate entries in the key/value store of a consul cluster
description:
- Allows the retrieval, addition, modification and deletion of key/value entries in a
consul cluster via the agent. The entire contents of the record, including
the indices, flags and session are returned as 'value'.
- Note that when a value is removed, the existing value, if any, is
returned as part of the results.
- See http://www.consul.io/docs/agent/http.html#kv for more details.
requirements:
- python >= 2.6
- python-consul
- requests
version_added: "2.0"
author:
- Steve Gargan (@sgargan)
- Colin Nolan (@colin-nolan)
options:
state:
description:
- The action to take with the supplied key and value. If the state is 'present' and `value` is set, the key
contents will be set to the value supplied and `changed` will be set to `true` only if the value was
different to the current contents. If the state is 'present' and `value` is not set, the existing value
associated to the key will be returned. The state 'absent' will remove the key/value pair,
again 'changed' will be set to true only if the key actually existed
prior to the removal. An attempt can be made to obtain or free the
lock associated with a key/value pair with the states 'acquire' or
'release' respectively. a valid session must be supplied to make the
attempt changed will be true if the attempt is successful, false
otherwise.
choices: [ absent, acquire, present, release ]
default: present
key:
description:
- The key at which the value should be stored.
required: yes
value:
description:
- The value to be associated with the given key; required if C(state)
is C(present).
required: yes
recurse:
description:
- If the key represents a prefix, each entry with the prefix can be
retrieved by setting this to C(yes).
type: bool
default: 'no'
session:
description:
- The session that should be used to acquire or release a lock
associated with a key/value pair.
token:
description:
- The token key identifying an ACL rule set that controls access to
the key/value pair.
cas:
description:
- Used when acquiring a lock with a session. If the C(cas) is C(0), then
Consul will only put the key if it does not already exist. If the
C(cas) value is non-zero, then the key is only set if the index matches
the ModifyIndex of that key.
flags:
description:
- Opaque integer value that can be passed when setting a value.
host:
description:
- Host of the consul agent.
default: localhost
port:
description:
- The port on which the consul agent is running.
default: 8500
scheme:
description:
- The protocol scheme on which the consul agent is running.
default: http
version_added: "2.1"
validate_certs:
description:
- Whether to verify the tls certificate of the consul agent.
type: bool
default: 'yes'
version_added: "2.1"
"""
EXAMPLES = '''
# If the key does not exist, the value associated to the "data" property in `retrieved_key` will be `None`
# If the key value is empty string, `retrieved_key["data"]["Value"]` will be `None`
- name: retrieve a value from the key/value store
consul_kv:
key: somekey
register: retrieved_key
- name: Add or update the value associated with a key in the key/value store
consul_kv:
key: somekey
value: somevalue
- name: Remove a key from the store
consul_kv:
key: somekey
state: absent
- name: Add a node to an arbitrary group via consul inventory (see consul.ini)
consul_kv:
key: ansible/groups/dc1/somenode
value: top_secret
- name: Register a key/value pair with an associated session
consul_kv:
key: stg/node/server_birthday
value: 20160509
session: "{{ sessionid }}"
state: acquire
'''
from ansible.module_utils._text import to_text
try:
import consul
from requests.exceptions import ConnectionError
python_consul_installed = True
except ImportError:
python_consul_installed = False
from ansible.module_utils.basic import AnsibleModule
# Note: although the python-consul documentation implies that using a key with a value of `None` with `put` has a
# special meaning (https://python-consul.readthedocs.io/en/latest/#consul-kv), if not set in the subsequently API call,
# the value just defaults to an empty string (https://www.consul.io/api/kv.html#create-update-key)
NOT_SET = None
def _has_value_changed(consul_client, key, target_value):
"""
Uses the given Consul client to determine if the value associated to the given key is different to the given target
value.
:param consul_client: Consul connected client
:param key: key in Consul
:param target_value: value to be associated to the key
:return: tuple where the first element is the value of the "X-Consul-Index" header and the second is `True` if the
value has changed (i.e. the stored value is not the target value)
"""
index, existing = consul_client.kv.get(key)
if not existing:
return index, True
try:
changed = to_text(existing['Value'], errors='surrogate_or_strict') != target_value
return index, changed
except UnicodeError:
# Existing value was not decodable but all values we set are valid utf-8
return index, True
def execute(module):
    """Dispatches to the handler for the requested ``state``.

    ``acquire``/``release`` manipulate a session lock, ``present`` reads or
    writes the key depending on whether a value was supplied, and ``absent``
    deletes the key.
    """
    state = module.params.get('state')

    if state in ('acquire', 'release'):
        lock(module, state)
    elif state == 'present':
        if module.params.get('value') is NOT_SET:
            # No value supplied: treat 'present' as a read of the current value.
            get_value(module)
        else:
            set_value(module)
    elif state == 'absent':
        remove_value(module)
    else:
        # Fix: an unsupported state must be reported as a failure; the
        # original called exit_json, which reports success.
        module.fail_json(msg="Unsupported state: %s" % (state, ))
def lock(module, state):
    """Acquires or releases a session lock on a key/value pair.

    :param module: AnsibleModule instance
    :param state: either 'acquire' or 'release'
    """
    session = module.params.get('session')
    key = module.params.get('key')
    value = module.params.get('value')

    if not session:
        # Bug fix: AnsibleModule has no `fail` method, so the original raised
        # AttributeError here instead of reporting the problem; use fail_json.
        # Validated before contacting the agent so we fail fast.
        module.fail_json(
            msg='%s of lock for %s requested but no session supplied' %
            (state, key))

    consul_api = get_consul_api(module)

    index, changed = _has_value_changed(consul_api, key, value)

    if changed and not module.check_mode:
        if state == 'acquire':
            changed = consul_api.kv.put(key, value,
                                        cas=module.params.get('cas'),
                                        acquire=session,
                                        flags=module.params.get('flags'))
        else:
            changed = consul_api.kv.put(key, value,
                                        cas=module.params.get('cas'),
                                        release=session,
                                        flags=module.params.get('flags'))

    module.exit_json(changed=changed,
                     index=index,
                     key=key)
def get_value(module):
    """Reports the current value (or recursive set of values) for the key."""
    consul_api = get_consul_api(module)
    lookup_key = module.params.get('key')

    index, existing_value = consul_api.kv.get(
        lookup_key, recurse=module.params.get('recurse'))

    module.exit_json(changed=False, index=index, data=existing_value)
def set_value(module):
    """Stores the supplied value under the key, honouring check mode and cas."""
    consul_api = get_consul_api(module)

    key = module.params.get('key')
    value = module.params.get('value')
    if value is NOT_SET:
        raise AssertionError('Cannot set value of "%s" to `NOT_SET`' % key)

    index, changed = _has_value_changed(consul_api, key, value)
    if changed and not module.check_mode:
        changed = consul_api.kv.put(key, value,
                                    cas=module.params.get('cas'),
                                    flags=module.params.get('flags'))

    # Optionally read the value back so callers see what is now stored.
    stored = None
    if module.params.get('retrieve'):
        index, stored = consul_api.kv.get(key)

    module.exit_json(changed=changed, index=index, key=key, data=stored)
def remove_value(module):
    ''' remove the value associated with the given key. if the recurse parameter
    is set then any key prefixed with the given key will be removed. '''
    consul_api = get_consul_api(module)

    key = module.params.get('key')
    recurse = module.params.get('recurse')

    index, existing = consul_api.kv.get(key, recurse=recurse)

    # Only issue the delete (and report a change) if something was stored.
    changed = existing is not None
    if changed and not module.check_mode:
        consul_api.kv.delete(key, recurse)

    module.exit_json(changed=changed, index=index, key=key, data=existing)
def get_consul_api(module, token=None):
    """Builds a Consul client from the module's connection parameters.

    :param module: AnsibleModule instance supplying host/port/scheme/etc.
    :param token: optional ACL token; falls back to the module's `token`
        parameter. (Fix: this argument was previously accepted but silently
        ignored.)
    """
    if token is None:
        token = module.params.get('token')
    return consul.Consul(host=module.params.get('host'),
                         port=module.params.get('port'),
                         scheme=module.params.get('scheme'),
                         verify=module.params.get('validate_certs'),
                         token=token)
def test_dependencies(module):
    """Abort with a helpful message when python-consul is not installed."""
    if python_consul_installed:
        return
    module.fail_json(msg="python-consul required for this module. "
                         "see https://python-consul.readthedocs.io/en/latest/#installation")
def main():
    """Module entry point: parse arguments, dispatch, and map errors."""
    argument_spec = dict(
        cas=dict(type='str'),
        flags=dict(type='str'),
        key=dict(type='str', required=True),
        host=dict(type='str', default='localhost'),
        scheme=dict(type='str', default='http'),
        validate_certs=dict(type='bool', default=True),
        port=dict(type='int', default=8500),
        recurse=dict(type='bool'),
        retrieve=dict(type='bool', default=True),
        state=dict(type='str', default='present', choices=['absent', 'acquire', 'present', 'release']),
        token=dict(type='str', no_log=True),
        value=dict(type='str', default=NOT_SET),
        session=dict(type='str'),
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    test_dependencies(module)
    try:
        execute(module)
    except ConnectionError as e:
        # Connectivity problems get a dedicated, actionable message.
        module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
            module.params.get('host'), module.params.get('port'), e))
    except Exception as e:
        module.fail_json(msg=str(e))
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
albertomurillo/ansible | refs/heads/devel | test/units/modules/network/f5/test_bigip_file_copy.py | 16 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_file_copy import ApiParameters
from library.modules.bigip_file_copy import IFileManager
from library.modules.bigip_file_copy import ModuleParameters
from library.modules.bigip_file_copy import ModuleManager
from library.modules.bigip_file_copy import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_file_copy import ApiParameters
from ansible.modules.network.f5.bigip_file_copy import IFileManager
from ansible.modules.network.f5.bigip_file_copy import ModuleParameters
from ansible.modules.network.f5.bigip_file_copy import ModuleManager
from ansible.modules.network.f5.bigip_file_copy import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load (and memoise) a fixture file, parsing it as JSON when possible.

    Non-JSON fixtures are returned as raw text. Results are cached in the
    module-level ``fixture_data`` dict keyed by absolute path.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as handle:
        raw = handle.read()
    try:
        parsed = json.loads(raw)
    except Exception:
        parsed = raw
    fixture_data[path] = parsed
    return parsed
class TestParameters(unittest.TestCase):
    """Unit tests for the module's parameter adapter classes."""

    def test_module_parameters(self):
        # User-supplied args must surface unchanged on ModuleParameters.
        params = ModuleParameters(params=dict(
            name='foo',
            source='file.txt',
            force=True,
        ))
        assert params.name == 'foo'
        assert params.source == 'file.txt'
        assert params.force is True

    def test_api_parameters(self):
        # API-side fixture data must expose the expected checksum.
        fixture = load_fixture('load_sys_file_external-monitor_1.json')
        api = ApiParameters(params=fixture)
        assert api.checksum == '0c78e6641632e47d11802b29cfd119d2233cb80a'
class TestManager(unittest.TestCase):
    """Behavioural tests for ModuleManager with device access mocked out."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create(self, *args):
        """Creating a new file copy reports a change."""
        set_module_args(dict(
            name='foo',
            source='file.txt',
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode,
            required_if=self.spec.required_if
        )
        # Stub every device-touching call so only module logic runs.
        file_manager = IFileManager(module=module)
        file_manager.exists = Mock(return_value=False)
        file_manager.create_on_device = Mock(return_value=True)
        file_manager.upload_to_device = Mock(return_value=True)
        file_manager.remove_uploaded_file_from_device = Mock(return_value=True)
        manager = ModuleManager(module=module)
        manager.get_manager = Mock(return_value=file_manager)
        results = manager.exec_module()
        assert results['changed'] is True
|
EVEprosper/ProsperTradier | refs/heads/master | tests/utilities.py | 1 | """utilities.py: helper functions for testing"""
from os import path, getenv
import configparser
import logging
HERE = path.abspath(path.dirname(__file__))
ROOT = path.abspath(path.dirname(HERE))
TEST_CONFIG_FILE = path.join(HERE, 'test.cfg')
def parse_test_config(config_filepath=TEST_CONFIG_FILE):
    """loads test.cfg for use in tests

    Args:
        config_filepath (str): path to config file

    Returns:
        (:obj:`configparser.ConfigParser`)
    """
    parser = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation(),
        allow_no_value=True,
        delimiters=('='),
        inline_comment_prefixes=('#')
    )
    # read_file (rather than read) so a missing file raises loudly.
    with open(config_filepath, 'r') as config_handle:
        parser.read_file(config_handle)
    return parser
CONFIG = parse_test_config(TEST_CONFIG_FILE)
def get_config(
        section_name,
        key_name,
        config=CONFIG
):
    """wrapper for configparser.get() to check env for vals

    Make travis secret-keeping easier

    Args:
        section_name (str): config section
        key_name (str): config key
        config (:obj:`configparser.ConfigParser`, optional): config to search

    Returns:
        (str) value from config or env

    Raises:
        configparser.NoSectionError: section missing from config
        Exception: value found in neither config nor environment
    """
    # BUG FIX: `value` was previously left unbound when NoOptionError was
    # swallowed, so the `if value:` check raised NameError instead of
    # falling through to the environment lookup.
    value = None
    try:
        value = config.get(section_name, key_name)
    except configparser.NoOptionError:
        pass
    except configparser.NoSectionError:
        raise
    if value:
        return value
    env_value = get_value_from_env(section_name, key_name)
    if env_value:
        return env_value
    # Give the bare Exception a message so failures are diagnosable.
    raise Exception(
        'No value found for [{0}] {1} in config or environment'.format(
            section_name, key_name))
ENVNAME_PAD = 'PROSPER'
def get_value_from_env(
        section_name,
        key_name,
        envname_pad=ENVNAME_PAD
):
    """check environment for key/value pair

    Args:
        section_name (str): config section
        key_name (str): config key
        envname_pad (str, optional): namespace pad

    Returns:
        (str): value from environment (None when unset)
    """
    # Environment variables follow the PAD_SECTION__KEY convention.
    env_name = '%s_%s__%s' % (envname_pad, section_name, key_name)
    return getenv(env_name)
def debug_logger():
    """build debug logger (DEBUG level, file + stdout handlers)"""
    logger = logging.getLogger('tradier_test')
    logger.setLevel('DEBUG')
    formatter = logging.Formatter(
        '[%(levelname)s:%(filename)s--%(funcName)s:%(lineno)s] %(message)s')
    # Mirror everything to a log file next to the tests...
    file_handler = logging.FileHandler(path.join(HERE, 'tradier_test.log'))
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    # ...and to stdout for interactive runs.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    return logger
|
YelaSeamless/mysql-connector-python | refs/heads/master | lib/mysql/connector/dbapi.py | 35 | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
This module implements some constructors and singletons as required by the
DB API v2.0 (PEP-249).
"""
# Python Db API v2
apilevel = '2.0'
threadsafety = 1
paramstyle = 'pyformat'
import time
import datetime
from . import constants
class _DBAPITypeObject(object):
def __init__(self, *values):
self.values = values
def __eq__(self, other):
if other in self.values:
return True
else:
return False
def __ne__(self, other):
if other in self.values:
return False
else:
return True
# PEP-249 type constructors: expose datetime classes under the DB API names.
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DateFromTicks(ticks):
    """Construct a Date from a POSIX timestamp (PEP-249)."""
    return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
    """Construct a Time from a POSIX timestamp (PEP-249)."""
    return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
    """Construct a Timestamp from a POSIX timestamp (PEP-249)."""
    return Timestamp(*time.localtime(ticks)[:6])
# PEP-249 singletons for comparing cursor.description type codes.
Binary = bytes
STRING = _DBAPITypeObject(*constants.FieldType.get_string_types())
BINARY = _DBAPITypeObject(*constants.FieldType.get_binary_types())
NUMBER = _DBAPITypeObject(*constants.FieldType.get_number_types())
DATETIME = _DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# No row-id concept is exposed, so ROWID matches no type code.
ROWID = _DBAPITypeObject()
|
hkchenhongyi/django | refs/heads/master | tests/m2m_multiple/tests.py | 227 | from __future__ import unicode_literals
from datetime import datetime
from django.test import TestCase
from .models import Article, Category
class M2MMultipleTests(TestCase):
    """Exercise two independent M2M relations between Article and Category."""

    def test_multiple(self):
        sports, news, crime, life = [
            Category.objects.create(name=label)
            for label in ["Sports", "News", "Crime", "Life"]
        ]
        steals = Article.objects.create(
            headline="Parrot steals", pub_date=datetime(2005, 11, 27)
        )
        steals.primary_categories.add(news, crime)
        steals.secondary_categories.add(life)
        runs = Article.objects.create(
            headline="Parrot runs", pub_date=datetime(2005, 11, 28)
        )
        runs.primary_categories.add(sports, news)
        runs.secondary_categories.add(life)
        by_name = lambda c: c.name
        by_headline = lambda a: a.headline
        # Forward direction: each article sees its own categories.
        self.assertQuerysetEqual(
            steals.primary_categories.all(), ["Crime", "News"], by_name
        )
        self.assertQuerysetEqual(
            runs.primary_categories.all(), ["News", "Sports"], by_name
        )
        self.assertQuerysetEqual(
            steals.secondary_categories.all(), ["Life"], by_name
        )
        # Reverse direction: the two relations stay independent.
        self.assertQuerysetEqual(
            sports.primary_article_set.all(), ["Parrot runs"], by_headline
        )
        self.assertQuerysetEqual(sports.secondary_article_set.all(), [])
        self.assertQuerysetEqual(
            news.primary_article_set.all(),
            ["Parrot steals", "Parrot runs"], by_headline
        )
        self.assertQuerysetEqual(news.secondary_article_set.all(), [])
        self.assertQuerysetEqual(
            crime.primary_article_set.all(), ["Parrot steals"], by_headline
        )
        self.assertQuerysetEqual(crime.secondary_article_set.all(), [])
        self.assertQuerysetEqual(life.primary_article_set.all(), [])
        self.assertQuerysetEqual(
            life.secondary_article_set.all(),
            ["Parrot steals", "Parrot runs"], by_headline
        )
|
X-DataInitiative/tick | refs/heads/master | tick/robust/tests/model_epsilon_insensitive_test.py | 2 | # License: BSD 3 clause
import unittest
import numpy as np
from scipy.sparse import csr_matrix
from tick.robust import ModelEpsilonInsensitive
from tick.base_model.tests.generalized_linear_model import TestGLM
from tick.linear_model import SimuLinReg
class Test(TestGLM):
    # NOTE(review): both tests are seeded-RNG driven; the exact order of
    # np.random calls determines the data, so statements must not be reordered.
    def test_ModelEpsilonInsensitive(self):
        """...Numerical consistency check of loss and gradient for
        Epsilon-Insensitive model
        """
        np.random.seed(12)
        n_samples, n_features = 5000, 10
        w0 = np.random.randn(n_features)
        c0 = np.random.randn()
        # First check with intercept
        X, y = SimuLinReg(w0, c0, n_samples=n_samples,
                          verbose=False).simulate()
        X_spars = csr_matrix(X)
        # Dense and sparse inputs must produce consistent loss/gradient.
        model = ModelEpsilonInsensitive(fit_intercept=True,
                                        threshold=1.13).fit(X, y)
        model_spars = ModelEpsilonInsensitive(fit_intercept=True,
                                              threshold=1.13).fit(X_spars, y)
        self.run_test_for_glm(model, model_spars)
        self._test_glm_intercept_vs_hardcoded_intercept(model)
        # Then check without intercept
        X, y = SimuLinReg(w0, None, n_samples=n_samples, verbose=False,
                          seed=2038).simulate()
        X_spars = csr_matrix(X)
        model = ModelEpsilonInsensitive(fit_intercept=False).fit(X, y)
        model_spars = ModelEpsilonInsensitive(fit_intercept=False).fit(
            X_spars, y)
        self.run_test_for_glm(model, model_spars)
    def test_ModelEpsilonInsensitive_threshold(self):
        """Threshold is forwarded to the C++ model, settable, and validated."""
        np.random.seed(12)
        n_samples, n_features = 5000, 10
        w0 = np.random.randn(n_features)
        c0 = np.random.randn()
        # First check with intercept
        X, y = SimuLinReg(w0, c0, n_samples=n_samples,
                          verbose=False).simulate()
        model = ModelEpsilonInsensitive(threshold=1.541).fit(X, y)
        self.assertEqual(model._model.get_threshold(), 1.541)
        model.threshold = 3.14
        self.assertEqual(model._model.get_threshold(), 3.14)
        # Non-positive thresholds must be rejected at fit time and on assignment.
        msg = '^threshold must be > 0$'
        with self.assertRaisesRegex(RuntimeError, msg):
            model = ModelEpsilonInsensitive(threshold=-1).fit(X, y)
        with self.assertRaisesRegex(RuntimeError, msg):
            model.threshold = 0.
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
nickanderson/ansible | refs/heads/devel | v2/ansible/compat/__init__.py | 332 | # (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat library for ansible. This contains compatibility definitions for older python
When we need to import a module differently depending on python version, do it
here. Then in the code we can simply import from compat in order to get what we want.
'''
|
sinistance/PokemonGo-Bot | refs/heads/dev | pokemongo_bot/walkers/polyline_generator.py | 23 | import time
from itertools import chain
from math import ceil
import haversine
import polyline
import requests
class Polyline(object):
    """Walk a Google-Directions polyline at a fixed speed over wall-clock time.

    Fetches a walking route between origin and destination, decodes its
    polyline into waypoints, and interpolates the current position from the
    elapsed (non-paused) time and the configured speed.
    """
    def __init__(self, origin, destination, speed):
        # origin/destination: assumed (lat, lon) pairs -- TODO confirm with callers.
        # speed: metres per second (used against haversine distances below).
        self.DISTANCE_API_URL='https://maps.googleapis.com/maps/api/directions/json?mode=walking'
        self.origin = origin
        self.destination = destination
        self.URL = '{}&origin={}&destination={}'.format(self.DISTANCE_API_URL,
                                                        '{},{}'.format(*self.origin),
                                                        '{},{}'.format(*self.destination))
        # Synchronous HTTP call at construction time; no retry/timeout handling.
        self.request_responce = requests.get(self.URL).json()
        try:
            self.polyline_points = [x['polyline']['points'] for x in
                                    self.request_responce['routes'][0]['legs'][0]['steps']]
        except IndexError:
            # No route found: 'routes' is an empty list, kept as-is so
            # get_points below simply yields no intermediate waypoints.
            self.polyline_points = self.request_responce['routes']
        self.speed = float(speed)
        # Waypoints always start at origin and end at destination.
        self.points = [self.origin] + self.get_points(self.polyline_points) + [self.destination]
        self.lat, self.long = self.points[0][0], self.points[0][1]
        self.polyline = self.combine_polylines(self.points)
        # Timekeeping for position interpolation and pause accounting.
        self._timestamp = time.time()
        self.is_paused = False
        self._last_paused_timestamp = None
        self._paused_total = 0.0
    def reset_timestamps(self):
        """Restart the walk clock and clear any accumulated pause time."""
        self._timestamp = time.time()
        self.is_paused = False
        self._last_paused_timestamp = None
        self._paused_total = 0.0
    def get_points(self, polyline_points):
        """Decode each encoded step polyline and de-duplicate waypoints."""
        crd_points = []
        for points in polyline_points:
            crd_points += polyline.decode(points)
        # Preserve order while dropping repeated coordinates (step seams).
        crd_points = [x for n,x in enumerate(crd_points) if x not in crd_points[:n]]
        return crd_points
    def combine_polylines(self, points):
        """Re-encode the full waypoint list as a single polyline string."""
        return polyline.encode(points)
    def pause(self):
        # Idempotent: a second pause() keeps the original pause timestamp.
        if not self.is_paused:
            self.is_paused = True
            self._last_paused_timestamp = time.time()
    def unpause(self):
        # Idempotent: accumulates paused duration only on a real transition.
        if self.is_paused:
            self.is_paused = False
            self._paused_total += time.time() - self._last_paused_timestamp
            self._last_paused_timestamp = None
    def walk_steps(self):
        """Return consecutive (origin, destination) waypoint pairs, skipping
        zero-length segments."""
        if self.points:
            walk_steps = zip(chain([self.points[0]], self.points),
                             chain(self.points, [self.points[-1]]))
            walk_steps = filter(None, [(o, d) if o != d else None for o, d in walk_steps])
            # consume the filter as list https://github.com/th3w4y/PokemonGo-Bot/issues/27
            return list(walk_steps)
        else:
            return []
    def get_pos(self):
        """Interpolate the current position from elapsed time and speed."""
        walked_distance = 0.0
        if not self.is_paused:
            time_passed = time.time()
        else:
            time_passed = self._last_paused_timestamp
        # Distance covered so far, excluding time spent paused.
        time_passed_distance = self.speed * abs(time_passed - self._timestamp - self._paused_total)
        # check if there are any steps to take https://github.com/th3w4y/PokemonGo-Bot/issues/27
        if self.walk_steps():
            # Map cumulative walked distance -> segment, then find the
            # segment containing the current distance.
            steps_dict = {}
            for step in self.walk_steps():
                walked_distance += haversine.haversine(*step)*1000
                steps_dict[walked_distance] = step
            for walked_end_step in sorted(steps_dict.keys()):
                if walked_end_step >= time_passed_distance:
                    break
            step_distance = haversine.haversine(*steps_dict[walked_end_step])*1000
            if walked_end_step >= time_passed_distance:
                percentage_walked = (time_passed_distance - (walked_end_step - step_distance)) / step_distance
            else:
                # Past the last segment: clamp to its endpoint.
                percentage_walked = 1.0
            return self.calculate_coord(percentage_walked, *steps_dict[walked_end_step])
        else:
            # otherwise return the destination https://github.com/th3w4y/PokemonGo-Bot/issues/27
            return [self.points[-1]]
    def calculate_coord(self, percentage, o, d):
        """Linearly interpolate between waypoints o and d."""
        # If this is the destination then returning as such
        if self.points[-1] == d:
            return [d]
        else:
            # intermediary points returned with 5 decimals precision only
            # this ensures ~3-50cm ofset from the geometrical point calculated
            lat = o[0]+ (d[0] -o[0]) * percentage
            lon = o[1]+ (d[1] -o[1]) * percentage
            return [(round(lat, 5), round(lon, 5))]
    def get_total_distance(self):
        """Total route length in metres, rounded up."""
        return ceil(sum([haversine.haversine(*x)*1000 for x in self.walk_steps()]))
|
plumgrid/plumgrid-nova | refs/heads/master | nova/scheduler/utils.py | 1 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
import sys
from nova.compute import flavors
from nova.compute import utils as compute_utils
from nova import db
from nova import notifications
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common.notifier import api as notifier
LOG = logging.getLogger(__name__)
def build_request_spec(ctxt, image, instances):
    """Build a request_spec for the scheduler.

    The request_spec assumes that all instances to be scheduled are the same
    type.
    """
    instance = instances[0]
    instance_type = flavors.extract_flavor(instance)
    # NOTE(comstud): This is a bit ugly, but will get cleaned up when
    # we're passing an InstanceType internal object.
    instance_type['extra_specs'] = db.flavor_extra_specs_get(
        ctxt, instance_type['flavorid'])
    request_spec = {
        'image': image,
        'instance_properties': instance,
        'instance_type': instance_type,
        'num_instances': len(instances),
        # NOTE(alaski): This should be removed as logic moves from the
        # scheduler to conductor. Provides backwards compatibility now.
        'instance_uuids': [inst['uuid'] for inst in instances],
    }
    return jsonutils.to_primitive(request_spec)
def set_vm_state_and_notify(context, service, method, updates, ex,
                            request_spec, db):
    """changes VM state and notifies.

    Applies ``updates`` (which must contain 'vm_state') to every instance in
    the request spec, records the fault, and emits an ERROR notification per
    instance. Called after a catastrophic scheduling failure.
    """
    LOG.warning(_("Failed to %(service)s_%(method)s: %(ex)s"),
                {'service': service, 'method': method, 'ex': ex})
    vm_state = updates['vm_state']
    properties = request_spec.get('instance_properties', {})
    # NOTE(vish): We shouldn't get here unless we have a catastrophic
    #             failure, so just set all instances to error. if uuid
    #             is not set, instance_uuids will be set to [None], this
    #             is solely to preserve existing behavior and can
    #             be removed along with the 'if instance_uuid:' if we can
    #             verify that uuid is always set.
    uuids = [properties.get('uuid')]
    # NOTE(review): deferred import, presumably to avoid a circular
    # dependency between scheduler and conductor -- confirm before moving.
    from nova.conductor import api as conductor_api
    for instance_uuid in request_spec.get('instance_uuids') or uuids:
        if instance_uuid:
            state = vm_state.upper()
            LOG.warning(_('Setting instance to %s state.'), state,
                        instance_uuid=instance_uuid)
            # update instance state and notify on the transition
            (old_ref, new_ref) = db.instance_update_and_get_original(
                    context, instance_uuid, updates)
            notifications.send_update(context, old_ref, new_ref,
                    service=service)
            compute_utils.add_instance_fault_from_exc(context,
                    conductor_api.LocalAPI(),
                    new_ref, ex, sys.exc_info())
            payload = dict(request_spec=request_spec,
                           instance_properties=properties,
                           instance_id=instance_uuid,
                           state=vm_state,
                           method=method,
                           reason=ex)
            event_type = '%s.%s' % (service, method)
            notifier.notify(context, notifier.publisher_id(service),
                            event_type, notifier.ERROR, payload)
def populate_filter_properties(filter_properties, host_state):
    """Add additional information to the filter properties after a node has
    been selected by the scheduling process.
    """
    # host_state may arrive either as a dict or as a HostState-like object.
    if isinstance(host_state, dict):
        host = host_state['host']
        nodename = host_state['nodename']
        limits = host_state['limits']
    else:
        host = host_state.host
        nodename = host_state.nodename
        limits = host_state.limits
    # Record the chosen node so a re-schedule won't retry it.
    _add_retry_host(filter_properties, host, nodename)
    # Record the oversubscription limits of the selected host.
    filter_properties['limits'] = limits
def _add_retry_host(filter_properties, host, node):
"""Add a retry entry for the selected compute node. In the event that
the request gets re-scheduled, this entry will signal that the given
node has already been tried.
"""
retry = filter_properties.get('retry', None)
if not retry:
return
hosts = retry['hosts']
hosts.append([host, node])
|
ctrlaltdylan/CouchPotato | refs/heads/master | library/minify/js.py | 10 | #!/usr/bin/python
# This code is original from jsmin by Douglas Crockford, it was translated to
# Python by Baruch Even. The original code had the following copyright and
# license.
#
# /* jsmin.c
# 2007-05-22
#
# Copyright (c) 2002 Douglas Crockford (www.crockford.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# The Software shall be used for Good, not Evil.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# */
from StringIO import StringIO
def jsmin(js):
    """Minify the JavaScript source *js* and return the result.

    A leading newline emitted by the minifier is stripped. (Also avoids
    the original's shadowing of the builtin ``str``.)
    """
    source = StringIO(js)
    sink = StringIO()
    JavascriptMinify().minify(source, sink)
    result = sink.getvalue()
    if result.startswith('\n'):
        result = result[1:]
    return result
def isAlphanum(c):
    """return true if the character is a letter, digit, underscore,
       dollar sign, or non-ASCII character.
    """
    if c is None:
        return False
    return ('a' <= c <= 'z' or 'A' <= c <= 'Z' or '0' <= c <= '9'
            or c == '_' or c == '$' or c == '\\' or ord(c) > 126)
class UnterminatedComment(Exception):
    """Raised when EOF is reached inside a /* ... */ comment."""
    pass
class UnterminatedStringLiteral(Exception):
    """Raised when a string literal is not closed before EOL/EOF."""
    pass
class UnterminatedRegularExpression(Exception):
    """Raised when a regex literal is not closed before EOL/EOF."""
    pass
class JavascriptMinify(object):
    """Character-level JavaScript minifier (port of Crockford's jsmin.c).

    State: ``theA``/``theB`` are the two-character sliding window,
    ``theLookahead`` is a one-character pushback buffer, and
    ``instream``/``outstream`` are the I/O endpoints set by minify().
    """
    def _outA(self):
        # Emit the current window character A.
        self.outstream.write(self.theA)
    def _outB(self):
        # Emit the lookahead window character B.
        self.outstream.write(self.theB)
    def _get(self):
        """return the next character from stdin. Watch out for lookahead. If
           the character is a control character, translate it to a space or
           linefeed.
        """
        c = self.theLookahead
        self.theLookahead = None
        if c == None:
            c = self.instream.read(1)
        if c >= ' ' or c == '\n':
            return c
        if c == '': # EOF
            return '\000'
        if c == '\r':
            return '\n'
        return ' '
    def _peek(self):
        # Read one character ahead and stash it for the next _get().
        self.theLookahead = self._get()
        return self.theLookahead
    def _next(self):
        """get the next character, excluding comments. peek() is used to see
           if an unescaped '/' is followed by a '/' or '*'.
        """
        c = self._get()
        if c == '/' and self.theA != '\\':
            p = self._peek()
            if p == '/':
                # Line comment: skip to end of line.
                c = self._get()
                while c > '\n':
                    c = self._get()
                return c
            if p == '*':
                # Block comment: skip to the closing */, which collapses
                # to a single space.
                c = self._get()
                while 1:
                    c = self._get()
                    if c == '*':
                        if self._peek() == '/':
                            self._get()
                            return ' '
                    if c == '\000':
                        raise UnterminatedComment()
        return c
    def _action(self, action):
        """do something! What you do is determined by the argument:
           1   Output A. Copy B to A. Get the next B.
           2   Copy B to A. Get the next B. (Delete A).
           3   Get the next B. (Delete B).
           action treats a string as a single character. Wow!
           action recognizes a regular expression if it is preceded by ( or , or =.
        """
        if action <= 1:
            self._outA()
        if action <= 2:
            self.theA = self.theB
            if self.theA == "'" or self.theA == '"':
                # String literal: copy verbatim (honouring backslash escapes)
                # until the matching quote.
                while 1:
                    self._outA()
                    self.theA = self._get()
                    if self.theA == self.theB:
                        break
                    if self.theA <= '\n':
                        raise UnterminatedStringLiteral()
                    if self.theA == '\\':
                        self._outA()
                        self.theA = self._get()
        if action <= 3:
            self.theB = self._next()
            # A '/' after any of these characters starts a regex literal,
            # not a division operator.
            if self.theB == '/' and (self.theA == '(' or self.theA == ',' or
                                     self.theA == '=' or self.theA == ':' or
                                     self.theA == '[' or self.theA == '?' or
                                     self.theA == '!' or self.theA == '&' or
                                     self.theA == '|' or self.theA == ';' or
                                     self.theA == '{' or self.theA == '}' or
                                     self.theA == '\n'):
                self._outA()
                self._outB()
                # Copy the regex body verbatim up to the closing '/'.
                while 1:
                    self.theA = self._get()
                    if self.theA == '/':
                        break
                    elif self.theA == '\\':
                        self._outA()
                        self.theA = self._get()
                    elif self.theA <= '\n':
                        raise UnterminatedRegularExpression()
                    self._outA()
                self.theB = self._next()
    def _jsmin(self):
        """Copy the input to the output, deleting the characters which are
           insignificant to JavaScript. Comments will be removed. Tabs will be
           replaced with spaces. Carriage returns will be replaced with linefeeds.
           Most spaces and linefeeds will be removed.
        """
        self.theA = '\n'
        self._action(3)
        while self.theA != '\000':
            if self.theA == ' ':
                # Keep a space only when it separates two identifier chars.
                if isAlphanum(self.theB):
                    self._action(1)
                else:
                    self._action(2)
            elif self.theA == '\n':
                # Keep a newline only where removing it could merge tokens.
                if self.theB in ['{', '[', '(', '+', '-']:
                    self._action(1)
                elif self.theB == ' ':
                    self._action(3)
                else:
                    if isAlphanum(self.theB):
                        self._action(1)
                    else:
                        self._action(2)
            else:
                if self.theB == ' ':
                    if isAlphanum(self.theA):
                        self._action(1)
                    else:
                        self._action(3)
                elif self.theB == '\n':
                    if self.theA in ['}', ']', ')', '+', '-', '"', '\'']:
                        self._action(1)
                    else:
                        if isAlphanum(self.theA):
                            self._action(1)
                        else:
                            self._action(3)
                else:
                    self._action(1)
    def minify(self, instream, outstream):
        """Minify instream into outstream; closes instream when done."""
        self.instream = instream
        self.outstream = outstream
        self.theA = '\n'
        self.theB = None
        self.theLookahead = None
        self._jsmin()
        self.instream.close()
#if __name__ == '__main__':
# import sys
# jsm = JavascriptMinify()
# jsm.minify(sys.stdin, sys.stdout)
|
ecatkins/instabilly | refs/heads/master | project/manage.py | 404 | #!/usr/bin/env python
import os
import sys
# Standard Django management entry point.
if __name__ == "__main__":
    # Fall back to the project settings module when none is configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
g-wydrzynski/YASim | refs/heads/master | thirdparty/gtest-1.7.0/test/gtest_env_var_test.py | 2408 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
    """Raises AssertionError unless expected == actual, printing both values.

    BUG FIX: the original used Python-2-only print statements, which are a
    SyntaxError under Python 3; the parenthesized single-argument form below
    behaves identically on both Python 2 and Python 3.
    """
    if expected != actual:
        print('Expected: %s' % (expected,))
        print('  Actual: %s' % (actual,))
        raise AssertionError
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""

  if value is None:
    # pop with a default is a no-op when the variable is absent.
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
def GetFlag(flag):
  """Runs gtest_env_var_test_ and returns its output."""

  args = [COMMAND] if flag is None else [COMMAND, flag]
  return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
  """Verifies that the given flag is affected by the corresponding env var."""

  env_var = 'GTEST_' + flag.upper()
  # With the variable set, the flag should report the test value...
  SetEnvVar(env_var, test_val)
  AssertEq(test_val, GetFlag(flag))
  # ...and revert to its default once the variable is cleared.
  SetEnvVar(env_var, None)
  AssertEq(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
  """Checks that GTEST_* environment variables drive the matching flags."""

  def testEnvVarAffectsFlag(self):
    """Tests that environment variable should affect the corresponding flag."""

    # (flag, value-to-set, expected-default) triples, checked in order.
    cases = [
        ('break_on_failure', '1', '0'),
        ('color', 'yes', 'auto'),
        ('filter', 'FooTest.Bar', '*'),
        ('output', 'xml:tmp/foo.xml', ''),
        ('print_time', '0', '1'),
        ('repeat', '999', '1'),
        ('throw_on_failure', '1', '0'),
        ('death_test_style', 'threadsafe', 'fast'),
        ('catch_exceptions', '0', '1'),
    ]
    if IS_LINUX:
      cases.append(('death_test_use_fork', '1', '0'))
      cases.append(('stack_trace_depth', '0', '100'))
    for flag, test_val, default_val in cases:
      TestFlag(flag, test_val, default_val)
# Run the suite via gtest's own test runner when invoked directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
|
vlachoudis/sl4a | refs/heads/master | python/src/Demo/tkinter/matt/canvas-reading-tag-info.py | 47 | from Tkinter import *
class Test(Frame):
    """Tk demo frame: draws a polygon and prints its canvas-item config.

    NOTE(review): Python 2 code (print statements, Tkinter import) -- keep
    as-is; it will not run under Python 3 without porting.
    """
    def printit(self):
        print "hi"
    def createWidgets(self):
        # QUIT button at the bottom terminates the mainloop.
        self.QUIT = Button(self, text='QUIT', foreground='red',
                           command=self.quit)
        self.QUIT.pack(side=BOTTOM, fill=BOTH)
        self.drawing = Canvas(self, width="5i", height="5i")
        # make a shape
        pgon = self.drawing.create_polygon(
            10, 10, 110, 10, 110, 110, 10 , 110,
            fill="red", tags=("weee", "foo", "groo"))
        # this is how you query an object for its attributes
        # config options FOR CANVAS ITEMS always come back in tuples of length 5.
        # 0 attribute name
        # 1 BLANK
        # 2 BLANK
        # 3 default value
        # 4 current value
        # the blank spots are for consistency with the config command that
        # is used for widgets. (remember, this is for ITEMS drawn
        # on a canvas widget, not widgets)
        option_value = self.drawing.itemconfig(pgon, "stipple")
        print "pgon's current stipple value is -->", option_value[4], "<--"
        option_value = self.drawing.itemconfig(pgon, "fill")
        print "pgon's current fill value is -->", option_value[4], "<--"
        print "   when he is usually colored -->", option_value[3], "<--"
        ## here we print out all the tags associated with this object
        option_value = self.drawing.itemconfig(pgon, "tags")
        print "pgon's tags are", option_value[4]
        self.drawing.pack(side=LEFT)
    def __init__(self, master=None):
        # Build the frame, pack it, and create all child widgets.
        Frame.__init__(self, master)
        Pack.config(self)
        self.createWidgets()
# Build the demo frame and hand control to the Tk event loop.
test = Test()
test.mainloop()
|
was4444/chromium.src | refs/heads/nw15 | tools/perf/page_sets/top_desktop_sites_2012Q3.py | 21 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
TOP_2013_URLS = [
'http://www.facebook.com/barackobama',
'https://www.google.com/search?q=barack%20obama',
'http://youtube.com',
'http://yahoo.com',
'http://www.baidu.com/s?wd=barack+obama',
'http://en.wikipedia.org/wiki/Wikipedia',
'http://qq.com',
'http://www.amazon.com/Kindle-Fire-Amazon-Tablet/dp/B0051VVOB2',
'http://googleblog.blogspot.com/',
'http://taobao.com',
'http://www.linkedin.com/in/linustorvalds',
'http://yahoo.co.jp',
'http://sina.com.cn',
'http://msn.com',
'http://yandex.ru/yandsearch?text=barack+obama',
'http://translation.babylon.com/',
'http://www.bing.com/search?q=barack+obama',
'http://wordpress.org/news/',
'http://www.ebay.com/sch/i.html?_nkw=antiques',
'http://163.com',
'http://www.soso.com/q?w=barack+obama',
'http://www.microsoft.com/en-us/default.aspx',
'http://go.mail.ru/search?mailru=1&mg=1&q=barack+obama',
'http://vk.com/id118712387',
'http://staff.tumblr.com/',
'http://sohu.com',
'http://sfbay.craigslist.org/mis/',
'http://www.ask.com/web?q=barack+obama&search=&qsrc=0&o=0&l=dir',
'http://www.apple.com/ipodtouch/',
'http://blog.pinterest.com/',
'http://pinterest.com/backdrophome/',
'http://paypal.com',
'http://bbc.co.uk',
'http://www.avg.com/us-en/avg-premium-security',
'http://googlesystem.blogspot.com/',
'http://tudou.com',
'http://blog.fc2.com/en/',
'http://imdb.com',
'http://youku.com',
'http://www.flickr.com/photos/thomashawk/',
'http://www.flickr.com/photos/thomashawk/sets/72157600284219965/detail/',
# pylint: disable=line-too-long
'http://search.yahoo.com/search?ei=UTF-8&trackingType=go_search_home&p=barack+obama&fr=hsusgo1&sa.x=0&sa.y=0',
'http://www.conduit.com/',
'http://ifeng.com',
'http://tmall.com',
'http://hao123.com',
'http://aol.com',
'http://zedo.com',
# pylint: disable=line-too-long
'http://search.mywebsearch.com/mywebsearch/GGmain.jhtml?searchfor=barack+obama',
'http://cnn.com',
'http://portal.ebay.de/deutschland-schraubt-angebote',
'http://www.adobe.com/products/photoshopfamily.html?promoid=JOLIW',
'http://global.rakuten.com/us/',
# pylint: disable=line-too-long
'http://laundry.about.com/od/kidsandlaundry/f/How-Do-I-Wash-A-Backpack.htm',
'http://thepiratebay.se/search/barack%20obama/0/99/0',
'http://360buy.com',
'http://huffingtonpost.com',
'http://alibaba.com',
'http://chinaz.com',
'http://www.sogou.com/web?query=barack+obama',
# pylint: disable=line-too-long
('http://www.amazon.de/gp/product/B0051QVF7A/ref=amb_link_170625867_1/'
'275-4711375-4099801?ie=UTF8&nav_sdd=aps&pf_rd_m=A3JWKAKR8XB7XF&'
'pf_rd_s=center-1&pf_rd_r=1C0XDBPB12WHDM63V11R&pf_rd_t=101&pf_rd_p'
'=320475427&pf_rd_i=301128'),
'http://google.pl',
'http://mediafire.com',
'http://espn.go.com',
'http://uol.com.br',
'http://www.godaddy.com/products/secure-hosting.aspx?ci=72738',
'http://imgur.com/gallery/b90ZE',
'http://home.alipay.com/bank/paymentKJ.htm',
'http://amazon.co.jp',
# pylint: disable=line-too-long
'http://stackoverflow.com/questions/11227809/why-is-processing-a-sorted-array-faster-than-an-unsorted-array',
'http://www.google.com/doubleclick/',
'http://search.4shared.com/q/CCAD/1/barack%20obama',
'http://dailymotion.com',
'http://globo.com',
'http://instagram.com/developer/',
'http://livedoor.com',
'http://wordpress.org/showcase/',
'http://bp.blogspot.com',
'http://wigetmedia.com/advertisers',
'http://www.search-results.com/web?&q=barack%20obama',
'http://cnet.com',
'http://nytimes.com',
'http://torrentz.eu/search?f=barack+obama',
'http://livejournal.com',
'http://douban.com',
'http://www.weather.com/weather/right-now/Mountain+View+CA+94043',
'http://dailymail.co.uk',
'http://www.tianya.cn/bbs/index.shtml',
'http://ehow.com',
'http://theproject.badoo.com/final.phtml',
# pylint: disable=line-too-long
'http://www.bankofamerica.com/deposits/checksave/index.cfm?template=check_eBanking',
'http://vimeo.com',
'http://360.cn',
'http://indiatimes.com',
'http://deviantart.com',
'http://reddit.com',
'http://aweber.com',
'http://warriorforum.com',
'http://spiegel.de',
'http://pconline.com.cn',
'http://mozilla.org',
'http://booking.com',
'http://goo.ne.jp',
'https://www.chase.com/online/Home-Lending/mortgages.htm',
'http://addthis.com',
'http://56.com',
'http://news.blogfa.com/',
'http://www.stumbleupon.com/jobs',
'https://www.dropbox.com/about',
'http://www.clicksor.com/publishers/adformat',
'http://answers.com',
'http://en.softonic.com/',
'http://walmart.com',
'http://pengyou.com',
'http://outbrain.com',
'http://comcast.net',
'http://foxnews.com',
'http://photobucket.com/findstuff/photography%20styles/',
'http://bleach.wikia.com/?redirect=no',
'http://sourceforge.net/projects/xoops/?source=frontpage&position=1',
'http://onet.pl',
'http://guardian.co.uk',
# pylint: disable=line-too-long
'https://www.wellsfargo.com/jump/enterprise/doublediscount?msc=5589&mplx=10918-70119-3408-64',
'http://wikimediafoundation.org/wiki/Home',
'http://xunlei.com',
'http://as.58.com/shuma/',
'http://skype.com',
'http://etsy.com',
'http://bild.de',
# pylint: disable=line-too-long
'http://search.naver.com/search.naver?where=nexearch&query=barack+obama&sm=top_hty&fbm=0&ie=utf8',
'http://statcounter.com/features/?PHPSESSID=bbjcvjr681bcul4vqvgq2qgmo7',
'http://iqiyi.com',
'http://fbcdn.net',
'http://www.myspace.com/browse/people',
'http://allegro.pl/antyki-i-sztuka',
'http://yesky.com',
'http://justbeenpaid.com',
'http://adultfriendfinder.com',
'http://fiverr.com',
'http://www.leboncoin.fr/annonces/offres/centre/',
'http://dictionary.reference.com/',
'http://realtime.rediff.com/instasearch#!barack%20obama',
'http://zol.com.cn',
'http://optmd.com',
'http://www.filestube.com/search.html?q=barack+obama&select=All',
'http://xinhuanet.com',
'http://www.salesforce.com/sales-cloud/overview/',
# pylint: disable=line-too-long
'http://www.squidoo.com/make-cards-and-gift-bags-with-antique-photos',
'http://www.domaintools.com/research/',
'http://download.cnet.com/windows/?tag=hdr;brandnav',
'https://rapidshare.com/#!shop',
'http://people.com.cn',
'http://ucoz.ru',
'http://free.fr',
'http://nicovideo.jp',
# pylint: disable=line-too-long
'http://www.yelp.com/search?find_desc=food&find_loc=San+Jose%2C+CA&ns=1',
'http://slideshare.net',
'http://archive.org/web/web.php',
'http://www.cntv.cn/index.shtml',
'http://english.cntv.cn/01/index.shtml',
'http://abonnez-vous.orange.fr/residentiel/accueil/accueil.aspx',
'http://v.it168.com/',
'http://nbcolympics.com',
'http://hootsuite.com',
# pylint: disable=line-too-long
'http://www.scribd.com/doc/52210329/The-Masters-Augusta-National-s-Amen-Corner-up-close',
'http://themeforest.net',
'http://4399.com',
'http://www.soku.com/v?keyword=barack%20obama',
'http://google.se',
'http://funmoods.com',
'http://csdn.net',
'http://telegraph.co.uk',
'http://taringa.net',
# pylint: disable=line-too-long
'http://www.tripadvisor.com/Tourism-g32701-Mendocino_California-Vacations.html',
'http://pof.com',
'http://wp.pl',
'http://soundcloud.com/flosstradamus/tracks',
'http://w3schools.com/html/default.asp',
'http://ameblo.jp/staff/',
'http://wsj.com',
'http://web.de',
'http://sweetim.com',
'http://rambler.ru',
'http://gmx.net',
'http://www.indeed.com/jobs?q=software&l=Mountain+View%2C+CA',
'http://ilivid.com',
'http://www.xing.com/search/people?search%5Bq%5D=lufthansa',
'http://reuters.com',
'http://hostgator.com',
'http://www.ikea.com/us/en/catalog/categories/departments/living_room/',
'http://www.kaixin001.com/award2012/wenming/index.php',
'http://ku6.com',
'http://libero.it',
'http://samsung.com',
'http://hudong.com',
'http://espncricinfo.com',
'http://china.com',
# pylint: disable=line-too-long
'http://www.ups.com/content/us/en/bussol/browse/smallbiz/new-to-ups.html?WT.svl=SolExp',
'http://letv.com',
'http://ero-advertising.com',
'http://mashable.com',
'http://iminent.com',
'http://rutracker.org',
# pylint: disable=line-too-long
'http://www.shopping.hp.com/en_US/home-office/-/products/Laptops/Laptops',
# pylint: disable=line-too-long
'http://www.clickbank.com/buy_products.htm?dores=true&mainCategoryId=1340&sortField=POPULARITY&b1=1340',
'http://b.hatena.ne.jp/',
# pylint: disable=line-too-long
'http://www.youdao.com/search?q=barack+obama&ue=utf8&keyfrom=web.index',
'http://forbes.com',
'http://nbcnews.com',
'http://bitauto.com',
'http://php.net',
'http://www.target.com/c/women/-/N-5xtd3#?lnk=nav_t_spc_1_0',
'http://dianxin.cn',
'http://www.aizhan.com/siteall/www.youboy.com/',
'http://veiculos-home.mercadolivre.com.br/',
'http://kakaku.com',
'http://flipkart.com',
'http://paipai.com'
]
class Top2012Q3Page(page.Page):
  """One hand-picked top-site story; its only interaction is a page scroll."""

  def __init__(self, url, ps):
    super(Top2012Q3Page, self).__init__(
        url=url, page_set=ps, credentials_path = 'data/credentials.json')
    # WPR archive holding the recorded responses for replay.
    self.archive_data_file = 'data/2012Q3.json'

  def RunPageInteractions(self, action_runner):
    # Single scripted gesture: scroll the page.
    with action_runner.CreateGestureInteraction('ScrollAction'):
      action_runner.ScrollPage()
class Top2012Q3PageSet(story.StorySet):
  """ Pages hand-picked from top-lists in Q32012. """

  def __init__(self):
    super(Top2012Q3PageSet, self).__init__(
      archive_data_file='data/2012Q3.json',
      cloud_storage_bucket=story.PARTNER_BUCKET)
    # One story per URL. NOTE(review): the constant is named TOP_2013_URLS
    # although this set is documented as Q3 2012 data.
    for url in TOP_2013_URLS:
      self.AddStory(Top2012Q3Page(url, self))
|
kenshay/ImageScripter | refs/heads/master | ProgramData/SystemFiles/Python/Lib/site-packages/elan/Video_Camera.py | 2 | import sys
import os
import sys
import os
import hashlib
import time
from threading import Thread
import sys
from PyQt4 import QtGui,uic
from threading import Thread
from time import sleep
import os
import os
import datetime
import win32security
import win32api
import sys
import os, shutil
import shutil
import time
from ntsecuritycon import *
from pytank.Core import Settings
from desktopmagic.screengrab_win32 import getScreenAsImage as getMultiScreenAsImage
from pytank.Core import Elan_Paths
# Folder scanned for stale screenshots (the same path is hard-coded again
# inside Save_Multi_Monitor_ScreenShot's erase loop).
multi_image_folder = r"C:\Elan_Tools\ImageScripter\ProgramData\Video_Multi_Monitor"
# Screenshots older than this many seconds are backed up and then deleted.
DVR_DELETE_TIME = 200 #seconds
Delete_Duplicates = False #Keep off, old way of doing this
# When True, each saved frame's filename is printed.
debug_is_on = False
# When True, stale frames are copied to the backup folder and removed.
erase_old_files = True
def chunk_reader(fobj, chunk_size=1024):
    """Generator that reads a file in chunks of bytes"""
    # Read-ahead style: fetch a chunk, yield it, fetch the next one;
    # iteration ends as soon as the file object returns an empty read.
    data = fobj.read(chunk_size)
    while data:
        yield data
        data = fobj.read(chunk_size)
from PIL import Image
def resize_image(input_image_path,
                 output_image_path,
                 size):
    """Resize the image at *input_image_path* to *size* and save it.

    Prints the before/after dimensions, displays the resized image in an
    external viewer (``show()`` is a side effect), and writes the result
    to *output_image_path*.  *size* is a ``(width, height)`` tuple as
    expected by ``PIL.Image.resize``.
    """
    original_image = Image.open(input_image_path)
    width, height = original_image.size
    print('The original image size is {wide} wide x {height} '
          'high'.format(wide=width, height=height))
    resized_image = original_image.resize(size)
    width, height = resized_image.size
    print('The resized image size is {wide} wide x {height} '
          'high'.format(wide=width, height=height))
    # NOTE(review): show() spawns an external image viewer -- presumably
    # intentional for interactive use; confirm before calling headless.
    resized_image.show()
    resized_image.save(output_image_path)
def check_for_duplicates_and_delete_dup(paths, hash=hashlib.sha1):
    """Recursively scan *paths* and delete duplicate files in place.

    Two files are considered duplicates when they have the same content
    digest and the same size.  When a duplicate is found, the previously
    recorded path is removed and the most recently seen path survives.

    :param paths: iterable of directory roots to walk.
    :param hash: hash constructor used to digest file contents
        (defaults to ``hashlib.sha1``).
    """
    hashes = {}
    for path in paths:
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in filenames:
                full_path = os.path.join(dirpath, filename)
                hashobj = hash()
                # Digest in bounded chunks so large files do not exhaust
                # memory; 'with' guarantees the handle is closed (the
                # original version leaked one open handle per file).
                with open(full_path, 'rb') as fobj:
                    chunk = fobj.read(1024)
                    while chunk:
                        hashobj.update(chunk)
                        chunk = fobj.read(1024)
                file_id = (hashobj.digest(), os.path.getsize(full_path))
                duplicate = hashes.get(file_id, None)
                if duplicate:
                    # Keep the newest path; drop the one recorded earlier.
                    try:
                        os.remove(duplicate)
                    except OSError:
                        # Already gone or not removable -- best effort.
                        pass
                # Record/refresh the surviving path so a third copy of the
                # same content does not try to delete an already-removed
                # file (the original left a stale entry here).
                hashes[file_id] = full_path
def Get_Time_Stamp_Backwords():
    """Return the current local time as a zero-padded 'HHMMSS' string.

    Replaces the original hand-rolled zero padding (and its dead,
    commented-out experiments) with an equivalent strftime call; %H, %M
    and %S are always rendered as two zero-padded digits, so the result
    is identical: a six-digit string.
    """
    import datetime
    return datetime.datetime.now().strftime('%H%M%S')
class Video_Camera_Class(QtGui.QWidget):
    """Continuously captures multi-monitor screenshots into a DVR folder.

    WARNING: the constructor calls capture(), which loops forever, so
    __init__ never returns and the Qt event loop is never entered.
    """
    def __init__(self):
        super(Video_Camera_Class, self).__init__()
        self.deleteThenCreateFolder()
        #if Delete_Duplicates == True:
        #    self.delete_Duplicates_Thread = Thread(target = self.DeleteDuplicatsLoop)
        #    self.delete_Duplicates_Thread.start()
        #else:
        #    print("Duplicate Delete is Disabled")
        # Blocks forever -- see capture().
        self.capture()
    def DeleteDuplicatsLoop(self):
        """Busy-loop that removes duplicate screenshots (legacy, unused)."""
        while True:
            #print('????????????????????????????')#Delete Dup
            try:
                check_for_duplicates_and_delete_dup([multi_image_folder])
            except:
                pass
    def deleteThenCreateFolder(self):
        """Wipe the screenshot output folder and recreate it (retries ~100s)."""
        try:
            shutil.rmtree(Settings.Multi_Monitor_Video_LocationPath)
            print('Removed zip folder')
        except Exception as e:
            print(e)
            print('Creating new zip folder')
        # mkdir can fail while rmtree is still releasing the dir on
        # Windows; retry up to 100 times, one second apart.
        for i in range(100):
            try:
                os.mkdir(Settings.Multi_Monitor_Video_LocationPath)
                break
            except:
                sleep(1)
    def Save_Multi_Monitor_ScreenShot(self):
        """Grab one screenshot of all monitors and write it as HHMMSS.png.

        Also backs up and deletes frames older than DVR_DELETE_TIME and,
        when a target.png overlay exists, pastes a 5x-enlarged copy of it
        into the top-left corner of the frame.
        """
        import os
        ###########################################Delete if over 250
        #if Settings.Multi_Monitor_Pic_Count >= 250:
        #    self.deleteThenCreateFolder()
        #    Settings.Multi_Monitor_Pic_Count = 1
        #################################################Erase old files
        if erase_old_files == True:
            dir_to_search = "C:\Elan_Tools\ImageScripter\ProgramData\Video_Multi_Monitor"
            for dirpath, dirnames, filenames in os.walk(dir_to_search):
                for file in filenames:
                    curpath = os.path.join(dirpath, file)
                    file_modified = datetime.datetime.fromtimestamp(os.path.getmtime(curpath))
                    if datetime.datetime.now() - file_modified > datetime.timedelta(seconds = DVR_DELETE_TIME):
                        #print('Deleting Old -> ' + curpath )
                        # Move the stale frame to the backup folder first.
                        if not os.path.exists(Elan_Paths.backup_image_folder):
                            os.makedirs(Elan_Paths.backup_image_folder)
                        try:
                            shutil.copy2(curpath,Elan_Paths.backup_image_folder)
                        except Exception as e:
                            print('Exceptoion 180' + str(e))
                        os.remove(curpath)
        #Settings.Multi_Monitor_Pic_Count = 1
        Settings.Multi_Monitor_Pic_Count += 1
        from PIL import Image
        #print(Settings.Multi_Monitor_Pic_Count)
        int_name = Settings.Multi_Monitor_Pic_Count
        # Write the file from inside the output folder, then restore cwd.
        past_Loc = os.getcwd()
        os.chdir(Settings.Multi_Monitor_Video_LocationPath)
        im = getMultiScreenAsImage()
        #name_Of_Image = str(int_name) + ".png"
        name_Of_Image = Get_Time_Stamp_Backwords() + ".png"
        #########################################
        try:
            import os.path
            path = r"C:\Elan_Tools\ImageScripter\ProgramData\Target_Images\target.png"
            if os.path.isfile(path) == True:
                target_Image = Image.open(path,'r')
                target_Image_width, target_Image_height = target_Image.size
                #target_Image = target_Image.resize(target_Image_width * 8,target_Image_height * 8)
                target_Image = target_Image.resize((target_Image_width * 5, target_Image_height * 5), Image.ANTIALIAS)
                im.paste(target_Image,(0,0))
            else:
                pass
        except Exception as e:
            print(e)
            print('error')
        ################################################################
        im.save(name_Of_Image, format='png',quality=5,optimize=True)
        if debug_is_on == True:
            print(name_Of_Image)
        #im.save(name_Of_Image, format='png')
        #print("name of image -> " + name_Of_Image)
        os.chdir(past_Loc)
    def capture(self):
        """Capture frames as fast as possible, forever (never returns)."""
        while True:
            #sleep(1)
            self.Save_Multi_Monitor_ScreenShot()
def main():
    """Create the Qt application and start the capture loop.

    NOTE(review): constructing Video_Camera_Class never returns (its
    __init__ enters the endless capture loop), so app.exec_() below is
    never actually reached.
    """
    app = QtGui.QApplication(sys.argv)
    vc = Video_Camera_Class()
    sys.exit(app.exec_())
main() |
Arafatk/sympy | refs/heads/master | sympy/polys/domains/pythonrational.py | 70 | """Rational number type based on Python integers. """
from __future__ import print_function, division
import operator
from sympy.polys.polyutils import PicklableWithSlots
from sympy.polys.domains.domainelement import DomainElement
from sympy.core.compatibility import integer_types
from sympy.core.sympify import converter
from sympy.core.numbers import Rational
from sympy.printing.defaults import DefaultPrinting
from sympy.utilities import public
@public
class PythonRational(DefaultPrinting, PicklableWithSlots, DomainElement):
    """
    Rational number type based on Python integers.

    This was supposed to be needed for compatibility with older Python
    versions which don't support Fraction. However, Fraction is very
    slow so we don't use it anyway.

    Examples
    ========

    >>> from sympy.polys.domains import PythonRational

    >>> PythonRational(1)
    1
    >>> PythonRational(2, 3)
    2/3
    >>> PythonRational(14, 10)
    7/5

    """
    # p is the numerator (carries the sign), q the denominator (always > 0).
    __slots__ = ['p', 'q']
    def parent(self):
        # The domain this element belongs to (DomainElement protocol).
        from sympy.polys.domains import PythonRationalField
        return PythonRationalField()
    def __init__(self, p, q=1):
        # Construct p/q in canonical form: q > 0 and gcd(p, q) == 1.
        if not q:
            raise ZeroDivisionError('rational number')
        elif q < 0:
            # Normalize the sign onto the numerator.
            p, q = -p, -q
        if not p:
            self.p = 0
            self.q = 1
        elif p == 1 or q == 1:
            self.p = p
            self.q = q
        else:
            # Euclidean algorithm: reduce by gcd(p, q).
            x, y = p, q
            while y:
                x, y = y, x % y
            if x != 1:
                p //= x
                q //= x
            self.p = p
            self.q = q
    @classmethod
    def new(cls, p, q):
        # Fast constructor: trusts (p, q) to already be in canonical form
        # and skips __init__'s normalization.
        obj = object.__new__(cls)
        obj.p = p
        obj.q = q
        return obj
    def __hash__(self):
        # Integral values hash like plain ints so that e.g. 3/1 and 3
        # hash the same (they also compare equal via __eq__).
        if self.q == 1:
            return hash(self.p)
        else:
            return hash((self.p, self.q))
    def __int__(self):
        # Truncate toward zero; Python's // floors, hence the sign dance.
        p, q = self.p, self.q
        if p < 0:
            return -(-p//q)
        return p//q
    def __float__(self):
        return float(self.p)/self.q
    def __abs__(self):
        return self.new(abs(self.p), self.q)
    def __pos__(self):
        return self.new(+self.p, self.q)
    def __neg__(self):
        return self.new(-self.p, self.q)
    # Arithmetic operators handle PythonRational and plain integer
    # operands; anything else returns NotImplemented so Python can try
    # the reflected operation.  Results go through __class__(p, q) so
    # they are re-canonicalized.
    def __add__(self, other):
        if isinstance(other, PythonRational):
            p = self.p*other.q + self.q*other.p
            q = self.q*other.q
        elif isinstance(other, integer_types):
            p = self.p + self.q*other
            q = self.q
        else:
            return NotImplemented
        return self.__class__(p, q)
    def __radd__(self, other):
        if not isinstance(other, integer_types):
            return NotImplemented
        p = self.p + self.q*other
        q = self.q
        return self.__class__(p, q)
    def __sub__(self, other):
        if isinstance(other, PythonRational):
            p = self.p*other.q - self.q*other.p
            q = self.q*other.q
        elif isinstance(other, integer_types):
            p = self.p - self.q*other
            q = self.q
        else:
            return NotImplemented
        return self.__class__(p, q)
    def __rsub__(self, other):
        if not isinstance(other, integer_types):
            return NotImplemented
        p = self.q*other - self.p
        q = self.q
        return self.__class__(p, q)
    def __mul__(self, other):
        if isinstance(other, PythonRational):
            p = self.p*other.p
            q = self.q*other.q
        elif isinstance(other, integer_types):
            p = self.p*other
            q = self.q
        else:
            return NotImplemented
        return self.__class__(p, q)
    def __rmul__(self, other):
        if not isinstance(other, integer_types):
            return NotImplemented
        p = self.p*other
        q = self.q
        return self.__class__(p, q)
    def __div__(self, other):
        if isinstance(other, PythonRational):
            p = self.p*other.q
            q = self.q*other.p
        elif isinstance(other, integer_types):
            p = self.p
            q = self.q*other
        else:
            return NotImplemented
        return self.__class__(p, q)
    # Python 2 uses __div__, Python 3 __truediv__; alias them.
    __truediv__ = __div__
    def __rdiv__(self, other):
        if not isinstance(other, integer_types):
            return NotImplemented
        p = self.q*other
        q = self.p
        return self.__class__(p, q)
    __rtruediv__ = __rdiv__
    def __mod__(self, other):
        # Rationals form a field, so exact division leaves remainder 0.
        return self.__class__(0)
    def __divmod__(self, other):
        # NOTE(review): relies on self//other, but no __floordiv__ is
        # defined on this class -- confirm intended behavior upstream.
        return (self//other, self % other)
    def __pow__(self, exp):
        p, q = self.p, self.q
        # Negative exponent: invert, then raise to the positive power.
        if exp < 0:
            p, q, exp = q, p, -exp
        return self.new(p**exp, q**exp)
    def __nonzero__(self):
        return self.p != 0
    # Python 3 truth-testing hook.
    __bool__ = __nonzero__
    def __eq__(self, other):
        if isinstance(other, PythonRational):
            return self.q == other.q and self.p == other.p
        elif isinstance(other, integer_types):
            return self.q == 1 and self.p == other
        else:
            return False
    def __ne__(self, other):
        return not self.__eq__(other)
    def _cmp(self, other, op):
        # Compare via the sign of the difference; TypeError from an
        # unsupported operand type maps to NotImplemented.
        try:
            diff = self - other
        except TypeError:
            return NotImplemented
        else:
            return op(diff.p, 0)
    def __lt__(self, other):
        return self._cmp(other, operator.lt)
    def __le__(self, other):
        return self._cmp(other, operator.le)
    def __gt__(self, other):
        return self._cmp(other, operator.gt)
    def __ge__(self, other):
        return self._cmp(other, operator.ge)
    @property
    def numer(self):
        return self.p
    @property
    def denom(self):
        return self.q
    # Long-form aliases matching the Fraction/numbers.Rational API.
    numerator = numer
    denominator = denom
def sympify_pythonrational(arg):
    """Convert a PythonRational to a SymPy Rational (sympify hook)."""
    return Rational(arg.p, arg.q)
# Register the hook so sympify() understands PythonRational instances.
converter[PythonRational] = sympify_pythonrational
|
xkcd1253/SocialNetworkforTwo | refs/heads/master | flask/lib/python2.7/site-packages/migrate/versioning/schemadiff.py | 52 | """
Schema differencing support.
"""
import logging
import sqlalchemy
from sqlalchemy.types import Float
log = logging.getLogger(__name__)
def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None):
    """
    Return differences of model against database.

    Reflects the live database schema from *engine* into a fresh
    MetaData and compares *metadata* ("model") against it ("database").

    :return: object which will evaluate to :keyword:`True` if there \
      are differences else :keyword:`False`.
    """
    db_metadata = sqlalchemy.MetaData(engine, reflect=True)
    # sqlite will include a dynamically generated 'sqlite_sequence' table if
    # there are autoincrement sequences in the database; this should not be
    # compared.
    if engine.dialect.name == 'sqlite':
        if 'sqlite_sequence' in db_metadata.tables:
            db_metadata.remove(db_metadata.tables['sqlite_sequence'])
    return SchemaDiff(metadata, db_metadata,
                      labelA='model',
                      labelB='database',
                      excludeTables=excludeTables)
def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None):
    """
    Return differences of model against another model.

    :return: object which will evaluate to :keyword:`True` if there \
      are differences else :keyword:`False`.
    """
    # Pass excludeTables by keyword: positionally it would land in
    # SchemaDiff's ``labelA`` parameter (signature is
    # (metadataA, metadataB, labelA, labelB, excludeTables)), so the
    # exclusion list was silently ignored and misused as a label.
    return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables)
class ColDiff(object):
    """
    Container for differences in one :class:`~sqlalchemy.schema.Column`
    between two :class:`~sqlalchemy.schema.Table` instances, ``A``
    and ``B``.

    Evaluates truthy when the two columns differ in type affinity, in
    Float-ness, or in the precision/scale/length attributes both types
    define.

    .. attribute:: col_A

      The :class:`~sqlalchemy.schema.Column` object for A.

    .. attribute:: col_B

      The :class:`~sqlalchemy.schema.Column` object for B.

    .. attribute:: type_A

      The most generic type of the :class:`~sqlalchemy.schema.Column`
      object in A.

    .. attribute:: type_B

      The most generic type of the :class:`~sqlalchemy.schema.Column`
      object in B.
    """

    diff = False

    def __init__(self, col_A, col_B):
        self.col_A = col_A
        self.col_B = col_B
        self.type_A = col_A.type
        self.type_B = col_B.type
        self.affinity_A = self.type_A._type_affinity
        self.affinity_B = self.type_B._type_affinity
        # Different type affinities (e.g. Integer vs. String): different.
        if self.affinity_A is not self.affinity_B:
            self.diff = True
            return
        # A Float compared with a non-Float counts as different even when
        # the affinities matched above.
        if isinstance(self.type_A, Float) or isinstance(self.type_B, Float):
            if not (isinstance(self.type_A, Float)
                    and isinstance(self.type_B, Float)):
                self.diff = True
                return
        # Same affinity: a mismatch in any size-related attribute that
        # both types define marks the columns as different.
        for attr in ('precision', 'scale', 'length'):
            A = getattr(self.type_A, attr, None)
            B = getattr(self.type_B, attr, None)
            if not (A is None or B is None) and A != B:
                self.diff = True
                return

    def __nonzero__(self):
        return self.diff

    # Python 3 truth-testing calls __bool__, not the Python 2 __nonzero__
    # hook; without this alias every ColDiff would be truthy on Python 3.
    __bool__ = __nonzero__
class TableDiff(object):
    """
    Container for differences in one :class:`~sqlalchemy.schema.Table`
    between two :class:`~sqlalchemy.schema.MetaData` instances, ``A``
    and ``B``.

    Evaluates truthy when any column is missing from either side or any
    shared column differs.

    .. attribute:: columns_missing_from_A

      A sequence of column names that were found in B but weren't in
      A.

    .. attribute:: columns_missing_from_B

      A sequence of column names that were found in A but weren't in
      B.

    .. attribute:: columns_different

      A dictionary containing information about columns that were
      found to be different.
      It maps column names to a :class:`ColDiff` objects describing the
      differences found.
    """
    __slots__ = (
        'columns_missing_from_A',
        'columns_missing_from_B',
        'columns_different',
        )

    def __nonzero__(self):
        return bool(
            self.columns_missing_from_A or
            self.columns_missing_from_B or
            self.columns_different
            )

    # Python 3 truth-testing calls __bool__, not the Python 2 __nonzero__
    # hook; without this alias every TableDiff would be truthy on
    # Python 3 (a class attribute, so it coexists with __slots__).
    __bool__ = __nonzero__
class SchemaDiff(object):
    """
    Compute the difference between two :class:`~sqlalchemy.schema.MetaData`
    objects.

    The string representation of a :class:`SchemaDiff` will summarise
    the changes found between the two
    :class:`~sqlalchemy.schema.MetaData` objects.

    The length of a :class:`SchemaDiff` will give the number of
    changes found, enabling it to be used much like a boolean in
    expressions.

    :param metadataA:
      First :class:`~sqlalchemy.schema.MetaData` to compare.

    :param metadataB:
      Second :class:`~sqlalchemy.schema.MetaData` to compare.

    :param labelA:
      The label to use in messages about the first
      :class:`~sqlalchemy.schema.MetaData`.

    :param labelB:
      The label to use in messages about the second
      :class:`~sqlalchemy.schema.MetaData`.

    :param excludeTables:
      A sequence of table names to exclude.

    .. attribute:: tables_missing_from_A

      A sequence of table names that were found in B but weren't in
      A.

    .. attribute:: tables_missing_from_B

      A sequence of table names that were found in A but weren't in
      B.

    .. attribute:: tables_different

      A dictionary containing information about tables that were found
      to be different.
      It maps table names to a :class:`TableDiff` objects describing the
      differences found.
    """

    def __init__(self,
                 metadataA, metadataB,
                 labelA='metadataA',
                 labelB='metadataB',
                 excludeTables=None):
        self.metadataA, self.metadataB = metadataA, metadataB
        self.labelA, self.labelB = labelA, labelB
        # Widest label, used to align the column dumps in __str__.
        self.label_width = max(len(labelA),len(labelB))
        excludeTables = set(excludeTables or [])
        A_table_names = set(metadataA.tables.keys())
        B_table_names = set(metadataB.tables.keys())
        # Tables present on only one side (excluded tables ignored).
        self.tables_missing_from_A = sorted(
            B_table_names - A_table_names - excludeTables
            )
        self.tables_missing_from_B = sorted(
            A_table_names - B_table_names - excludeTables
            )
        # For tables present on both sides, diff their columns.
        self.tables_different = {}
        for table_name in A_table_names.intersection(B_table_names):
            td = TableDiff()
            A_table = metadataA.tables[table_name]
            B_table = metadataB.tables[table_name]
            A_column_names = set(A_table.columns.keys())
            B_column_names = set(B_table.columns.keys())
            td.columns_missing_from_A = sorted(
                B_column_names - A_column_names
                )
            td.columns_missing_from_B = sorted(
                A_column_names - B_column_names
                )
            td.columns_different = {}
            for col_name in A_column_names.intersection(B_column_names):
                cd = ColDiff(
                    A_table.columns.get(col_name),
                    B_table.columns.get(col_name)
                    )
                if cd:
                    td.columns_different[col_name]=cd
            # XXX - index and constraint differences should
            # be checked for here
            if td:
                self.tables_different[table_name]=td

    def __str__(self):
        ''' Summarize differences. '''
        out = []
        column_template ='      %%%is: %%r' % self.label_width
        for names,label in (
            (self.tables_missing_from_A,self.labelA),
            (self.tables_missing_from_B,self.labelB),
            ):
            if names:
                out.append(
                    '  tables missing from %s: %s' % (
                        label,', '.join(sorted(names))
                        )
                    )
        for name,td in sorted(self.tables_different.items()):
            out.append(
               '  table with differences: %s' % name
               )
            for names,label in (
                (td.columns_missing_from_A,self.labelA),
                (td.columns_missing_from_B,self.labelB),
                ):
                if names:
                    out.append(
                        '    %s missing these columns: %s' % (
                            label,', '.join(sorted(names))
                            )
                        )
            for name,cd in td.columns_different.items():
                out.append('    column with differences: %s' % name)
                out.append(column_template % (self.labelA,cd.col_A))
                out.append(column_template % (self.labelB,cd.col_B))
        if out:
            out.insert(0, 'Schema diffs:')
            return '\n'.join(out)
        else:
            return 'No schema diffs'

    def __len__(self):
        """
        Used in bool evaluation, return of 0 means no diffs.
        """
        return (
            len(self.tables_missing_from_A) +
            len(self.tables_missing_from_B) +
            len(self.tables_different)
            )
|
gkc1000/pyscf | refs/heads/master | examples/mcscf/03-natural_orbital.py | 2 | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Generally, the CASSCF solver does NOT return the natural orbitals.
1. Attribute .natorb controls whether the active space orbitals are transformed
to natural orbitals in the results.
2. When .natorb is set, the natural orbitals may NOT be sorted by the active
   space occupancy; when symmetry is enabled they are sorted by occupancy only
   within each irreducible representation (irrep).
'''
import numpy
from pyscf import gto, scf, mcscf
# First run: no symmetry, default CASSCF output (canonical, not natural).
mol = gto.M(
    atom = 'O 0 0 0; O 0 0 1.2',
    basis = 'ccpvdz',
    spin = 2)
myhf = scf.RHF(mol).run()
# 6 active orbitals, 8 active electrons
mycas = mcscf.CASSCF(myhf, 6, 8)
mycas.kernel() # Here mycas.mo_coeff are not natural orbitals
mycas.natorb = True
mycas.kernel() # Here mycas.mo_coeff are natural orbitals
#
# The natural orbitals in active space are NOT sorted by the occupancy.
#
mol = gto.M(
    atom = 'O 0 0 0; O 0 0 1.2',
    basis = 'ccpvdz',
    symmetry = True,
    spin = 2)
myhf = scf.RHF(mol).run()
mycas = mcscf.CASSCF(myhf, 6, 8)
mycas.natorb = True
# Here mycas.mo_coeff are natural orbitals because .natorb is on.
# Note: the active space orbitals have the same symmetry as the input HF
# canonical orbitals. They are not fully sorted wrt the occupancies.
# The mcscf active orbitals are sorted only within each irrep.
mycas.kernel()
|
mwx1993/TACTIC | refs/heads/master | src/tactic/active_directory/ad_connect.py | 6 | ###########################################################
#
# Copyright (c) 2005-2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['ADConnect']
import tacticenv
import os
from ad_lookup import ADException
# Human-readable message set when the Windows-only dependencies cannot be
# loaded; ADConnect.__init__ raises ADException if it is non-empty.
ERROR = ""
try:
    import win32security, pywintypes
    import active_directory
except ImportError, e:
    if os.name != 'nt':
        ERROR = "Active directory libraries only work on a Windows platform"
    else:
        ERROR = 'Cannot import Win32 modules,\n%s' % str(e)
class ADConnect(object):
    """Thin wrapper around the Win32 APIs used to validate Active
    Directory credentials and look up account information.

    (TACTIC convention: ``my`` is used in place of ``self``.)
    """
    def __init__(my):
        my.debug_flag= False
        my.domain=""
        my.username=""
        my.password=""
        # Fail fast when the Win32/AD modules could not be imported.
        if ERROR:
            raise ADException(ERROR)
    def set_user(my, username):
        my.username = username
        my.debug("Setting username to %s" % my.username)
        return True
    def set_password(my, password):
        my.password = password
        # NOTE(review): this echoes the plain-text password when debug
        # output is enabled -- confirm this is acceptable.
        my.debug("Setting password to %s" % my.password)
    def set_domain(my, domain):
        my.domain = domain
        my.debug("Setting domain to %s" % my.domain)
    def set_debug(my, bool):
        # Enables debug output regardless of the argument's value.
        my.debug_flag=True
    def set_ldap_string(my, ldapstring):
        my.ldap_string=ldapstring
    def debug(my, message):
        # Print only when debugging was switched on via set_debug().
        if my.debug_flag:
            print message
    def lookup(my):
        # Returns the LookupAccountName tuple (SID, domain, account type)
        # for the configured username, or False on any Win32 error.
        my.debug("Looking up info on %s." % (my.username))
        try:
            account=win32security.LookupAccountName(None, my.username)
            return account
        except pywintypes.error, e:
            return False
    def logon(my):
        # Attempts a network logon with the configured credentials;
        # returns True on success, False otherwise.
        my.debug("Logging on %s to %s with %s." % (my.username, my.domain, my.password))
        try:
            handle=win32security.LogonUser(my.username, my.domain, my.password,
                                           win32security.LOGON32_LOGON_NETWORK,
                                           win32security.LOGON32_PROVIDER_DEFAULT)
            # We're not going to use the handle, just seeing if we can get it.
            handle.Close()
            return True
        except pywintypes.error, e:
            # Because of the sheer number of Windows-specific errors that can
            # occur here, we have to assume any of them mean that the
            # credentials were not valid.
            print e
            return False
import sys, getopt
def usage():
    # Command-line help for the script mode of this module.
    print "ADS credentials checker"
    print "Usage: ad_connect.py [Option]"
    print "Check for ADS connectivity"
    print ""
    print "-u <username> username"
    print "-p <password> password"
    print "-h, --help Display this message, and exit"
    print "-a <domain> Set domain"
    print "-c check credentials against server"
    print "-i lookup account info"
    print "-d Debug messages"
    print ""
def main(argv):
try:
opts, args = getopt.getopt(argv, "u:p:a:hdci", ["help"])
except getopt.GetoptError:
usage()
sys.exit(2)
#try:
if len(opts) > 0:
ads = ADConnect()
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt == '-u':
ads.set_user(arg)
elif opt == '-p':
ads.set_password(arg)
elif opt == '-a':
ads.set_domain(arg)
elif opt == '-l':
ads.set_logon(True)
elif opt == '-d':
ads.set_debug(True)
elif opt == '-c':
if ads.logon():
print "Successful logon"
else:
print "Failed logon"
elif opt == '-i':
x=ads.lookup()
print "SID:%s" % x[0]
print "Domain:%s" % x[1]
else:
usage()
sys.exit()
else:
print ("Try 'ad_connect.py -h' for more information.")
# Script entry point: forward the CLI arguments (minus the program name).
if __name__ == '__main__':
    main(sys.argv[1:])
|
jaraco/pytest | refs/heads/master | doc/en/example/assertion/test_setup_flow_example.py | 217 | def setup_module(module):
module.TestStateFullThing.classcount = 0
class TestStateFullThing:
    """Demonstrates pytest's xUnit-style setup/teardown call order."""
    def setup_class(cls):
        cls.classcount += 1
    def teardown_class(cls):
        cls.classcount -= 1
    def setup_method(self, method):
        # Derive an id from the digits trailing the test name,
        # e.g. 'test_42' -> 42.
        self.id = eval(method.__name__[5:])
    def test_42(self):
        assert self.classcount == 1
        assert self.id == 42
    def test_23(self):
        assert self.classcount == 1
        assert self.id == 23
def teardown_module(module):
    # After all tests, setup_class/teardown_class increments must balance.
    assert module.TestStateFullThing.classcount == 0
""" For this example the control flow happens as follows::
import test_setup_flow_example
setup_module(test_setup_flow_example)
setup_class(TestStateFullThing)
instance = TestStateFullThing()
setup_method(instance, instance.test_42)
instance.test_42()
setup_method(instance, instance.test_23)
instance.test_23()
teardown_class(TestStateFullThing)
teardown_module(test_setup_flow_example)
Note that ``setup_class(TestStateFullThing)`` is called and not
``TestStateFullThing.setup_class()`` which would require you
to insert ``setup_class = classmethod(setup_class)`` to make
your setup function callable.
"""
|
javierwilson/forocacao | refs/heads/master | docs/__init__.py | 887 | # Included so that Django's startproject comment runs against the docs directory
|
manaschaturvedi/oscarbuddy | refs/heads/master | requests-master/requests/packages/chardet/mbcharsetprober.py | 2923 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
class MultiByteCharSetProber(CharSetProber):
    """Base prober for multi-byte encodings.

    Combines two collaborators that concrete subclasses must install:
    a coding state machine (``self._mCodingSM``) that validates byte
    sequences, and a character distribution analyzer
    (``self._mDistributionAnalyzer``) that scores character frequencies.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        # Subclasses assign the concrete analyzer / state machine.
        self._mDistributionAnalyzer = None
        self._mCodingSM = None
        # Two-byte sliding window over the input stream; [0] is the last
        # byte of the previous feed() call, [1] a scratch slot.
        self._mLastChar = [0, 0]

    def reset(self):
        """Reset base state, both collaborators, and the byte window."""
        CharSetProber.reset(self)
        if self._mCodingSM:
            self._mCodingSM.reset()
        if self._mDistributionAnalyzer:
            self._mDistributionAnalyzer.reset()
        self._mLastChar = [0, 0]

    def get_charset_name(self):
        # Abstract: concrete subclasses return their encoding's name.
        pass

    def feed(self, aBuf):
        """Feed a chunk of bytes; returns the current detection state."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            # Advance the state machine one byte at a time.
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                # A complete character was recognised; hand the two-byte
                # window to the distribution analyzer.
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # First byte of this chunk pairs with the carried-over
                    # last byte of the previous chunk.
                    self._mLastChar[1] = aBuf[0]
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)
        # Carry the final byte across into the next feed() call.
        self._mLastChar[0] = aBuf[aLen - 1]
        if self.get_state() == constants.eDetecting:
            # Shortcut: enough data and high confidence ends detection early.
            if (self._mDistributionAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt
        return self.get_state()

    def get_confidence(self):
        """Confidence comes entirely from the distribution analyzer."""
        return self._mDistributionAnalyzer.get_confidence()
|
mdanielwork/intellij-community | refs/heads/master | python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_0/_pkg1_0_1/_pkg1_0_1_1/_pkg1_0_1_1_0/_mod1_0_1_1_0_2.py | 30 | name1_0_1_1_0_2_0 = None
name1_0_1_1_0_2_1 = None
name1_0_1_1_0_2_2 = None
name1_0_1_1_0_2_3 = None
name1_0_1_1_0_2_4 = None |
atodorov/pykickstart | refs/heads/master | tests/commands/upgrade.py | 3 | #
# Martin Gracik <mgracik@redhat.com>
#
# Copyright 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
import warnings
from tests.baseclass import CommandTest
from pykickstart.base import DeprecatedCommand
from pykickstart.commands.upgrade import FC3_Upgrade
class Upgrade_TestCase(unittest.TestCase):
    def runTest(self):
        # A bare FC3 upgrade command stringifies to the empty string.
        self.assertEqual(str(FC3_Upgrade()), '')
class FC3_TestCase(CommandTest):
    command = "upgrade"

    def runTest(self):
        # Valid forms round-trip to their canonical string.
        self.assert_parse("upgrade", "upgrade\n")
        self.assert_parse("install", "install\n")
        # Invalid forms: stray arguments and unknown flags are rejected.
        for bad in ("upgrade install",
                    "upgrade --bad-flag",
                    "install --bad-flag"):
            self.assert_parse_error(bad)
class F11_TestCase(FC3_TestCase):
    def runTest(self):
        # Everything FC3 accepted is still accepted.
        FC3_TestCase.runTest(self)
        # F11 adds --root-device; note install swallows it on output.
        self.assert_parse("upgrade", "upgrade\n")
        self.assert_parse("install", "install\n")
        self.assert_parse("upgrade --root-device=/dev/sda",
                          "upgrade --root-device=/dev/sda\n")
        self.assert_parse("install --root-device=/dev/sda", "install\n")
        # --root-device requires a non-empty argument; unknown flags fail.
        for bad in ("upgrade --root-device",
                    'upgrade --root-device=""',
                    "upgrade --bad-flag"):
            self.assert_parse_error(bad)
class F20_TestCase(F11_TestCase):
    def runTest(self):
        # F20 deprecates upgrade: the parser must derive from DeprecatedCommand.
        parser = self.getParser("upgrade")
        self.assertTrue(issubclass(parser.__class__, DeprecatedCommand))
        # Deprecated commands render as an empty string.
        self.assertEqual("", str(self.getParser("upgrade")))
        # Parsing a deprecated command emits exactly one warning.
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            # upgrade takes no arguments; reuse the parser fetched above.
            parser.parse([])
            self.assertEqual(1, len(caught))
class F29_TestCase(F20_TestCase):
    def runTest(self):
        # As of F29 the upgrade command is removed from the handler entirely.
        available = self.handler().commands
        self.assertNotIn("upgrade", available)
class RHEL8_TestCase(F29_TestCase):
    # RHEL 8 behaves exactly like Fedora 29 here: upgrade stays removed.
    pass
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
|
2015fallproject/2015fallcase2 | refs/heads/master | static/Brython3.2.0-20150701-214155/Lib/test/unittests/test_runpy.py | 29 | # Test the runpy module
import unittest
import os
import os.path
import sys
import re
import tempfile
import importlib
import py_compile
from test.support import (
forget, make_legacy_pyc, run_unittest, unload, verbose, no_tracing,
create_empty_file)
from test.script_helper import (
make_pkg, make_script, make_zip_pkg, make_zip_script, temp_dir)
import runpy
from runpy import _run_code, _run_module_code, run_module, run_path
# Note: This module can't safely test _run_module_as_main as it
# runs its tests in the current process, which would mess with the
# real __main__ module (usually test.regrtest)
# See test_cmd_line_script for a test that executes that code path
# Set up the test code and expected results
example_source = """\
# Check basic code execution
result = ['Top level assignment']
def f():
result.append('Lower level reference')
f()
del f
# Check the sys module
import sys
run_argv0 = sys.argv[0]
run_name_in_sys_modules = __name__ in sys.modules
module_in_sys_modules = (run_name_in_sys_modules and
globals() is sys.modules[__name__].__dict__)
# Check nested operation
import runpy
nested = runpy._run_module_code('x=1\\n', mod_name='<run>')
"""
implicit_namespace = {
"__name__": None,
"__file__": None,
"__cached__": None,
"__package__": None,
"__doc__": None,
}
example_namespace = {
"sys": sys,
"runpy": runpy,
"result": ["Top level assignment", "Lower level reference"],
"run_argv0": sys.argv[0],
"run_name_in_sys_modules": False,
"module_in_sys_modules": False,
"nested": dict(implicit_namespace,
x=1, __name__="<run>", __loader__=None),
}
example_namespace.update(implicit_namespace)
class CodeExecutionMixin:
    # Issue #15230 (run_path not handling run_name correctly) highlighted a
    # problem with the way arguments were being passed from higher level APIs
    # down to lower level code. This mixin makes it easier to ensure full
    # testing occurs at those upper layers as well, not just at the utility
    # layer

    def assertNamespaceMatches(self, result_ns, expected_ns):
        """Check two namespaces match.

           Ignores any unspecified interpreter created names
        """
        # Impls are permitted to add extra names, so filter them out
        for k in list(result_ns):
            if k.startswith("__") and k.endswith("__"):
                if k not in expected_ns:
                    result_ns.pop(k)
                if k not in expected_ns["nested"]:
                    result_ns["nested"].pop(k)
        # Don't use direct dict comparison - the diffs are too hard to debug
        self.assertEqual(set(result_ns), set(expected_ns))
        for k in result_ns:
            # Pair each value with its key so a mismatch names the key.
            actual = (k, result_ns[k])
            expected = (k, expected_ns[k])
            self.assertEqual(actual, expected)

    def check_code_execution(self, create_namespace, expected_namespace):
        """Check that an interface runs the example code correctly

           First argument is a callable accepting the initial globals and
           using them to create the actual namespace
           Second argument is the expected result
        """
        # sentinel distinguishes "absent from sys.modules" from "None".
        sentinel = object()
        expected_ns = expected_namespace.copy()
        run_name = expected_ns["__name__"]
        saved_argv0 = sys.argv[0]
        saved_mod = sys.modules.get(run_name, sentinel)
        # Check without initial globals
        result_ns = create_namespace(None)
        self.assertNamespaceMatches(result_ns, expected_ns)
        # The run must restore sys.argv[0] and sys.modules afterwards.
        self.assertIs(sys.argv[0], saved_argv0)
        self.assertIs(sys.modules.get(run_name, sentinel), saved_mod)
        # And then with initial globals
        initial_ns = {"sentinel": sentinel}
        expected_ns["sentinel"] = sentinel
        result_ns = create_namespace(initial_ns)
        # The supplied globals dict must not be mutated and returned as-is.
        self.assertIsNot(result_ns, initial_ns)
        self.assertNamespaceMatches(result_ns, expected_ns)
        self.assertIs(sys.argv[0], saved_argv0)
        self.assertIs(sys.modules.get(run_name, sentinel), saved_mod)
class ExecutionLayerTestCase(unittest.TestCase, CodeExecutionMixin):
    """Unit tests for runpy._run_code and runpy._run_module_code"""

    def test_run_code(self):
        # _run_code with an empty globals dict: no loader is set.
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__loader__": None,
        })
        def create_ns(init_globals):
            return _run_code(example_source, {}, init_globals)
        self.check_code_execution(create_ns, expected_ns)

    def test_run_module_code(self):
        # _run_module_code copies the supplied metadata verbatim into the
        # namespace; the placeholder values below just need to round-trip.
        mod_name = "<Nonsense>"
        mod_fname = "Some other nonsense"
        mod_loader = "Now you're just being silly"
        mod_package = '' # Treat as a top level module
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__name__": mod_name,
            "__file__": mod_fname,
            "__loader__": mod_loader,
            "__package__": mod_package,
            "run_argv0": mod_fname,
            "run_name_in_sys_modules": True,
            "module_in_sys_modules": True,
        })
        def create_ns(init_globals):
            return _run_module_code(example_source,
                                    init_globals,
                                    mod_name,
                                    mod_fname,
                                    mod_loader,
                                    mod_package)
        self.check_code_execution(create_ns, expected_ns)
# TODO: Use self.addCleanup to get rid of a lot of try-finally blocks
class RunModuleTestCase(unittest.TestCase, CodeExecutionMixin):
    """Unit tests for runpy.run_module"""

    def expect_import_error(self, mod_name):
        # Helper: assert that run_module(mod_name) raises ImportError.
        try:
            run_module(mod_name)
        except ImportError:
            pass
        else:
            self.fail("Expected import error for " + mod_name)

    def test_invalid_names(self):
        # Builtin module
        self.expect_import_error("sys")
        # Non-existent modules
        self.expect_import_error("sys.imp.eric")
        self.expect_import_error("os.path.half")
        self.expect_import_error("a.bee")
        self.expect_import_error(".howard")
        self.expect_import_error("..eaten")
        # Package without __main__.py
        self.expect_import_error("multiprocessing")

    def test_library_module(self):
        self.assertEqual(run_module("runpy")["__name__"], "runpy")

    def _add_pkg_dir(self, pkg_dir):
        # Create a directory with an empty __init__.py, i.e. a package.
        os.mkdir(pkg_dir)
        pkg_fname = os.path.join(pkg_dir, "__init__.py")
        create_empty_file(pkg_fname)
        return pkg_fname

    def _make_pkg(self, source, depth, mod_base="runpy_test"):
        # Build a nested package tree `depth` levels deep in a temp dir,
        # with `source` written as the innermost module.  Also prepends the
        # tree to sys.path (undone by _del_pkg).  Returns
        # (pkg_dir, mod_fname, dotted mod_name).
        pkg_name = "__runpy_pkg__"
        test_fname = mod_base+os.extsep+"py"
        pkg_dir = sub_dir = os.path.realpath(tempfile.mkdtemp())
        if verbose > 1: print(" Package tree in:", sub_dir)
        sys.path.insert(0, pkg_dir)
        if verbose > 1: print(" Updated sys.path:", sys.path[0])
        for i in range(depth):
            sub_dir = os.path.join(sub_dir, pkg_name)
            pkg_fname = self._add_pkg_dir(sub_dir)
            if verbose > 1: print(" Next level in:", sub_dir)
            if verbose > 1: print(" Created:", pkg_fname)
        mod_fname = os.path.join(sub_dir, test_fname)
        mod_file = open(mod_fname, "w")
        mod_file.write(source)
        mod_file.close()
        if verbose > 1: print(" Created:", mod_fname)
        mod_name = (pkg_name+".")*depth + mod_base
        return pkg_dir, mod_fname, mod_name

    def _del_pkg(self, top, depth, mod_name):
        # Undo _make_pkg: purge sys.modules entries, pop the sys.path entry
        # and remove the temp tree, persisting through individual failures.
        for entry in list(sys.modules):
            if entry.startswith("__runpy_pkg__"):
                del sys.modules[entry]
        if verbose > 1: print(" Removed sys.modules entries")
        del sys.path[0]
        if verbose > 1: print(" Removed sys.path entry")
        for root, dirs, files in os.walk(top, topdown=False):
            for name in files:
                try:
                    os.remove(os.path.join(root, name))
                except OSError as ex:
                    if verbose > 1: print(ex) # Persist with cleaning up
            for name in dirs:
                fullname = os.path.join(root, name)
                try:
                    os.rmdir(fullname)
                except OSError as ex:
                    if verbose > 1: print(ex) # Persist with cleaning up
        try:
            os.rmdir(top)
            if verbose > 1: print(" Removed package tree")
        except OSError as ex:
            if verbose > 1: print(ex) # Persist with cleaning up

    def _fix_ns_for_legacy_pyc(self, ns, alter_sys):
        # A legacy .pyc/.pyo path is the source path plus one character.
        char_to_add = "c" if __debug__ else "o"
        ns["__file__"] += char_to_add
        if alter_sys:
            ns["run_argv0"] += char_to_add

    def _check_module(self, depth, alter_sys=False):
        # Run a module at the given package depth, first from source and
        # then (bytecode permitting) from a legacy compiled file.
        pkg_dir, mod_fname, mod_name = (
                self._make_pkg(example_source, depth))
        forget(mod_name)
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__name__": mod_name,
            "__file__": mod_fname,
            "__package__": mod_name.rpartition(".")[0],
        })
        if alter_sys:
            expected_ns.update({
                "run_argv0": mod_fname,
                "run_name_in_sys_modules": True,
                "module_in_sys_modules": True,
            })
        def create_ns(init_globals):
            return run_module(mod_name, init_globals, alter_sys=alter_sys)
        try:
            if verbose > 1: print("Running from source:", mod_name)
            self.check_code_execution(create_ns, expected_ns)
            importlib.invalidate_caches()
            __import__(mod_name)
            os.remove(mod_fname)
            if not sys.dont_write_bytecode:
                make_legacy_pyc(mod_fname)
                unload(mod_name)  # In case loader caches paths
                importlib.invalidate_caches()
                if verbose > 1: print("Running from compiled:", mod_name)
                self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
                self.check_code_execution(create_ns, expected_ns)
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)
        if verbose > 1: print("Module executed successfully")

    def _check_package(self, depth, alter_sys=False):
        # Same as _check_module, but runs a package's __main__ submodule
        # by naming the package itself.
        pkg_dir, mod_fname, mod_name = (
                self._make_pkg(example_source, depth, "__main__"))
        pkg_name = mod_name.rpartition(".")[0]
        forget(mod_name)
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__name__": mod_name,
            "__file__": mod_fname,
            "__package__": pkg_name,
        })
        if alter_sys:
            expected_ns.update({
                "run_argv0": mod_fname,
                "run_name_in_sys_modules": True,
                "module_in_sys_modules": True,
            })
        def create_ns(init_globals):
            return run_module(pkg_name, init_globals, alter_sys=alter_sys)
        try:
            if verbose > 1: print("Running from source:", pkg_name)
            self.check_code_execution(create_ns, expected_ns)
            importlib.invalidate_caches()
            __import__(mod_name)
            os.remove(mod_fname)
            if not sys.dont_write_bytecode:
                make_legacy_pyc(mod_fname)
                unload(mod_name)  # In case loader caches paths
                if verbose > 1: print("Running from compiled:", pkg_name)
                importlib.invalidate_caches()
                self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
                self.check_code_execution(create_ns, expected_ns)
        finally:
            self._del_pkg(pkg_dir, depth, pkg_name)
        if verbose > 1: print("Package executed successfully")

    def _add_relative_modules(self, base_dir, source, depth):
        # Populate the tree with a sibling module plus an uncle/cousin/nephew
        # chain so relative imports of all kinds can be exercised.
        if depth <= 1:
            raise ValueError("Relative module test needs depth > 1")
        pkg_name = "__runpy_pkg__"
        module_dir = base_dir
        for i in range(depth):
            parent_dir = module_dir
            module_dir = os.path.join(module_dir, pkg_name)
        # Add sibling module
        sibling_fname = os.path.join(module_dir, "sibling.py")
        create_empty_file(sibling_fname)
        if verbose > 1: print(" Added sibling module:", sibling_fname)
        # Add nephew module
        uncle_dir = os.path.join(parent_dir, "uncle")
        self._add_pkg_dir(uncle_dir)
        if verbose > 1: print(" Added uncle package:", uncle_dir)
        cousin_dir = os.path.join(uncle_dir, "cousin")
        self._add_pkg_dir(cousin_dir)
        if verbose > 1: print(" Added cousin package:", cousin_dir)
        nephew_fname = os.path.join(cousin_dir, "nephew.py")
        create_empty_file(nephew_fname)
        if verbose > 1: print(" Added nephew module:", nephew_fname)

    def _check_relative_imports(self, depth, run_name=None):
        # The string body below is runtime data (written out as a module).
        contents = r"""\
from __future__ import absolute_import
from . import sibling
from ..uncle.cousin import nephew
"""
        pkg_dir, mod_fname, mod_name = (
                self._make_pkg(contents, depth))
        if run_name is None:
            expected_name = mod_name
        else:
            expected_name = run_name
        try:
            self._add_relative_modules(pkg_dir, contents, depth)
            pkg_name = mod_name.rpartition('.')[0]
            if verbose > 1: print("Running from source:", mod_name)
            d1 = run_module(mod_name, run_name=run_name) # Read from source
            self.assertEqual(d1["__name__"], expected_name)
            self.assertEqual(d1["__package__"], pkg_name)
            self.assertIn("sibling", d1)
            self.assertIn("nephew", d1)
            del d1 # Ensure __loader__ entry doesn't keep file open
            importlib.invalidate_caches()
            __import__(mod_name)
            os.remove(mod_fname)
            if not sys.dont_write_bytecode:
                make_legacy_pyc(mod_fname)
                unload(mod_name)  # In case the loader caches paths
                if verbose > 1: print("Running from compiled:", mod_name)
                importlib.invalidate_caches()
                d2 = run_module(mod_name, run_name=run_name) # Read from bytecode
                self.assertEqual(d2["__name__"], expected_name)
                self.assertEqual(d2["__package__"], pkg_name)
                self.assertIn("sibling", d2)
                self.assertIn("nephew", d2)
                del d2 # Ensure __loader__ entry doesn't keep file open
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)
        if verbose > 1: print("Module executed successfully")

    def test_run_module(self):
        for depth in range(4):
            if verbose > 1: print("Testing package depth:", depth)
            self._check_module(depth)

    def test_run_package(self):
        for depth in range(1, 4):
            if verbose > 1: print("Testing package depth:", depth)
            self._check_package(depth)

    def test_run_module_alter_sys(self):
        for depth in range(4):
            if verbose > 1: print("Testing package depth:", depth)
            self._check_module(depth, alter_sys=True)

    def test_run_package_alter_sys(self):
        for depth in range(1, 4):
            if verbose > 1: print("Testing package depth:", depth)
            self._check_package(depth, alter_sys=True)

    def test_explicit_relative_import(self):
        for depth in range(2, 5):
            if verbose > 1: print("Testing relative imports at depth:", depth)
            self._check_relative_imports(depth)

    def test_main_relative_import(self):
        for depth in range(2, 5):
            if verbose > 1: print("Testing main relative imports at depth:", depth)
            self._check_relative_imports(depth, "__main__")

    def test_run_name(self):
        # An explicit run_name overrides __name__ in the executed namespace.
        depth = 1
        run_name = "And now for something completely different"
        pkg_dir, mod_fname, mod_name = (
                self._make_pkg(example_source, depth))
        forget(mod_name)
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__name__": run_name,
            "__file__": mod_fname,
            "__package__": mod_name.rpartition(".")[0],
        })
        def create_ns(init_globals):
            return run_module(mod_name, init_globals, run_name)
        try:
            self.check_code_execution(create_ns, expected_ns)
        finally:
            self._del_pkg(pkg_dir, depth, mod_name)

    def test_pkgutil_walk_packages(self):
        # This is a dodgy hack to use the test_runpy infrastructure to test
        # issue #15343. Issue #15348 declares this is indeed a dodgy hack ;)
        import pkgutil
        max_depth = 4
        base_name = "__runpy_pkg__"
        package_suffixes = ["uncle", "uncle.cousin"]
        module_suffixes = ["uncle.cousin.nephew", base_name + ".sibling"]
        expected_packages = set()
        expected_modules = set()
        # Pre-compute every package/module name the walk should yield.
        for depth in range(1, max_depth):
            pkg_name = ".".join([base_name] * depth)
            expected_packages.add(pkg_name)
            for name in package_suffixes:
                expected_packages.add(pkg_name + "." + name)
            for name in module_suffixes:
                expected_modules.add(pkg_name + "." + name)
        pkg_name = ".".join([base_name] * max_depth)
        expected_packages.add(pkg_name)
        expected_modules.add(pkg_name + ".runpy_test")
        pkg_dir, mod_fname, mod_name = (
                self._make_pkg("", max_depth))
        self.addCleanup(self._del_pkg, pkg_dir, max_depth, mod_name)
        for depth in range(2, max_depth+1):
            self._add_relative_modules(pkg_dir, "", depth)
        # Remove every walked name; anything left over was not yielded.
        for finder, mod_name, ispkg in pkgutil.walk_packages([pkg_dir]):
            self.assertIsInstance(finder,
                                  importlib.machinery.FileFinder)
            if ispkg:
                expected_packages.remove(mod_name)
            else:
                expected_modules.remove(mod_name)
        self.assertEqual(len(expected_packages), 0, expected_packages)
        self.assertEqual(len(expected_modules), 0, expected_modules)
class RunPathTestCase(unittest.TestCase, CodeExecutionMixin):
    """Unit tests for runpy.run_path"""

    def _make_test_script(self, script_dir, script_basename, source=None):
        # Write a script (default: example_source) and return its path.
        if source is None:
            source = example_source
        return make_script(script_dir, script_basename, source)

    def _check_script(self, script_name, expected_name, expected_file,
                            expected_argv0):
        # First check is without run_name
        def create_ns(init_globals):
            return run_path(script_name, init_globals)
        expected_ns = example_namespace.copy()
        expected_ns.update({
            "__name__": expected_name,
            "__file__": expected_file,
            "__package__": "",
            "run_argv0": expected_argv0,
            "run_name_in_sys_modules": True,
            "module_in_sys_modules": True,
        })
        self.check_code_execution(create_ns, expected_ns)
        # Second check makes sure run_name works in all cases
        run_name = "prove.issue15230.is.fixed"
        def create_ns(init_globals):
            return run_path(script_name, init_globals, run_name)
        expected_ns["__name__"] = run_name
        expected_ns["__package__"] = run_name.rpartition(".")[0]
        self.check_code_execution(create_ns, expected_ns)

    def _check_import_error(self, script_name, msg):
        # msg is matched as a literal substring (regex-escaped).
        msg = re.escape(msg)
        self.assertRaisesRegex(ImportError, msg, run_path, script_name)

    def test_basic_script(self):
        with temp_dir() as script_dir:
            mod_name = 'script'
            script_name = self._make_test_script(script_dir, mod_name)
            self._check_script(script_name, "<run_path>", script_name,
                               script_name)

    def test_script_compiled(self):
        with temp_dir() as script_dir:
            mod_name = 'script'
            script_name = self._make_test_script(script_dir, mod_name)
            compiled_name = py_compile.compile(script_name, doraise=True)
            os.remove(script_name)
            self._check_script(compiled_name, "<run_path>", compiled_name,
                               compiled_name)

    def test_directory(self):
        # A directory containing __main__.py is runnable by path.
        with temp_dir() as script_dir:
            mod_name = '__main__'
            script_name = self._make_test_script(script_dir, mod_name)
            self._check_script(script_dir, "<run_path>", script_name,
                               script_dir)

    def test_directory_compiled(self):
        with temp_dir() as script_dir:
            mod_name = '__main__'
            script_name = self._make_test_script(script_dir, mod_name)
            compiled_name = py_compile.compile(script_name, doraise=True)
            os.remove(script_name)
            if not sys.dont_write_bytecode:
                legacy_pyc = make_legacy_pyc(script_name)
                self._check_script(script_dir, "<run_path>", legacy_pyc,
                                   script_dir)

    def test_directory_error(self):
        with temp_dir() as script_dir:
            mod_name = 'not_main'
            script_name = self._make_test_script(script_dir, mod_name)
            msg = "can't find '__main__' module in %r" % script_dir
            self._check_import_error(script_dir, msg)

    def test_zipfile(self):
        with temp_dir() as script_dir:
            mod_name = '__main__'
            script_name = self._make_test_script(script_dir, mod_name)
            zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
            self._check_script(zip_name, "<run_path>", fname, zip_name)

    def test_zipfile_compiled(self):
        with temp_dir() as script_dir:
            mod_name = '__main__'
            script_name = self._make_test_script(script_dir, mod_name)
            compiled_name = py_compile.compile(script_name, doraise=True)
            zip_name, fname = make_zip_script(script_dir, 'test_zip',
                                              compiled_name)
            self._check_script(zip_name, "<run_path>", fname, zip_name)

    def test_zipfile_error(self):
        with temp_dir() as script_dir:
            mod_name = 'not_main'
            script_name = self._make_test_script(script_dir, mod_name)
            zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
            msg = "can't find '__main__' module in %r" % zip_name
            self._check_import_error(zip_name, msg)

    @no_tracing
    def test_main_recursion_error(self):
        # A zipped __main__ that run_path's a directory without __main__
        # recurses until the interpreter recursion limit trips.
        with temp_dir() as script_dir, temp_dir() as dummy_dir:
            mod_name = '__main__'
            source = ("import runpy\n"
                      "runpy.run_path(%r)\n") % dummy_dir
            script_name = self._make_test_script(script_dir, mod_name, source)
            zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
            msg = "recursion depth exceeded"
            self.assertRaisesRegex(RuntimeError, msg, run_path, zip_name)

    def test_encoding(self):
        with temp_dir() as script_dir:
            filename = os.path.join(script_dir, 'script.py')
            with open(filename, 'w', encoding='latin1') as f:
                f.write("""
#coding:latin1
s = "non-ASCII: h\xe9"
""")
            result = run_path(filename)
            self.assertEqual(result['s'], "non-ASCII: h\xe9")
def test_main():
    """Entry point for test.support's run_unittest-based test driver."""
    all_cases = (
        ExecutionLayerTestCase,
        RunModuleTestCase,
        RunPathTestCase,
    )
    run_unittest(*all_cases)
if __name__ == "__main__":
    # Allow running this test module directly.
    test_main()
|
farr/plotutils | refs/heads/master | plotutils/parameterizations.py | 1 | """Useful parameterizations for various commonly-found elements of
models.
"""
import numpy as np
def _cov_matrix_dim(x):
    """Return the dimension ``N`` of the covariance matrix whose lower
    triangle has ``len(x) == N*(N+1)/2`` entries, validating the length."""
    flat = np.atleast_1d(x)
    m = flat.shape[0]
    # Invert the triangular-number formula M = N*(N+1)/2.
    n = int(round((np.sqrt(8.0 * m + 1.0) - 1.0) / 2.0))
    if n * (n + 1) // 2 != m:
        raise ValueError('input must have shape (N+1)*N/2 for some N')
    return n
def _cov_params_to_cholesky(x):
    """Map the unconstrained parameters ``x`` to a lower-triangular
    Cholesky factor, exponentiating the diagonal to keep it positive."""
    n = _cov_matrix_dim(x)
    chol = np.zeros((n, n))
    rows, cols = np.tril_indices(n)
    chol[rows, cols] = x
    diag = np.diag_indices(n)
    chol[diag] = np.exp(chol[diag])
    return chol
def cov_matrix(x):
    r"""Build an ``(N, N)`` covariance matrix from the ``(N+1)*N/2``
    unconstrained parameters ``x``.

    The parameterisation follows the `Stan <http://mc-stan.org/>`_
    sampler: ``x`` fills the lower triangle of a matrix, the diagonal is
    exponentiated to give a Cholesky factor :math:`z`, and the covariance
    is

    .. math::

      \Sigma = z z^T

    With this parameterisation there are no constraints on the
    components of ``x``.
    """
    chol = _cov_params_to_cholesky(x)
    return chol.dot(chol.T)
def cov_parameters(m):
    """Inverse of :func:`cov_matrix`: recover the unconstrained parameters
    associated with the covariance matrix ``m``."""
    m = np.atleast_2d(m)
    dim = m.shape[0]
    chol = np.linalg.cholesky(m)
    # Store the log of the diagonal so the parameters are unconstrained.
    diag = np.diag_indices(dim)
    chol[diag] = np.log(chol[diag])
    return chol[np.tril_indices(dim)]
def cov_log_jacobian(x):
    r"""Log-determinant of the Jacobian of the transformation that
    produces the covariance matrix from parameters ``x``:

    .. math::

      \log |J| = \log \left| \frac{\partial \Sigma}{\partial x} \right|
    """
    dim = _cov_matrix_dim(x)
    chol = _cov_params_to_cholesky(x)
    # Exponent carried by each diagonal element of the Cholesky factor.
    powers = dim - np.arange(1, dim + 1) + 2
    return dim * np.log(2) + np.dot(np.log(np.diag(chol)), powers)
def _logit(p):
    """Log-odds transform of ``p`` in (0, 1); inverse of :func:`_invlogit`."""
    return np.log(p) - np.log1p(-p)
def _invlogit(x):
    """Logistic sigmoid: map a real ``x`` into (0, 1)."""
    return 1.0/(1.0 + np.exp(-x))
def _usimp_zs(p):
    """Stick-breaking fractions for the unit-simplex parameters ``p``."""
    p = np.atleast_1d(p)
    n = p.shape[0] + 1
    # The log(k) offsets centre the map so p == 0 gives a uniform simplex.
    ks = n - np.arange(1, n)
    return _invlogit(p - np.log(ks))
def usimp_lengths(p):
    """Given ``N-1`` parameters, ``p``, returns ``N`` positive values that
    sum to one.  The transformation comes from the Stan manual.

    Imagine a stick that begins with unit length; the parameters are
    logit-transformed fractions of the amount of the stick remaining
    that is broken off in each of ``N-1`` steps to produce the ``N``
    lengths.

    :param p: Array-like of ``N-1`` unconstrained real parameters.
    :return: Array of ``N`` positive lengths summing to one.
    """
    p = np.atleast_1d(p)
    N = p.shape[0] + 1
    zs = _usimp_zs(p)
    xs = np.zeros(N)
    # PERF: track the remaining stick length instead of re-summing the
    # prefix on every iteration (was O(N^2), now O(N)).
    remaining = 1.0
    for i in range(N - 1):
        xs[i] = zs[i] * remaining
        remaining -= xs[i]
    # Whatever is left is the final piece, so the lengths sum to one.
    xs[N - 1] = remaining
    return xs
def usimp_parameters(x):
    """Inverse of :func:`usimp_lengths`: the ``N-1`` unit simplex
    parameters that reproduce the ``N`` lengths ``x``."""
    x = np.atleast_1d(x)
    n = x.shape[0]
    csxs = np.cumsum(x)
    zs = np.zeros(n - 1)
    for i in range(n - 1):
        # Fraction of the still-unbroken stick removed at step i.
        zs[i] = x[i] / (1 - csxs[i] + x[i])
    ks = n - np.arange(1, n)
    return _logit(zs) + np.log(ks)
def usimp_log_jacobian(p):
    r"""Log of the Jacobian factor

    .. math::

      \left| \frac{\partial x}{\partial p} \right|

    where :math:`x` are the unit simplex lengths.
    """
    p = np.atleast_1d(p)
    zs = _usimp_zs(p)
    xs = usimp_lengths(p)
    # Cumulative length broken off *before* each step (starts at zero).
    before = np.cumsum(xs) - xs
    terms = np.log(zs) + np.log1p(-zs) + np.log1p(-before[:-1])
    return np.sum(terms)
def bounded_values(p, low=-np.inf, high=np.inf):
    r"""Map unconstrained parameters ``p`` to values bounded between
    ``low`` and ``high`` (at least one bound must be finite; each bound
    may be a scalar or broadcastable against ``p``).

    The parameterisation is

    .. math::

      p = \log\left( x - \mathrm{low} \right) - \log\left( \mathrm{high} - x \right)

    if both limits are given, and :math:`p = \log(x - \mathrm{low})` or
    :math:`p = -\log(\mathrm{high} - x)` with a single limit.

    :param p: The parameters, :math:`-\infty < p < \infty`.
    :param low: Lower bound(s); broadcast against ``p``.
    :param high: Upper bound(s); broadcast against ``p``.
    :raises ValueError: if any element has neither bound finite.
    """
    # FIX: default was np.NINF, an alias removed in NumPy 2.0; -np.inf is
    # the identical value.  Also renamed the loop variable, which used to
    # shadow the array argument ``p``.
    p = np.atleast_1d(p)
    x = np.zeros(p.shape)
    for i, (pi, lo, hi) in enumerate(np.broadcast(p, low, high)):
        if lo == -np.inf:
            if hi == np.inf:
                raise ValueError('bounded_values: must supply at least one limit')
            # Only an upper limit.
            x[i] = hi - np.exp(-pi)
        elif hi == np.inf:
            # Only a lower limit.
            x[i] = lo + np.exp(pi)
        else:
            # Both bounds: logistic map into (lo, hi).
            ep = np.exp(pi)
            x[i] = (ep*hi + lo)/(ep + 1)
    return x
def bounded_params(x, low=-np.inf, high=np.inf):
    """Inverse of :func:`bounded_values`: the parameters associated with
    the values ``x`` bounded between ``low`` and ``high``.

    :param x: The bounded values.
    :param low: Lower bound(s); broadcast against ``x``.
    :param high: Upper bound(s); broadcast against ``x``.
    :raises ValueError: if any element has neither bound finite.
    """
    # FIX: np.NINF default removed in NumPy 2.0 (-np.inf is identical);
    # loop variable no longer shadows the array argument ``x``.
    x = np.atleast_1d(x)
    p = np.zeros(x.shape)
    for i, (xi, lo, hi) in enumerate(np.broadcast(x, low, high)):
        if lo == -np.inf:
            if hi == np.inf:
                raise ValueError('bounded_params: must supply at least one limit')
            # Only an upper limit.
            p[i] = -np.log(hi - xi)
        elif hi == np.inf:
            # Only a lower limit.
            p[i] = np.log(xi - lo)
        else:
            # Both bounds.
            p[i] = np.log(xi - lo) - np.log(hi - xi)
    return p
def bounded_log_jacobian(p, low=-np.inf, high=np.inf):
    r"""Log of the Jacobian factor

    .. math::

      \left| \frac{\partial x}{\partial p} \right|

    for the bounded parameters ``p`` (see :func:`bounded_values`).

    :raises ValueError: if any element has neither bound finite.
    """
    # FIX: np.NINF default removed in NumPy 2.0 (-np.inf is identical);
    # loop variable no longer shadows the argument ``p``.
    lj = 0.0
    for pi, lo, hi in np.broadcast(p, low, high):
        if lo == -np.inf:
            if hi == np.inf:
                raise ValueError('bounded_log_jacobian: must supply at least one limit')
            # Only an upper limit.
            lj -= pi
        elif hi == np.inf:
            # Only a lower limit.
            lj += pi
        else:
            # Both limits.
            lj += np.log(hi-lo) + pi - 2.0*np.log1p(np.exp(pi))
    return lj
def increasing_values(p):
    r"""Map parameters ``p`` to a strictly increasing sequence of values.

    The parameterisation is

    .. math::

      p_i = \begin{cases}
        x_0 & i = 0 \\
        \log\left( x_i - x_{i-1} \right) & \mathrm{otherwise}
      \end{cases}

    Note that :math:`-\infty < p < \infty`.
    """
    params = np.atleast_1d(p)
    values = np.zeros(params.shape)
    values[0] = params[0]
    # Each subsequent value adds a strictly positive increment exp(p[j]).
    for j in range(1, values.shape[0]):
        values[j] = values[j-1] + np.exp(params[j])
    return values
def increasing_params(x):
    """Inverse of :func:`increasing_values` for the values ``x`` (which
    are sorted first, so they need not arrive in increasing order)."""
    ordered = np.sort(np.atleast_1d(x))
    params = np.zeros(ordered.shape)
    params[0] = ordered[0]
    # Successive gaps are stored on a log scale.
    params[1:] = np.log(np.diff(ordered))
    return params
def increasing_log_jacobian(p):
    r"""Log of the Jacobian factor

    .. math::

      \left| \frac{\partial x}{\partial p} \right|

    for the increasing-values parameters ``p``.
    """
    # x_0 contributes factor 1; each later value contributes exp(p_i).
    increments = p[1:]
    return np.sum(increments)
def _logitab(x, a, b):
    """Logit of ``x`` rescaled from the interval ``(a, b)``."""
    return np.log(x - a) - np.log(b - x)
def _invlogitab(y, a, b):
if y > 0.0:
ey = np.exp(-y)
return (a*ey + b)/(1.0 + ey)
else:
ey = np.exp(y)
return (b*ey + a)/(1.0 + ey)
def _logitablogjac(y, a, b):
if y < 0.0:
ey = np.exp(y)
return np.log(b-a) + y - 2.0*np.log1p(ey)
else:
ey = np.exp(-y)
return np.log(b-a) - y - 2.0*np.log1p(ey)
def _stable_polynomial_roots_logjac(p, rmin, rmax):
    """Map parameters ``p`` to stable-polynomial roots and the log-Jacobian.

    Returns ``(roots, logjac)`` where ``roots`` is a complex array (complex
    roots appear in conjugate pairs) and ``logjac`` accumulates the
    log-Jacobian of the sweep described in :func:`stable_polynomial_roots`.
    """
    p = np.atleast_1d(p)
    a = -(rmax-rmin)
    b = rmax
    n = p.shape[0]
    rs = []
    lj = 0.0
    for i in range(0, n-1, 2):
        y = _invlogitab(p[i], a, b)
        lj += _logitablogjac(p[i], a, b)
        if y > 0.0:
            # Positive first value: a conjugate pair of complex roots with
            # imaginary part y and real part x in (-rmax, -rmin).
            x = _invlogitab(p[i+1], -rmax, -rmin)
            lj += _logitablogjac(p[i+1], -rmax, -rmin)
            rs.append(x + y*1j)
            rs.append(x - y*1j)
            # The next root's imaginary part is bounded above by this one.
            b = y
        else:
            # Non-positive value: this root and all remaining roots are real.
            rs.append(y-rmin)
            b = y
            x = _invlogitab(p[i+1], a, b)
            lj += _logitablogjac(p[i+1], a, b)
            rs.append(x-rmin)
            b = x
    if n % 2 == 1:
        if b > 0.0:
            b = 0.0  # The final root must be negative real, no matter what
        x = _invlogitab(p[n-1], a, b)
        lj += _logitablogjac(p[n-1], a, b)
        rs.append(x - rmin)
    # NOTE: ``np.complex`` (a deprecated alias of the builtin) was removed in
    # NumPy 1.24; the builtin ``complex`` yields the same complex128 dtype.
    return np.array(rs, dtype=complex), lj
def stable_polynomial_roots(p, rmin, rmax):
    r"""A parameterisation of the roots of a real, 'stable' polynomial.

    A stable polynomial has roots with a negative real part; it is the
    characteristic polynomial for a linear ODE with decaying
    solutions. The parameterisation provides a mapping from
    :math:`\mathbb{R}^n` to the roots that is one-to-one; there are no
    root-permutation degeneracies. The log-Jacobian function produces
    a flat distribution on the real and imaginary (if any) parts of
    the roots.

    :param p: The array giving the parameters in :math:`\mathbb{R}^n`
      for the roots of the polynomial.

    :param rmin: The real part of all the roots is bounded below
      :math:`-r_\mathrm{min}`.

    :param rmax: The real part of all the roots is bounded above
      :math:`-r_\mathrm{max}`, and the imaginary parts of all the
      roots are bounded between :math:`-r_\mathrm{max}` and
      :math:`r_\mathrm{max}`.

    The parameterisation uses a 'bounded logit' transformation to map
    ranges of reals to :math:`\pm \infty`:

    .. math::

        \mathrm{logit}\left(x; a, b\right) = \log(x-a) - \log(b-x)

    The mapping of the roots proceeds as follows. First, discard the
    roots with strictly negative imaginary parts (these are the
    conjugates of corresponding roots with strictly positive imaginary
    parts, so we lose no information). Then, sort the remaining roots
    in order of decreasing imaginary part; if there are any strictly
    real roots, with imaginary part zero, sort these in decreasing
    order, from least negative to most negative.

    Let :math:`a = -\left( r_\mathrm{max} - r_\mathrm{min} \right)`
    and :math:`b = r_\mathrm{max}`. Then, proceeding by pairs of
    roots:

    * If the imaginary part of root ``i`` is greater than zero, then

      .. math::

          p_i = \mathrm{logit}\left(\mathrm{im}\left(r_i\right); a, b\right) \\
          p_{i+1} = \mathrm{logit}\left(\mathrm{re}\left(r_i\right); -r_\mathrm{max}, -r_\mathrm{min} \right)

      Then set :math:`b = \mathrm{im} r_i`, and proceed to the next pair.

    * If the imaginary part of root ``i`` is zero, then

      .. math::

          p_i = \mathrm{logit}\left( r_i + r_\mathrm{min}; a, b \right) \\
          p_{i+1} = \mathrm{logit}\left( r_{i+1} + r_\mathrm{min}; a, r_i + r_\mathrm{min} \right)

      then set :math:`b = r_{i+1} + r_\mathrm{min}`.

    * If the number of roots is odd, then the final root must be
      real, and is the smallest of all the real roots:

      .. math::

          p_{N-1} = \mathrm{logit}\left( r_{N-1} + r_\mathrm{min}; a, \mathrm{min}(b, 0) \right).

    Intuitively, you can imagine constructing the parameterisation
    using a line that sweeps down the imaginary axis, starting from
    :math:`i r_\mathrm{max}`; as it hits each complex root, it records
    the logit of that root's imaginary and real parts (which must lie
    within certain bounds), and then sets the maximum bound for the
    imaginary part of the next root. Once all the complex roots have
    been parameterised, a line begins at :math:`-r_\mathrm{min}` on
    the real axis and sweeps left; as it hits each real root, it
    records the logit of that root between the current bounds, and
    resets the bound on the maximum value of the next root to the
    current real root.

    The reverse transformation begins by unpacking the first parameter
    value using the inverse logit transform; if this value is
    positive, then the root is complex, and the next parameter
    corresponds to the real part. If the value is negative, then this
    a real root, and the following roots are also real (and negative).
    Bounds on the subsequent root values are set accordingly in either
    case for the next inverse logit transformation.

    The parameterisation maps the allowed, sorted root space onto the
    entire :math:`\mathbb{R}^N` real space in a one-to-one way.
    """
    # All of the work (including the Jacobian accumulation) happens in the
    # shared helper; index [0] selects the roots array.
    return _stable_polynomial_roots_logjac(p, rmin, rmax)[0]
def stable_polynomial_log_jacobian(p, rmin, rmax):
    r"""Return the log-Jacobian :math:`\log\left|\partial r/\partial p\right|`
    of the :func:`stable_polynomial_roots` parameterisation for ``p``."""
    return _stable_polynomial_roots_logjac(p, rmin, rmax)[1]
def stable_polynomial_params(r, rmin, rmax):
    """Return the parameters associated with stable-polynomial roots ``r``.

    Inverse of :func:`stable_polynomial_roots`: only roots with
    non-negative imaginary part are encoded (conjugates are implied).
    """
    r = np.atleast_1d(r)
    n = r.shape[0]
    # Keep one representative of each conjugate pair, plus the real roots.
    cplx_r = r[np.imag(r) > 0.0]
    real_r = np.real(r[np.imag(r) == 0.0])
    cplx_r = cplx_r[np.argsort(np.imag(cplx_r))][::-1] # Decreasing imag
    real_r = real_r[np.argsort(real_r)][::-1] # Decreasing real
    a = -(rmax-rmin)
    b = rmax
    p = []
    # Sweep down the imaginary axis: each complex root tightens the upper
    # bound ``b`` on the next root's imaginary part.
    for rc in cplx_r:
        y = np.imag(rc)
        p.append(_logitab(y, a, b))
        x = np.real(rc)
        p.append(_logitab(x, -rmax, -rmin))
        b = y
    if n % 2 == 1:
        # With an odd root count the last (most negative) real root is
        # encoded separately below, with its own bound.
        last_r = real_r[-1]
        real_r = real_r[:-1]
    # Sweep left along the real axis, shifted by rmin; each root tightens
    # the bound on the next.
    for rr in real_r:
        y = rr + rmin
        p.append(_logitab(y, a, b))
        b = y
    if n % 2 == 1:
        if b > 0.0:
            # The final root must be negative real, no matter what.
            b = 0.0
        y = last_r + rmin
        p.append(_logitab(y, a, b))
    return np.array(p)
|
SUSE-Cloud/nova | refs/heads/stable/havana | nova/tests/api/openstack/compute/contrib/test_networks.py | 7 | # Copyright 2011 Grid Dynamics
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import math
import netaddr
import uuid
from oslo.config import cfg
import webob
from nova.api.openstack.compute.contrib import networks_associate
from nova.api.openstack.compute.contrib import os_networks as networks
import nova.context
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
CONF = cfg.CONF
# Fixture data: two fully-populated fake networks, as the network API /
# database layer would return them in the admin view.
FAKE_NETWORKS = [
    {
        'bridge': 'br100', 'vpn_public_port': 1000,
        'dhcp_start': '10.0.0.3', 'bridge_interface': 'eth0',
        'updated_at': '2011-08-16 09:26:13.048257',
        'id': 1, 'uuid': '20c8acc0-f747-4d71-a389-46d078ebf047',
        'cidr_v6': None, 'deleted_at': None,
        'gateway': '10.0.0.1', 'label': 'mynet_0',
        'project_id': '1234', 'rxtx_base': None,
        'vpn_private_address': '10.0.0.2', 'deleted': False,
        'vlan': 100, 'broadcast': '10.0.0.7',
        'netmask': '255.255.255.248', 'injected': False,
        'cidr': '10.0.0.0/29',
        'vpn_public_address': '127.0.0.1', 'multi_host': False,
        'dns1': None, 'dns2': None, 'host': 'nsokolov-desktop',
        'gateway_v6': None, 'netmask_v6': None, 'priority': None,
        'created_at': '2011-08-15 06:19:19.387525',
    },
    {
        'bridge': 'br101', 'vpn_public_port': 1001,
        'dhcp_start': '10.0.0.11', 'bridge_interface': 'eth0',
        'updated_at': None, 'id': 2, 'cidr_v6': None,
        'uuid': '20c8acc0-f747-4d71-a389-46d078ebf000',
        'deleted_at': None, 'gateway': '10.0.0.9',
        'label': 'mynet_1', 'project_id': None,
        'vpn_private_address': '10.0.0.10', 'deleted': False,
        'vlan': 101, 'broadcast': '10.0.0.15', 'rxtx_base': None,
        'netmask': '255.255.255.248', 'injected': False,
        'cidr': '10.0.0.10/29', 'vpn_public_address': None,
        'multi_host': False, 'dns1': None, 'dns2': None, 'host': None,
        'gateway_v6': None, 'netmask_v6': None, 'priority': None,
        'created_at': '2011-08-15 06:19:19.885495',
    },
]
# The same two networks as a non-admin user sees them: only the
# user-visible subset of fields is present.
FAKE_USER_NETWORKS = [
    {
        'id': 1, 'cidr': '10.0.0.0/29', 'netmask': '255.255.255.248',
        'gateway': '10.0.0.1', 'broadcast': '10.0.0.7', 'dns1': None,
        'dns2': None, 'cidr_v6': None, 'gateway_v6': None, 'label': 'mynet_0',
        'netmask_v6': None, 'uuid': '20c8acc0-f747-4d71-a389-46d078ebf047',
    },
    {
        'id': 2, 'cidr': '10.0.0.10/29', 'netmask': '255.255.255.248',
        'gateway': '10.0.0.9', 'broadcast': '10.0.0.15', 'dns1': None,
        'dns2': None, 'cidr_v6': None, 'gateway_v6': None, 'label': 'mynet_1',
        'netmask_v6': None, 'uuid': '20c8acc0-f747-4d71-a389-46d078ebf000',
    },
]
# Request body used by the create tests.
NEW_NETWORK = {
    "network": {
        "bridge_interface": "eth0",
        "cidr": "10.20.105.0/24",
        "label": "new net 111",
        "vlan_start": 111,
    }
}
class FakeNetworkAPI(object):
    """In-memory stand-in for the nova network API used by these tests."""
    _sentinel = object()
    _vlan_is_disabled = False
    def __init__(self):
        # Deep copy so each test mutates its own fixture data.
        self.networks = copy.deepcopy(FAKE_NETWORKS)
    def disable_vlan(self):
        # Makes add_network_to_project raise NotImplementedError, mimicking
        # a deployment without the VLAN network manager.
        self._vlan_is_disabled = True
    def delete(self, context, network_id):
        for i, network in enumerate(self.networks):
            if network['id'] == network_id:
                # BUG FIX: this previously deleted self.networks[0] no
                # matter which entry matched; delete the matched one.
                del self.networks[i]
                return True
        raise exception.NetworkNotFoundForUUID(uuid=network_id)
    def disassociate(self, context, network_uuid):
        for network in self.networks:
            if network.get('uuid') == network_uuid:
                network['project_id'] = None
                return True
        raise exception.NetworkNotFound(network_id=network_uuid)
    def associate(self, context, network_uuid, host=_sentinel,
                  project=_sentinel):
        # The sentinel distinguishes "argument not supplied" from an
        # explicit None (which is a valid host/project value).
        for network in self.networks:
            if network.get('uuid') == network_uuid:
                if host is not FakeNetworkAPI._sentinel:
                    network['host'] = host
                if project is not FakeNetworkAPI._sentinel:
                    network['project_id'] = project
                return True
        raise exception.NetworkNotFound(network_id=network_uuid)
    def add_network_to_project(self, context,
                               project_id, network_uuid=None):
        if self._vlan_is_disabled:
            raise NotImplementedError()
        # NOTE(review): when network_uuid is supplied this associates the
        # first *unowned* network rather than the one matching the uuid,
        # and the loop below is unreachable; preserved as-is because the
        # tests rely on the current behaviour -- confirm intent upstream.
        if network_uuid:
            for network in self.networks:
                if network.get('project_id', None) is None:
                    network['project_id'] = project_id
                    return
            return
        for network in self.networks:
            if network.get('uuid') == network_uuid:
                network['project_id'] = project_id
                return
    def get_all(self, context):
        return self._fake_db_network_get_all(context, project_only=True)
    def _fake_db_network_get_all(self, context, project_only="allow_none"):
        # Mirrors the nova.db filtering rules: ordinary users see their own
        # networks, plus unowned ones when project_only == 'allow_none'.
        project_id = context.project_id
        nets = self.networks
        if nova.context.is_user_context(context) and project_only:
            if project_only == 'allow_none':
                nets = [n for n in self.networks
                        if (n['project_id'] == project_id or
                            n['project_id'] is None)]
            else:
                nets = [n for n in self.networks
                        if n['project_id'] == project_id]
        return nets
    def get(self, context, network_id):
        for network in self.networks:
            if network.get('uuid') == network_id:
                return network
        raise exception.NetworkNotFound(network_id=network_id)
    def create(self, context, **kwargs):
        # Carve the requested CIDR into num_networks subnets of
        # network_size addresses each, like the real allocator.
        subnet_bits = int(math.ceil(math.log(kwargs.get(
            'network_size', CONF.network_size), 2)))
        fixed_net_v4 = netaddr.IPNetwork(kwargs['cidr'])
        prefixlen_v4 = 32 - subnet_bits
        subnets_v4 = list(fixed_net_v4.subnet(
            prefixlen_v4,
            count=kwargs.get('num_networks', CONF.num_networks)))
        new_networks = []
        new_id = max((net['id'] for net in self.networks))
        for index, subnet_v4 in enumerate(subnets_v4):
            new_id += 1
            net = {'id': new_id, 'uuid': str(uuid.uuid4())}
            net['cidr'] = str(subnet_v4)
            net['netmask'] = str(subnet_v4.netmask)
            net['gateway'] = kwargs.get('gateway') or str(subnet_v4[1])
            net['broadcast'] = str(subnet_v4.broadcast)
            net['dhcp_start'] = str(subnet_v4[2])
            # keys() (instead of the Python-2-only iterkeys()) behaves the
            # same here and also works on Python 3.
            for key in FAKE_NETWORKS[0].keys():
                net.setdefault(key, kwargs.get(key))
            new_networks.append(net)
        self.networks += new_networks
        return new_networks
class NetworksTest(test.NoDBTestCase):
    """Tests for the os-networks API extension, backed by FakeNetworkAPI."""
    def setUp(self):
        super(NetworksTest, self).setUp()
        self.fake_network_api = FakeNetworkAPI()
        self.controller = networks.NetworkController(
            self.fake_network_api)
        self.associate_controller = networks_associate\
            .NetworkAssociateActionController(self.fake_network_api)
        fakes.stub_out_networking(self.stubs)
        fakes.stub_out_rate_limiting(self.stubs)
    @staticmethod
    def network_uuid_to_id(network):
        # The API exposes the uuid under the 'id' key; mutate the given
        # dict in place to match that representation.
        network['id'] = network['uuid']
        del network['uuid']
    def test_network_list_all_as_user(self):
        self.maxDiff = None
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks')
        res_dict = self.controller.index(req)
        # Initially no network is associated with the user's project.
        self.assertEqual(res_dict, {'networks': []})
        project_id = req.environ["nova.context"].project_id
        cxt = req.environ["nova.context"]
        uuid = FAKE_NETWORKS[0]['uuid']
        self.fake_network_api.associate(context=cxt,
                                        network_uuid=uuid,
                                        project=project_id)
        res_dict = self.controller.index(req)
        # BUG FIX: deep-copy the fixture before mutating it with
        # network_uuid_to_id, so other tests see the pristine data.
        expected = [copy.deepcopy(FAKE_USER_NETWORKS[0])]
        for network in expected:
            self.network_uuid_to_id(network)
        self.assertEqual(res_dict, {'networks': expected})
    def test_network_list_all_as_admin(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks')
        req.environ["nova.context"].is_admin = True
        res_dict = self.controller.index(req)
        expected = copy.deepcopy(FAKE_NETWORKS)
        for network in expected:
            self.network_uuid_to_id(network)
        self.assertEqual(res_dict, {'networks': expected})
    def test_network_disassociate(self):
        uuid = FAKE_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s/action' % uuid)
        res = self.controller._disassociate_host_and_project(
            req, uuid, {'disassociate': None})
        self.assertEqual(res.status_int, 202)
        self.assertIsNone(self.fake_network_api.networks[0]['project_id'])
        self.assertIsNone(self.fake_network_api.networks[0]['host'])
    def test_network_disassociate_host_only(self):
        uuid = FAKE_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s/action' % uuid)
        res = self.associate_controller._disassociate_host_only(
            req, uuid, {'disassociate_host': None})
        self.assertEqual(res.status_int, 202)
        # Only the host is cleared; the project association must survive.
        self.assertIsNotNone(self.fake_network_api.networks[0]['project_id'])
        self.assertIsNone(self.fake_network_api.networks[0]['host'])
    def test_network_disassociate_project_only(self):
        uuid = FAKE_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s/action' % uuid)
        res = self.associate_controller._disassociate_project_only(
            req, uuid, {'disassociate_project': None})
        self.assertEqual(res.status_int, 202)
        # Only the project is cleared; the host association must survive.
        self.assertIsNone(self.fake_network_api.networks[0]['project_id'])
        self.assertIsNotNone(self.fake_network_api.networks[0]['host'])
    def test_network_disassociate_not_found(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/100/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._disassociate_host_and_project,
                          req, 100, {'disassociate': None})
    def test_network_get_as_user(self):
        uuid = FAKE_USER_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected = {'network': copy.deepcopy(FAKE_USER_NETWORKS[0])}
        self.network_uuid_to_id(expected['network'])
        self.assertEqual(res_dict, expected)
    def test_network_get_as_admin(self):
        uuid = FAKE_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        req.environ["nova.context"].is_admin = True
        res_dict = self.controller.show(req, uuid)
        expected = {'network': copy.deepcopy(FAKE_NETWORKS[0])}
        self.network_uuid_to_id(expected['network'])
        self.assertEqual(res_dict, expected)
    def test_network_get_not_found(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/100')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show, req, 100)
    def test_network_delete(self):
        uuid = FAKE_NETWORKS[0]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        res = self.controller.delete(req, 1)
        self.assertEqual(res.status_int, 202)
    def test_network_delete_not_found(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/100')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.delete, req, 100)
    def test_network_add_vlan_disabled(self):
        self.fake_network_api.disable_vlan()
        uuid = FAKE_NETWORKS[1]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/add')
        self.assertRaises(webob.exc.HTTPNotImplemented,
                          self.controller.add, req, {'id': uuid})
    def test_network_add(self):
        uuid = FAKE_NETWORKS[1]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/add')
        res = self.controller.add(req, {'id': uuid})
        self.assertEqual(res.status_int, 202)
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        req.environ["nova.context"].is_admin = True
        res_dict = self.controller.show(req, uuid)
        self.assertEqual(res_dict['network']['project_id'], 'fake')
    def test_network_associate_with_host(self):
        uuid = FAKE_NETWORKS[1]['uuid']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s/action' % uuid)
        res = self.associate_controller._associate_host(
            req, uuid, {'associate_host': "TestHost"})
        self.assertEqual(res.status_int, 202)
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        req.environ["nova.context"].is_admin = True
        res_dict = self.controller.show(req, uuid)
        self.assertEqual(res_dict['network']['host'], 'TestHost')
    def test_network_create(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks')
        res_dict = self.controller.create(req, NEW_NETWORK)
        self.assertIn('network', res_dict)
        uuid = res_dict['network']['id']
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        self.assertTrue(res_dict['network']['label'].
                        startswith(NEW_NETWORK['network']['label']))
    def test_network_create_large(self):
        req = fakes.HTTPRequest.blank('/v2/1234/os-networks')
        large_network = copy.deepcopy(NEW_NETWORK)
        large_network['network']['cidr'] = '128.0.0.0/4'
        res_dict = self.controller.create(req, large_network)
        self.assertEqual(res_dict['network']['cidr'],
                         large_network['network']['cidr'])
|
YYWen0o0/python-frame-django | refs/heads/master | django/contrib/messages/storage/session.py | 288 | import json
from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import MessageEncoder, MessageDecoder
from django.utils import six
class SessionStorage(BaseStorage):
    """
    Message storage backed by the session (django.contrib.sessions).
    """
    session_key = '_messages'
    def __init__(self, request, *args, **kwargs):
        assert hasattr(request, 'session'), "The session-based temporary "\
            "message storage requires session middleware to be installed, "\
            "and come before the message middleware in the "\
            "MIDDLEWARE_CLASSES list."
        super(SessionStorage, self).__init__(request, *args, **kwargs)
    def _get(self, *args, **kwargs):
        """
        Retrieves a list of messages from the request's session.  The
        second element of the returned pair is the all_retrieved flag,
        which is always True since this storage keeps everything it is
        given.
        """
        raw = self.request.session.get(self.session_key)
        return self.deserialize_messages(raw), True
    def _store(self, messages, response, *args, **kwargs):
        """
        Stores a list of messages to the request's session, removing the
        key entirely when there is nothing to store.
        """
        session = self.request.session
        if not messages:
            session.pop(self.session_key, None)
        else:
            session[self.session_key] = self.serialize_messages(messages)
        return []
    def serialize_messages(self, messages):
        # Compact separators keep the serialized payload small.
        return MessageEncoder(separators=(',', ':')).encode(messages)
    def deserialize_messages(self, data):
        # Only JSON strings need decoding; anything else (None, or an
        # already-deserialized list) is passed through untouched.
        if isinstance(data, six.string_types) and data:
            return json.loads(data, cls=MessageDecoder)
        return data
|
igor-toga/local-snat | refs/heads/master | neutron/tests/unit/notifiers/test_batch_notifier.py | 56 | # Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.notifiers import batch_notifier
from neutron.tests import base
class TestBatchNotifier(base.BaseTestCase):
    """Behavioural tests for BatchNotifier's event queueing."""
    def setUp(self):
        super(TestBatchNotifier, self).setUp()
        self.notifier = batch_notifier.BatchNotifier(0.1, lambda x: x)
        self.spawn_n = mock.patch('eventlet.spawn_n').start()
    def test_queue_event_no_event(self):
        # A None event is ignored entirely: nothing queued, nothing spawned.
        self.notifier.queue_event(None)
        self.assertEqual(len(self.notifier.pending_events), 0)
        self.assertEqual(self.spawn_n.call_count, 0)
    def test_queue_event_first_event(self):
        self.notifier.queue_event(mock.Mock())
        self.assertEqual(len(self.notifier.pending_events), 1)
        self.assertEqual(self.spawn_n.call_count, 1)
    def test_queue_event_multiple_events(self):
        # Only the first event spawns a sender; the rest just accumulate.
        num_events = 6
        for _ in range(num_events):
            self.notifier.queue_event(mock.Mock())
        self.assertEqual(len(self.notifier.pending_events), num_events)
        self.assertEqual(self.spawn_n.call_count, 1)
    def test_queue_event_call_send_events(self):
        with mock.patch.object(self.notifier, 'callback') as send_events:
            # Run the spawned function synchronously instead of deferring it.
            self.spawn_n.side_effect = lambda func: func()
            self.notifier.queue_event(mock.Mock())
            self.assertFalse(self.notifier._waiting_to_send)
            self.assertTrue(send_events.called)
|
vedujoshi/tempest | refs/heads/master | tempest/api/compute/servers/test_attach_interfaces.py | 1 | # Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from tempest.api.compute import base
from tempest.common import compute
from tempest.common.utils import net_utils
from tempest.common import waiters
from tempest import config
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class AttachInterfacesTestJSON(base.BaseV2ComputeTest):
    """Tests attaching, listing, showing and detaching server network
    interfaces through the compute API (requires Neutron and the
    interface-attach compute feature)."""
    @classmethod
    def skip_checks(cls):
        super(AttachInterfacesTestJSON, cls).skip_checks()
        if not CONF.service_available.neutron:
            raise cls.skipException("Neutron is required")
        if not CONF.compute_feature_enabled.interface_attach:
            raise cls.skipException("Interface attachment is not available.")
    @classmethod
    def setup_credentials(cls):
        # This test class requires network and subnet
        cls.set_network_resources(network=True, subnet=True)
        super(AttachInterfacesTestJSON, cls).setup_credentials()
    @classmethod
    def setup_clients(cls):
        super(AttachInterfacesTestJSON, cls).setup_clients()
        cls.subnets_client = cls.os_primary.subnets_client
        cls.ports_client = cls.os_primary.ports_client
    def wait_for_port_detach(self, port_id):
        """Waits for the port's device_id to be unset.
        :param port_id: The id of the port being detached.
        :returns: The final port dict from the show_port response.
        """
        port = self.ports_client.show_port(port_id)['port']
        device_id = port['device_id']
        start = int(time.time())
        # NOTE(mriedem): Nova updates the port's device_id to '' rather than
        # None, but it's not contractual so handle Falsey either way.
        while device_id:
            time.sleep(self.build_interval)
            port = self.ports_client.show_port(port_id)['port']
            device_id = port['device_id']
            timed_out = int(time.time()) - start >= self.build_timeout
            if device_id and timed_out:
                message = ('Port %s failed to detach (device_id %s) within '
                           'the required time (%s s).' %
                           (port_id, device_id, self.build_timeout))
                raise lib_exc.TimeoutException(message)
        return port
    def _check_interface(self, iface, port_id=None, network_id=None,
                         fixed_ip=None, mac_addr=None):
        # Each field is only verified when the caller supplied an
        # expectation for it.
        if port_id:
            self.assertEqual(iface['port_id'], port_id)
        if network_id:
            self.assertEqual(iface['net_id'], network_id)
        if fixed_ip:
            self.assertEqual(iface['fixed_ips'][0]['ip_address'], fixed_ip)
        if mac_addr:
            self.assertEqual(iface['mac_addr'], mac_addr)
    def _create_server_get_interfaces(self):
        # Boot a server and return it with its (active) interface list.
        server = self.create_test_server(wait_until='ACTIVE')
        ifs = (self.interfaces_client.list_interfaces(server['id'])
               ['interfaceAttachments'])
        body = waiters.wait_for_interface_status(
            self.interfaces_client, server['id'], ifs[0]['port_id'], 'ACTIVE')
        ifs[0]['port_state'] = body['port_state']
        return server, ifs
    def _test_create_interface(self, server):
        # Attach with no arguments: Nova picks the network.
        iface = (self.interfaces_client.create_interface(server['id'])
                 ['interfaceAttachment'])
        iface = waiters.wait_for_interface_status(
            self.interfaces_client, server['id'], iface['port_id'], 'ACTIVE')
        self._check_interface(iface)
        return iface
    def _test_create_interface_by_network_id(self, server, ifs):
        network_id = ifs[0]['net_id']
        iface = self.interfaces_client.create_interface(
            server['id'], net_id=network_id)['interfaceAttachment']
        iface = waiters.wait_for_interface_status(
            self.interfaces_client, server['id'], iface['port_id'], 'ACTIVE')
        self._check_interface(iface, network_id=network_id)
        return iface
    def _test_create_interface_by_port_id(self, server, ifs):
        network_id = ifs[0]['net_id']
        port = self.ports_client.create_port(network_id=network_id)
        port_id = port['port']['id']
        self.addCleanup(self.ports_client.delete_port, port_id)
        iface = self.interfaces_client.create_interface(
            server['id'], port_id=port_id)['interfaceAttachment']
        iface = waiters.wait_for_interface_status(
            self.interfaces_client, server['id'], iface['port_id'], 'ACTIVE')
        self._check_interface(iface, port_id=port_id)
        return iface
    def _test_create_interface_by_fixed_ips(self, server, ifs):
        network_id = ifs[0]['net_id']
        subnet_id = ifs[0]['fixed_ips'][0]['subnet_id']
        # Pick a currently unused address on the subnet to request.
        ip_list = net_utils.get_unused_ip_addresses(self.ports_client,
                                                    self.subnets_client,
                                                    network_id,
                                                    subnet_id,
                                                    1)
        fixed_ips = [{'ip_address': ip_list[0]}]
        iface = self.interfaces_client.create_interface(
            server['id'], net_id=network_id,
            fixed_ips=fixed_ips)['interfaceAttachment']
        self.addCleanup(self.ports_client.delete_port, iface['port_id'])
        iface = waiters.wait_for_interface_status(
            self.interfaces_client, server['id'], iface['port_id'], 'ACTIVE')
        self._check_interface(iface, fixed_ip=ip_list[0])
        return iface
    def _test_show_interface(self, server, ifs):
        iface = ifs[0]
        _iface = self.interfaces_client.show_interface(
            server['id'], iface['port_id'])['interfaceAttachment']
        self._check_interface(iface, port_id=_iface['port_id'],
                              network_id=_iface['net_id'],
                              fixed_ip=_iface['fixed_ips'][0]['ip_address'],
                              mac_addr=_iface['mac_addr'])
    def _test_delete_interface(self, server, ifs):
        # NOTE(danms): delete not the first or last, but one in the middle
        iface = ifs[1]
        self.interfaces_client.delete_interface(server['id'], iface['port_id'])
        _ifs = (self.interfaces_client.list_interfaces(server['id'])
                ['interfaceAttachments'])
        start = int(time.time())
        # Poll until the interface disappears from the list (detach is
        # asynchronous), up to build_timeout.
        while len(ifs) == len(_ifs):
            time.sleep(self.build_interval)
            _ifs = (self.interfaces_client.list_interfaces(server['id'])
                    ['interfaceAttachments'])
            timed_out = int(time.time()) - start >= self.build_timeout
            if len(ifs) == len(_ifs) and timed_out:
                message = ('Failed to delete interface within '
                           'the required time: %s sec.' % self.build_timeout)
                raise lib_exc.TimeoutException(message)
        self.assertNotIn(iface['port_id'], [i['port_id'] for i in _ifs])
        return _ifs
    def _compare_iface_list(self, list1, list2):
        # NOTE(danms): port_state will likely have changed, so just
        # confirm the port_ids are the same at least
        list1 = [x['port_id'] for x in list1]
        list2 = [x['port_id'] for x in list2]
        self.assertEqual(sorted(list1), sorted(list2))
    @decorators.idempotent_id('73fe8f02-590d-4bf1-b184-e9ca81065051')
    @test.services('network')
    def test_create_list_show_delete_interfaces(self):
        server, ifs = self._create_server_get_interfaces()
        interface_count = len(ifs)
        self.assertGreater(interface_count, 0)
        self._check_interface(ifs[0])
        try:
            iface = self._test_create_interface(server)
        except lib_exc.BadRequest as e:
            # NOTE(review): re-raises only when no fixed network is
            # configured AND the ambiguity message matches; otherwise the
            # BadRequest is swallowed -- confirm that is the intent.
            msg = ('Multiple possible networks found, use a Network ID to be '
                   'more specific.')
            if not CONF.compute.fixed_network_name and e.message == msg:
                raise
        else:
            ifs.append(iface)
        iface = self._test_create_interface_by_network_id(server, ifs)
        ifs.append(iface)
        iface = self._test_create_interface_by_port_id(server, ifs)
        ifs.append(iface)
        iface = self._test_create_interface_by_fixed_ips(server, ifs)
        ifs.append(iface)
        _ifs = (self.interfaces_client.list_interfaces(server['id'])
                ['interfaceAttachments'])
        self._compare_iface_list(ifs, _ifs)
        self._test_show_interface(server, ifs)
        _ifs = self._test_delete_interface(server, ifs)
        self.assertEqual(len(ifs) - 1, len(_ifs))
    @decorators.attr(type='smoke')
    @decorators.idempotent_id('c7e0e60b-ee45-43d0-abeb-8596fd42a2f9')
    @test.services('network')
    def test_add_remove_fixed_ip(self):
        # Add and Remove the fixed IP to server.
        server, ifs = self._create_server_get_interfaces()
        interface_count = len(ifs)
        self.assertGreater(interface_count, 0)
        self._check_interface(ifs[0])
        network_id = ifs[0]['net_id']
        self.servers_client.add_fixed_ip(server['id'], networkId=network_id)
        # Remove the fixed IP from server.
        server_detail = self.os_primary.servers_client.show_server(
            server['id'])['server']
        # Get the Fixed IP from server.
        fixed_ip = None
        for ip_set in server_detail['addresses']:
            for ip in server_detail['addresses'][ip_set]:
                if ip['OS-EXT-IPS:type'] == 'fixed':
                    fixed_ip = ip['addr']
                    break
            if fixed_ip is not None:
                break
        self.servers_client.remove_fixed_ip(server['id'], address=fixed_ip)
    @decorators.skip_because(bug='1607714')
    @decorators.idempotent_id('2f3a0127-95c7-4977-92d2-bc5aec602fb4')
    def test_reassign_port_between_servers(self):
        """Tests the following:
        1. Create a port in Neutron.
        2. Create two servers in Nova.
        3. Attach the port to the first server.
        4. Detach the port from the first server.
        5. Attach the port to the second server.
        6. Detach the port from the second server.
        """
        network = self.get_tenant_network()
        network_id = network['id']
        port = self.ports_client.create_port(network_id=network_id)
        port_id = port['port']['id']
        self.addCleanup(self.ports_client.delete_port, port_id)
        # create two servers
        _, servers = compute.create_test_server(
            self.os_primary, tenant_network=network,
            wait_until='ACTIVE', min_count=2)
        # add our cleanups for the servers since we bypassed the base class
        for server in servers:
            self.addCleanup(self.delete_server, server['id'])
        for server in servers:
            # attach the port to the server
            iface = self.interfaces_client.create_interface(
                server['id'], port_id=port_id)['interfaceAttachment']
            self._check_interface(iface, port_id=port_id)
            # detach the port from the server; this is a cast in the compute
            # API so we have to poll the port until the device_id is unset.
            self.interfaces_client.delete_interface(server['id'], port_id)
            self.wait_for_port_detach(port_id)
|
darac/powerline | refs/heads/develop | tests/lib/fsconfig.py | 24 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import os
import json
from subprocess import check_call
from shutil import rmtree
from itertools import chain
from powerline import Powerline
CONFIG_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'config')
class TestPowerline(Powerline):
    """Powerline variant whose configuration search paths are injected
    explicitly instead of being discovered on the filesystem."""
    def __init__(self, _paths, *args, **kwargs):
        # NOTE(review): _paths is assigned after the base __init__ runs;
        # assumes the base constructor does not call get_config_paths —
        # confirm against the Powerline base class.
        super(TestPowerline, self).__init__(*args, **kwargs)
        self._paths = _paths
    def get_config_paths(self):
        # Override: return the injected paths verbatim.
        return self._paths
def mkdir_recursive(directory):
    """Create ``directory`` along with any missing parent directories.

    Already-existing directories (at any level) are accepted silently.
    """
    if not os.path.isdir(directory):
        # Ensure the parent exists first, then create this level.
        mkdir_recursive(os.path.dirname(directory))
        os.mkdir(directory)
class FSTree(object):
    """Context manager that materialises a configuration tree on disk.

    On entry, each ``tree`` entry is written as ``<root>/<key>.json``
    (creating intermediate directories for keys containing path
    separators) and, when ``create_p`` is set, a :class:`TestPowerline`
    is constructed over the tree and entered.  On exit the tree — and
    the Powerline instance, if any — is torn down.
    """
    __slots__ = ('tree', 'p', 'p_kwargs', 'create_p', 'get_config_paths', 'root')
    # NOTE(review): p_kwargs uses a mutable default dict; safe only as long
    # as it is never mutated (it is only unpacked with ** below).
    def __init__(
        self,
        tree,
        p_kwargs={'run_once': True},
        root=CONFIG_DIR,
        get_config_paths=lambda p: (p,),
        create_p=False
    ):
        self.tree = tree
        self.root = root
        self.get_config_paths = get_config_paths
        self.create_p = create_p
        self.p = None
        self.p_kwargs = p_kwargs
    def __enter__(self, *args):
        os.mkdir(self.root)
        # Dump every config mapping as JSON under the root.
        for k, v in self.tree.items():
            fname = os.path.join(self.root, k) + '.json'
            mkdir_recursive(os.path.dirname(fname))
            with open(fname, 'w') as F:
                json.dump(v, F)
        if self.create_p:
            self.p = TestPowerline(
                _paths=self.get_config_paths(self.root),
                ext='test',
                renderer_module='tests.lib.config_mock',
                **self.p_kwargs
            )
        # Optionally lint the generated configuration; the bare except is a
        # cleanup-then-reraise (it also catches KeyboardInterrupt so the
        # tree is removed before the exception propagates).
        if os.environ.get('POWERLINE_RUN_LINT_DURING_TESTS'):
            try:
                check_call(chain(['scripts/powerline-lint'], *[
                    ('-p', d) for d in (
                        self.p.get_config_paths() if self.p
                        else self.get_config_paths(self.root)
                    )
                ]))
            except:
                self.__exit__()
                raise
        # Returns None when no Powerline instance was requested.
        return self.p and self.p.__enter__(*args)
    def __exit__(self, *args):
        # Remove the tree even if exiting the Powerline instance fails.
        # NOTE(review): the lint failure path above calls __exit__() with no
        # arguments, so self.p.__exit__ may receive an empty *args — confirm
        # the Powerline base accepts that.
        try:
            rmtree(self.root)
        finally:
            if self.p:
                self.p.__exit__(*args)
|
psawaya/Mental-Ginger | refs/heads/master | django/contrib/admindocs/utils.py | 314 | "Misc. utility functions/classes for admin documentation generator."
import re
from email.Parser import HeaderParser
from email.Errors import HeaderParseError
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_str
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def trim_docstring(docstring):
    """
    Uniformly trim leading/trailing whitespace from docstrings.

    Based on http://www.python.org/peps/pep-0257.html#handling-docstring-indentation

    Returns '' for empty/None/whitespace-only input.
    """
    if not docstring or not docstring.strip():
        return ''
    # Convert tabs to spaces and split into lines.
    lines = docstring.expandtabs().splitlines()
    # The first line sits immediately after the opening quotes and carries
    # no meaningful indentation, so the common indent must be computed from
    # the continuation lines only (ignoring blank ones). Including lines[0]
    # here — as this function previously did — made the indent 0 for every
    # conventional docstring, so continuation lines were never dedented.
    indents = [len(line) - len(line.lstrip()) for line in lines[1:] if line.lstrip()]
    indent = min(indents) if indents else 0
    trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
    return "\n".join(trimmed).strip()
def parse_docstring(docstring):
    """
    Parse out the parts of a docstring. Returns (title, body, metadata).

    title is the first paragraph, body the remaining paragraphs joined by
    blank lines, and metadata a dict built from a trailing RFC 822-style
    "key: value" block (empty dict when there is none).
    """
    docstring = trim_docstring(docstring)
    # Paragraphs are separated by one or more blank lines.
    parts = re.split(r'\n{2,}', docstring)
    title = parts[0]
    if len(parts) == 1:
        body = ''
        metadata = {}
    else:
        # The last paragraph may be a header-style metadata block; try to
        # parse it as email headers.
        parser = HeaderParser()
        try:
            metadata = parser.parsestr(parts[-1])
        except HeaderParseError:
            metadata = {}
            body = "\n\n".join(parts[1:])
        else:
            metadata = dict(metadata.items())
            if metadata:
                # Metadata parsed: exclude that final paragraph from the body.
                body = "\n\n".join(parts[1:-1])
            else:
                body = "\n\n".join(parts[1:])
    return title, body, metadata
def parse_rst(text, default_reference_context, thing_being_parsed=None):
    """
    Convert the string from reST to an XHTML fragment.

    ``default_reference_context`` selects which ROLES entry an unprefixed
    :role:`...` reference links to; ``thing_being_parsed`` is only used as
    the source path in docutils error messages.
    """
    overrides = {
        'doctitle_xform': True,
        # This setting was previously misspelled 'inital_header_level',
        # which made docutils silently ignore the override; the correct
        # docutils setting name is 'initial_header_level'.
        'initial_header_level': 3,
        "default_reference_context": default_reference_context,
        "link_base": reverse('django-admindocs-docroot').rstrip('/')
    }
    if thing_being_parsed:
        thing_being_parsed = smart_str("<%s>" % thing_being_parsed)
    parts = docutils.core.publish_parts(text, source_path=thing_being_parsed,
                                        destination_path=None, writer_name='html',
                                        settings_overrides=overrides)
    return mark_safe(parts['fragment'])
#
# reST roles
#
# Map of reST role name -> URL pattern; each pattern is filled with
# (link_base, lowercased role text) when the role is rendered.
ROLES = {
    'model': '%s/models/%s/',
    'view': '%s/views/%s/',
    'template': '%s/templates/%s/',
    'filter': '%s/filters/#%s',
    'tag': '%s/tags/#%s',
}
def create_reference_role(rolename, urlbase):
    """Register a docutils role named *rolename* that renders
    :rolename:`text` as a reference whose URI is
    ``urlbase % (link_base, text.lower())``.
    """
    def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
        # Normalize here instead of using mutable default arguments.
        if options is None:
            options = {}
        if content is None:
            content = []
        node = docutils.nodes.reference(rawtext, text, refuri=(urlbase % (inliner.document.settings.link_base, text.lower())), **options)
        return [node], []
    docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Fallback role used when a reference has no explicit role: link the
    text into the admindocs section selected by the document's
    ``default_reference_context`` setting (a key of ROLES).
    """
    # Normalize here instead of using mutable default arguments.
    if options is None:
        options = {}
    if content is None:
        content = []
    context = inliner.document.settings.default_reference_context
    node = docutils.nodes.reference(rawtext, text, refuri=(ROLES[context] % (inliner.document.settings.link_base, text.lower())), **options)
    return [node], []
if docutils_is_available:
    # Make the admindocs cross-reference role the default interpreted role
    # and register one named role per ROLES entry.
    docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
    docutils.parsers.rst.roles.DEFAULT_INTERPRETED_ROLE = 'cmsreference'

    for name, urlbase in ROLES.items():
        create_reference_role(name, urlbase)
|
gilt/incubator-airflow | refs/heads/hbc_prod | airflow/migrations/versions/2e82aab8ef20_rename_user_table.py | 62 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""rename user table
Revision ID: 2e82aab8ef20
Revises: 1968acfc09e3
Create Date: 2016-04-02 19:28:15.211915
"""
# revision identifiers, used by Alembic.
revision = '2e82aab8ef20'
down_revision = '1968acfc09e3'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Rename the ``user`` table to ``users``."""
    op.rename_table('user', 'users')
def downgrade():
    """Revert: rename the ``users`` table back to ``user``."""
    op.rename_table('users', 'user')
|
jiachenning/odoo | refs/heads/8.0 | addons/web_diagram/controllers/main.py | 70 | import openerp
class DiagramView(openerp.http.Controller):
    """JSON controller that feeds the web_diagram widget with the nodes,
    connectors and per-node display options of a workflow-style diagram.
    """

    @openerp.http.route('/web_diagram/diagram/get_diagram_info', type='json', auth='user')
    def get_diagram_info(self, req, id, model, node, connector,
                         src_node, des_node, label, **kw):
        """Return the diagram data for record ``id`` of ``model``, whose
        nodes live in the ``node`` model and whose arrows live in the
        ``connector`` model (``src_node``/``des_node`` are the connector
        fields naming its endpoints, ``label`` its label fields).
        """
        visible_node_fields = kw.get('visible_node_fields', [])
        invisible_node_fields = kw.get('invisible_node_fields', [])
        node_fields_string = kw.get('node_fields_string', [])
        connector_fields = kw.get('connector_fields', [])
        connector_fields_string = kw.get('connector_fields_string', [])

        bgcolors = {}
        shapes = {}
        bgcolor = kw.get('bgcolor', '')
        shape = kw.get('shape', '')

        # 'bgcolor'/'shape' arrive as "value:python_expr;..." specs from the
        # view definition; split them into {value: expr} maps that are
        # evaluated against each node record further down.
        if bgcolor:
            for color_spec in bgcolor.split(';'):
                if color_spec:
                    colour, color_state = color_spec.split(':')
                    bgcolors[colour] = color_state

        if shape:
            for shape_spec in shape.split(';'):
                if shape_spec:
                    shape_colour, shape_color_state = shape_spec.split(':')
                    shapes[shape_colour] = shape_color_state

        ir_view = req.session.model('ir.ui.view')
        graphs = ir_view.graph_get(
            int(id), model, node, connector, src_node, des_node, label,
            (140, 180), req.session.context)

        nodes = graphs['nodes']
        transitions = graphs['transitions']
        isolate_nodes = {}
        for blnk_node in graphs['blank_nodes']:
            isolate_nodes[blnk_node['id']] = blnk_node
        else:
            # NOTE: this 'else' always runs (there is no 'break' above).
            # Collect the y coordinates of nodes pinned at x == 20 so that
            # isolated nodes can be stacked below the lowest one.
            # (Python 2: map()/filter() return lists here, so the truthiness
            # test on ``y`` works as intended.)
            y = map(lambda t: t['y'], filter(lambda x: x['y'] if x['x'] == 20 else None, nodes.values()))
            y_max = (y and max(y)) or 120

        # Seed one connector dict per transition with its endpoint node ids.
        connectors = {}
        list_tr = []
        for tr in transitions:
            list_tr.append(tr)
            connectors.setdefault(tr, {
                'id': int(tr),
                's_id': transitions[tr][0],
                'd_id': transitions[tr][1]
            })

        connector_tr = req.session.model(connector)
        connector_ids = connector_tr.search([('id', 'in', list_tr)], 0, 0, 0, req.session.context)
        data_connectors = connector_tr.read(connector_ids, connector_fields, req.session.context)

        # Enrich each connector with display names, its label and the
        # requested extra fields.
        for tr in data_connectors:
            transition_id = str(tr['id'])
            _sourceid, label = graphs['label'][transition_id]
            t = connectors[transition_id]
            t.update(
                source=tr[src_node][1],
                destination=tr[des_node][1],
                options={},
                signal=label
            )
            for i, fld in enumerate(connector_fields):
                t['options'][connector_fields_string[i]] = tr[fld]

        # Find the node model's field that points back at ``model`` so all
        # node records belonging to this diagram can be fetched.
        fields = req.session.model('ir.model.fields')
        field_ids = fields.search([('model', '=', model), ('relation', '=', node)], 0, 0, 0, req.session.context)
        field_data = fields.read(field_ids, ['relation_field'], req.session.context)
        node_act = req.session.model(node)
        search_acts = node_act.search([(field_data[0]['relation_field'], '=', id)], 0, 0, 0, req.session.context)
        data_acts = node_act.read(search_acts, invisible_node_fields + visible_node_fields, req.session.context)

        for act in data_acts:
            n = nodes.get(str(act['id']))
            if not n:
                # Isolated node (no transitions): place it in the left-hand
                # column, below the lowest already-placed node.
                n = isolate_nodes.get(act['id'], {})
                y_max += 140
                n.update(x=20, y=y_max)
                nodes[act['id']] = n
            n.update(
                id=act['id'],
                color='white',
                options={}
            )
            # SECURITY NOTE: the color/shape expressions come from the view
            # definition and are eval'd against the raw record values.
            for color, expr in bgcolors.items():
                if eval(expr, act):
                    n['color'] = color
            for shape, expr in shapes.items():
                if eval(expr, act):
                    n['shape'] = shape
            for i, fld in enumerate(visible_node_fields):
                n['options'][node_fields_string[i]] = act[fld]

        _id, name = req.session.model(model).name_get([id], req.session.context)[0]
        return dict(nodes=nodes,
                    conn=connectors,
                    name=name,
                    parent_field=graphs['node_parent_field'])
|
haad/ansible | refs/heads/devel | lib/ansible/modules/notification/campfire.py | 114 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: campfire
version_added: "1.2"
short_description: Send a message to Campfire
description:
- Send a message to Campfire.
- Messages with newlines will result in a "Paste" message being sent.
options:
subscription:
description:
- The subscription name to use.
required: true
token:
description:
- API token.
required: true
room:
description:
- Room number to which the message should be sent.
required: true
msg:
description:
- The message body.
required: true
notify:
description:
- Send a notification sound before the message.
required: false
choices: ["56k", "bell", "bezos", "bueller", "clowntown",
"cottoneyejoe", "crickets", "dadgummit", "dangerzone",
"danielsan", "deeper", "drama", "greatjob", "greyjoy",
"guarantee", "heygirl", "horn", "horror",
"inconceivable", "live", "loggins", "makeitso", "noooo",
"nyan", "ohmy", "ohyeah", "pushit", "rimshot",
"rollout", "rumble", "sax", "secret", "sexyback",
"story", "tada", "tmyk", "trololo", "trombone", "unix",
"vuvuzela", "what", "whoomp", "yeah", "yodel"]
# informational: requirements for nodes
requirements: [ ]
author: "Adam Garside (@fabulops)"
'''
EXAMPLES = '''
- campfire:
subscription: foo
token: 12345
room: 123
msg: Task completed.
- campfire:
subscription: foo
token: 12345
room: 123
notify: loggins
msg: Task completed ... with feeling.
'''
import cgi
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
def main():
    """Ansible module entry point: optionally play a notification sound,
    then post ``msg`` to a Campfire room via its XML REST API and exit
    with the module result (failing on any non-2xx response).
    """
    module = AnsibleModule(
        argument_spec=dict(
            subscription=dict(required=True),
            token=dict(required=True, no_log=True),
            room=dict(required=True),
            msg=dict(required=True),
            notify=dict(required=False,
                        choices=["56k", "bell", "bezos", "bueller",
                                 "clowntown", "cottoneyejoe",
                                 "crickets", "dadgummit", "dangerzone",
                                 "danielsan", "deeper", "drama",
                                 "greatjob", "greyjoy", "guarantee",
                                 "heygirl", "horn", "horror",
                                 "inconceivable", "live", "loggins",
                                 "makeitso", "noooo", "nyan", "ohmy",
                                 "ohyeah", "pushit", "rimshot",
                                 "rollout", "rumble", "sax", "secret",
                                 "sexyback", "story", "tada", "tmyk",
                                 "trololo", "trombone", "unix",
                                 "vuvuzela", "what", "whoomp", "yeah",
                                 "yodel"]),
        ),
        supports_check_mode=False
    )

    subscription = module.params["subscription"]
    token = module.params["token"]
    room = module.params["room"]
    msg = module.params["msg"]
    notify = module.params["notify"]

    URI = "https://%s.campfirenow.com" % subscription
    # XML payload templates: sound notification vs. plain text message.
    NSTR = "<message><type>SoundMessage</type><body>%s</body></message>"
    MSTR = "<message><body>%s</body></message>"
    AGENT = "Ansible/1.2"

    # Hack to add basic auth username and password the way fetch_url expects:
    # Campfire authenticates with the API token as username and 'X' as password.
    module.params['url_username'] = token
    module.params['url_password'] = 'X'

    target_url = '%s/room/%s/speak.xml' % (URI, room)
    headers = {'Content-Type': 'application/xml',
               'User-agent': AGENT}

    # Send some audible notification if requested.
    # NOTE(review): cgi.escape is deprecated and removed in modern Python 3;
    # fine for the legacy runtimes this module targets, revisit if ported.
    if notify:
        response, info = fetch_url(module, target_url, data=NSTR % cgi.escape(notify), headers=headers)
        if info['status'] not in [200, 201]:
            module.fail_json(msg="unable to send msg: '%s', campfire api"
                                 " returned error code: '%s'" %
                                 (notify, info['status']))

    # Send the message
    response, info = fetch_url(module, target_url, data=MSTR % cgi.escape(msg), headers=headers)
    if info['status'] not in [200, 201]:
        module.fail_json(msg="unable to send msg: '%s', campfire api"
                             " returned error code: '%s'" %
                             (msg, info['status']))

    module.exit_json(changed=True, room=room, msg=msg, notify=notify)


if __name__ == '__main__':
    main()
|
pizzapanther/Super-Neutron-Drive | refs/heads/master | server/editor/admin.py | 1 | from django.contrib import admin
from .models import BeamApiKey, EKey
class BAdmin (admin.ModelAdmin):
    """Admin options for BeamApiKey records."""
    list_display = ('beam', 'user', 'generated')
    date_hierarchy = 'generated'

    raw_id_fields = ('user',)
    # Grappelli-style autocomplete lookup for the raw-id 'user' field.
    autocomplete_lookup_fields = {'fk': ['user']}
class EAdmin (admin.ModelAdmin):
    """Admin options for EKey records."""
    list_display = ('beam', 'user', 'created')
    date_hierarchy = 'created'

    raw_id_fields = ('user',)
    # Grappelli-style autocomplete lookup for the raw-id 'user' field.
    autocomplete_lookup_fields = {'fk': ['user']}
admin.site.register(EKey, EAdmin)
admin.site.register(BeamApiKey, BAdmin)
|
jjack15/CS402-Project-UTK | refs/heads/master | OnlinePythonTutor/v3/opt-ipy-server.py | 1 | # Adapted from https://github.com/facebook/tornado/tree/master/demos/websocket
import logging
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import os.path
from tornado.options import define, options
import json
define("port", default=8888, help="run on the given port", type=int)
class Application(tornado.web.Application):
    """Tornado application: routes for static assets, the UI page, the
    websocket channel, and the instrumentation HTTP POST endpoints.
    """

    # singleton
    # Most recent complete execution trace; None until the first POST to
    # /wholetrace (or after /clear).
    current_full_trace = None

    def __init__(self):
        handlers = [
            (r"/js/(.*)",
             tornado.web.StaticFileHandler,
             {"path": os.path.join(os.path.dirname(__file__), 'js/')}),
            (r"/css/(.*)",
             tornado.web.StaticFileHandler,
             {"path": os.path.join(os.path.dirname(__file__), 'css/')}),
            (r"/", MainHandler),
            (r"/chatsocket", ChatSocketHandler),

            # respond to HTTP POST requests:
            (r"/wholetrace", WholeTraceHandler),
            (r"/difftrace", DiffTraceHandler),
            (r"/clear", ClearHandler),
        ]
        tornado.web.Application.__init__(self, handlers)
class MainHandler(tornado.web.RequestHandler):
    """Serves the single-page visualizer UI."""

    def get(self):
        self.render("opt-ipy.html")
class WholeTraceHandler(tornado.web.RequestHandler):
    """Receives a complete execution trace (JSON POST body) and broadcasts
    it to every connected websocket client.
    """

    def post(self):
        message = self.request.body
        dat = json.loads(message.decode())
        # Cache the trace so late-joining clients receive it on connect.
        Application.current_full_trace = dat
        js_msg = dict(payload=Application.current_full_trace, type='wholetrace')
        ChatSocketHandler.send_updates(json.dumps(js_msg))
class DiffTraceHandler(tornado.web.RequestHandler):
    """Placeholder endpoint for incremental trace diffs (not implemented)."""

    def post(self):
        # TODO: implement me using, say,
        # https://code.google.com/p/google-diff-match-patch/
        pass
class ClearHandler(tornado.web.RequestHandler):
    """Clears the cached trace and tells every connected client to reset."""

    def post(self):
        Application.current_full_trace = None
        js_msg = dict(type='clear')
        ChatSocketHandler.send_updates(json.dumps(js_msg))
class ChatSocketHandler(tornado.websocket.WebSocketHandler):
    """Websocket channel that pushes trace updates to connected browsers."""

    # All currently-open websocket connections.
    waiters = set()

    def allow_draft76(self):
        # for iOS 5.0 Safari
        return True

    def open(self):
        ChatSocketHandler.waiters.add(self)
        # when a new connection is made, send the entire trace to only
        # THIS browser
        if Application.current_full_trace:
            js_msg = dict(payload=Application.current_full_trace, type='wholetrace')
            self.write_message(json.dumps(js_msg))

    def on_close(self):
        ChatSocketHandler.waiters.remove(self)

    @classmethod
    def send_updates(cls, chat):
        """Broadcast *chat* (an already-serialized JSON string) to every
        connected client, best-effort."""
        #logging.info("sending message to %d waiters", len(cls.waiters))
        for waiter in cls.waiters:
            try:
                waiter.write_message(chat)
            except Exception:
                # A failed send to one client must not prevent delivery to
                # the rest; log and continue. (This was a bare ``except:``,
                # which would also have swallowed KeyboardInterrupt and
                # SystemExit.)
                logging.error("Error sending message", exc_info=True)
def main():
    """Parse command-line options and run the tornado server forever."""
    tornado.options.parse_command_line()
    app = Application()
    app.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
|
asteca/ASteCA | refs/heads/master | packages/synth_clust/completeness_rm.py | 1 |
import numpy as np
def main(isoch_binar, completeness):
    """
    Thin out a synthetic cluster to mimic the observed completeness.

    ``completeness`` carries (at least) the magnitude bin edges and, per
    bin, the fraction of stars that should be *removed* (as estimated from
    the real observation by the luminosity module).

    Parameters
    ----------
    isoch_binar : 2D array whose first row holds the main magnitudes.
    completeness : sequence whose first two items are (bin_edges, comp_perc).

    Returns
    -------
    2D array with the appropriate number of stars removed per magnitude
    bin (the input array itself when nothing reaches the completeness
    magnitude).
    """
    edges, removal_fracs = completeness[0], completeness[1]

    # Nothing to do unless the isochrone extends past the magnitude where
    # the completeness loss begins.
    if np.max(isoch_binar[0]) <= edges[0]:
        return isoch_binar

    # Bin index for each star's main magnitude; magnitudes below the first
    # edge are assigned index 0.
    star_bins = np.searchsorted(edges, isoch_binar[0], side='left')
    # Star count per bin (equivalent to np.histogram plus the below-edge bin).
    per_bin = np.bincount(star_bins, minlength=len(removal_fracs))
    # Number of stars to drop per bin, rounded to a non-negative integer.
    n_remove = np.rint(per_bin * removal_fracs).astype(int).clip(0)

    # The stars were already shuffled upstream ('mass_interp'), so taking
    # the first n indices of each bin is not biased toward any star type.
    drop_idx = np.concatenate([
        np.where(star_bins == b)[0][:n] for b, n in enumerate(n_remove)
    ])

    # Remove the selected columns from every row of the isochrone at once.
    return np.delete(isoch_binar, drop_idx, axis=1)
# DEPRECATED 03/12/ #445
# def indxRem(di, rang_indx, cmpl_rnd):
# """
# Select a fixed number (given by 'di') of random indexes in 'rang_indx'.
# These correspond to the stars that will be removed in each magnitude
# range.
# Source: https://stackoverflow.com/a/46079837/1391441
# """
# lens = np.array([len(_) for _ in rang_indx])
# di0 = np.minimum(lens, di)
# invalid_mask = lens[:, None] <= np.arange(lens.max())
# # Create a 2D random array in interval [0,1) to cover the max length of
# # subarrays.
# rand_nums = np.copy(cmpl_rnd[:len(lens) * lens.max()].reshape(
# len(lens), lens.max()))
# # For each subarray, set the invalid places to 1.0. Get argsort for each
# # row. Those 1s corresponding to the invalid places would stay at the back
# # because there were no 1s in the original random array. Thus, we have the
# # indices array.
# rand_nums[invalid_mask] = 1
# # Slice each row of those indices array to the extent of the lengths
# # listed in di.
# shuffled_indx = np.argpartition(rand_nums, lens - 1, axis=1)
# # Start a loop and slice each subarray from 'rang_indx' using those sliced
# # indices.
# out = []
# for i, all_idx in enumerate(shuffled_indx):
# if lens[i] > 0:
# slice_idx = all_idx[:di0[i]]
# out += rang_indx[i][slice_idx].tolist()
# return np.asarray(out)
# def idxFind(N, c_indx):
# """
# Store the actual indexes of stars in the accepted edge ranges, stored in
# each corresponding range.
# """
# # Reject stars in the 0th position. These are stars below the value
# # where the completeness loss starts.
# mask = (c_indx > 0)
# # Keep those stars with indexes in the accepted magnitude range.
# c_mask = c_indx[mask]
# # Ordered indexes for the masked stars.
# indices = np.arange(c_indx.size)[mask]
# # Indexes that would sort 'c_mask'.
# sorting_idx = np.argsort(c_mask, kind='mergesort')
# # Keep only the ordered indexes that are associated with 'c_mask'.
# ind_sorted = indices[sorting_idx]
# # Indexes of ordered indexes (N) positioned into 'c_mask'.
# x = np.searchsorted(
# c_mask, range(N), side='right', sorter=sorting_idx)
# # Store star indices into each edge range.
# rang_indx = [ind_sorted[x[i]:x[i + 1]] for i in range(N - 1)]
# return rang_indx
|
DenL/pogom-webhook | refs/heads/webhook | pogom/pgoapi/protos/POGOProtos/Networking/Requests/Messages/UpgradePokemonMessage_pb2.py | 15 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/Messages/UpgradePokemonMessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/Messages/UpgradePokemonMessage.proto',
package='POGOProtos.Networking.Requests.Messages',
syntax='proto3',
serialized_pb=_b('\nCPOGOProtos/Networking/Requests/Messages/UpgradePokemonMessage.proto\x12\'POGOProtos.Networking.Requests.Messages\"+\n\x15UpgradePokemonMessage\x12\x12\n\npokemon_id\x18\x01 \x01(\x06\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_UPGRADEPOKEMONMESSAGE = _descriptor.Descriptor(
name='UpgradePokemonMessage',
full_name='POGOProtos.Networking.Requests.Messages.UpgradePokemonMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pokemon_id', full_name='POGOProtos.Networking.Requests.Messages.UpgradePokemonMessage.pokemon_id', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=112,
serialized_end=155,
)
DESCRIPTOR.message_types_by_name['UpgradePokemonMessage'] = _UPGRADEPOKEMONMESSAGE
UpgradePokemonMessage = _reflection.GeneratedProtocolMessageType('UpgradePokemonMessage', (_message.Message,), dict(
DESCRIPTOR = _UPGRADEPOKEMONMESSAGE,
__module__ = 'POGOProtos.Networking.Requests.Messages.UpgradePokemonMessage_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Messages.UpgradePokemonMessage)
))
_sym_db.RegisterMessage(UpgradePokemonMessage)
# @@protoc_insertion_point(module_scope)
|
rosscdh/pinax-eventlog | refs/heads/master | pinax/eventlog/tests/tests.py | 29 | from django.test import TestCase
class Tests(TestCase):
    """Placeholder test case for the eventlog app (no assertions yet)."""

    def setUp(self):
        # Nothing to set up yet.
        pass
|
HAYASAKA-Ryosuke/faker | refs/heads/master | faker/providers/company/de_DE/__init__.py | 21 | # coding=utf-8
from __future__ import unicode_literals
from .. import Provider as CompanyProvider
class Provider(CompanyProvider):
    """German (de_DE) company name provider."""

    # Name patterns used to build company names.
    formats = (
        '{{last_name}} {{company_suffix}}',
        '{{last_name}} {{last_name}} {{company_suffix}}',
        '{{last_name}}',
    )

    # German legal forms; duplicated entries (AG, GmbH, KG, ...) presumably
    # weight the common forms more heavily when a suffix is sampled.
    company_suffixes = (
        'AG', 'AG', 'AG', 'AG', 'AG & Co. KG', 'AG & Co. KGaA', 'AG & Co. OHG',
        'GbR', 'GbR', 'GmbH', 'GmbH', 'GmbH', 'GmbH', 'GmbH & Co. KG',
        'GmbH & Co. KG', 'GmbH & Co. KGaA', 'GmbH & Co. OHG', 'KG', 'KG', 'KG',
        'KGaA', 'OHG mbH', 'Stiftung & Co. KG', 'Stiftung & Co. KGaA', 'e.G.',
        'e.V.',
    )
|
syci/l10n-spain | refs/heads/8.0 | l10n_es_aeat_sii/wizards/__init__.py | 7 | # -*- coding: utf-8 -*-
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import account_invoice_refund
from . import aeat_sii_password
from . import send_first_semester
|
katrid/django | refs/heads/master | django/contrib/gis/utils/srs.py | 450 | from django.contrib.gis.gdal import SpatialReference
from django.db import DEFAULT_DB_ALIAS, connections
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
                  database=None):
    """
    This function takes a GDAL SpatialReference system and adds its information
    to the `spatial_ref_sys` table of the spatial backend. Doing this enables
    database-level spatial transformations for the backend. Thus, this utility
    is useful for adding spatial reference systems not included by default with
    the backend:

    >>> from django.contrib.gis.utils import add_srs_entry
    >>> add_srs_entry(3857)

    Keyword Arguments:
     auth_name:
       This keyword may be customized with the value of the `auth_name` field.
       Defaults to 'EPSG'.

     auth_srid:
       This keyword may be customized with the value of the `auth_srid` field.
       Defaults to the SRID determined by GDAL.

     ref_sys_name:
       For SpatiaLite users only, sets the value of the `ref_sys_name` field.
       Defaults to the name determined by GDAL.

     database:
      The name of the database connection to use; the default is the value
      of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
      is 'default').
    """
    if not database:
        database = DEFAULT_DB_ALIAS
    connection = connections[database]

    # Only spatial backends expose 'spatial_version' on their operations.
    if not hasattr(connection.ops, 'spatial_version'):
        raise Exception('The `add_srs_entry` utility only works '
                        'with spatial backends.')
    if not connection.features.supports_add_srs_entry:
        raise Exception('This utility does not support your database backend.')
    SpatialRefSys = connection.ops.spatial_ref_sys()

    # If argument is not a `SpatialReference` instance, use it as parameter
    # to construct a `SpatialReference` instance.
    if not isinstance(srs, SpatialReference):
        srs = SpatialReference(srs)

    if srs.srid is None:
        raise Exception('Spatial reference requires an SRID to be '
                        'compatible with the spatial backend.')

    # Initializing the keyword arguments dictionary for both PostGIS
    # and SpatiaLite.
    kwargs = {'srid': srs.srid,
              'auth_name': auth_name,
              'auth_srid': auth_srid or srs.srid,
              'proj4text': srs.proj4,
              }

    # Backend-specific fields for the SpatialRefSys model.
    srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()}
    if 'srtext' in srs_field_names:
        kwargs['srtext'] = srs.wkt
    if 'ref_sys_name' in srs_field_names:
        # Spatialite specific
        kwargs['ref_sys_name'] = ref_sys_name or srs.name

    # Creating the spatial_ref_sys model.
    try:
        # Try getting via SRID only, because using all kwargs may
        # differ from exact wkt/proj in database.
        SpatialRefSys.objects.using(database).get(srid=srs.srid)
    except SpatialRefSys.DoesNotExist:
        SpatialRefSys.objects.using(database).create(**kwargs)


# Alias is for backwards-compatibility purposes.
add_postgis_srs = add_srs_entry
|
apavlo/peloton | refs/heads/master | script/porting/push.py | 25 | #!/usr/bin/python
## ==============================================
## GOAL : Push all branches to github
## ==============================================
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import sys
import shlex
import shutil
import tempfile
import os
import time
import logging
from subprocess import Popen, PIPE
## ==============================================
## LOGGING CONFIGURATION
## ==============================================
LOG = logging.getLogger(__name__)
LOG_handler = logging.StreamHandler()
LOG_formatter = logging.Formatter(
fmt='%(asctime)s [%(funcName)s:%(lineno)03d] %(levelname)-5s: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S'
)
LOG_handler.setFormatter(LOG_formatter)
LOG.addHandler(LOG_handler)
LOG.setLevel(logging.INFO)
## ==============================================
## CONFIGURATION
## ==============================================
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = reduce(os.path.join, [BASE_DIR, os.path.pardir, os.path.pardir])
BUILD_DIR = reduce(os.path.join, [ROOT_DIR, "build"])
SRC_DIR = reduce(os.path.join, [BUILD_DIR, "src"])
TOOLS_DIR = reduce(os.path.join, [BUILD_DIR, "tools"])
initdb = os.path.join(TOOLS_DIR, "initdb")
pg_ctl = os.path.join(TOOLS_DIR, "pg_ctl")
## ==============================================
## UTILS
## ==============================================
def exec_cmd(cmd):
    """
    Execute the external command, echoing its stdout and stderr.

    :param cmd: the command line to run; tokenized with shlex.split().
    :raises RuntimeError: if the command exits with a non-zero status.
        (This was previously an ``assert``, which is silently stripped
        when Python runs with -O, letting failures go unnoticed.)
    """
    args = shlex.split(cmd)
    proc = Popen(args, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    exitcode = proc.returncode
    print(out)
    print(err)
    sys.stdout.flush()
    if exitcode != 0:
        raise RuntimeError("Command failed (exit code %d): %s" % (exitcode, cmd))
# Remote branches to mirror; every entry is "origin/<branch-name>".
branches = [
    "origin/boot", "origin/ddl", "origin/fork2", "origin/imcs", "origin/mover", "origin/perf", "origin/reorg", "origin/stats",
    "origin/types", "origin/bridge", "origin/dynamic", "origin/format", "origin/logging", "origin/mvcc",
    "origin/plantree", "origin/shm", "origin/storage", "origin/valgrind", "origin/case", "origin/executor", "origin/guc",
    "origin/master", "origin/nestloopindex", "origin/plantype", "origin/shmcore", "origin/tbb", "origin/varlen", "origin/cpp",
    "origin/fork", "origin/mm", "origin/osx", "origin/plog", "origin/shm_temp", "origin/type", "origin/wflag"
]
## ==============================================
## MAIN
## ==============================================
if __name__ == '__main__':
    LOG.info("Pushing branches")

    for branch in branches:
        branch_sub_list = branch.split("/")
        # Local branch name: the part after the "origin/" prefix.
        branch_sub = branch_sub_list[1]
        LOG.info("Pushing branch :: " + branch)

        # First, checkout the remote-tracking ref.
        cmd = "git checkout " + branch
        exec_cmd(cmd)

        # Another checkout, to get on the local branch of the same name.
        cmd = "git checkout " + branch_sub
        exec_cmd(cmd)

        # Finally, push the current branch to origin.
        cmd = "git push origin"
        exec_cmd(cmd)
|
eworm-de/pacman | refs/heads/master | test/pacman/tests/sync103.py | 27 | self.description = "Sysupgrade with a local package not existing in sync db"
sp = pmpkg("spkg")
self.addpkg2db("sync", sp)
lp = pmpkg("lpkg")
self.addpkg2db("local", lp)
self.args = "-Su"
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=lpkg")
self.addrule("!PKG_EXIST=spkg")
|
opensim-org/opensim-core | refs/heads/master | Bindings/Python/tests/test_swig_additional_interface.py | 1 | """The tests here ensure the proper functionality of modifications/additions we
make to the C++ API, via the SWIG interface (*.i) file.
"""
import os
import unittest
import opensim as osim
# TODO __str__() for numeric types.
# TODO iterators for Vec3, Vector, Set's, and ComponentList's.
# TODO typemaps for Vec3 and Vector.
# TODO indexing for Vec3 and Vector.
# TODO operator overloading operator+().
# TODO size() -> __len__()
test_dir = os.path.join(os.path.dirname(os.path.abspath(osim.__file__)),
'tests')
# Silence warning messages if mesh (.vtp) files cannot be found.
osim.Model.setDebugLevel(0)
class TestSwigAddtlInterface(unittest.TestCase):
    def test_markAdopted1(self):
        """Ensures that we can tell an object that some other object is managing
        its memory.
        """
        a = osim.Model()
        assert a.this
        assert a.thisown
        # After adoption the SWIG pointer is still valid, but Python no
        # longer owns (and thus will not delete) the underlying C++ object.
        a._markAdopted()
        assert a.this
        assert not a.thisown
    def test_markAdopted2(self):
        """Exercise the adoption machinery across many component types:
        forces, probes, analyses, controllers, bodies, joints and
        constraints are added to a model, and none of it may segfault.
        """
        a = osim.Model()
        ground = a.getGround()

        # We just need the following not to cause a segfault.

        # Model add*
        pa = osim.PathActuator()
        pa.setName('pa')
        pa.addNewPathPoint("pa-point1", ground, osim.Vec3(0.0,0.0,0.0))
        pa.addNewPathPoint("pa-point2", ground, osim.Vec3(1.0,0.0,0.0))
        a.addForce(pa)

        probe = osim.Umberger2010MuscleMetabolicsProbe()
        probe.setName('probe')
        a.addProbe(probe)

        ma = osim.MuscleAnalysis()
        ma.setName('ma')
        a.addAnalysis(ma)

        pc = osim.PrescribedController()
        pc.setName('pc')
        a.addController(pc)

        body = osim.Body('body1',
                         1.0,
                         osim.Vec3(0, 0, 0),
                         osim.Inertia(0, 0, 0)
                         )

        loc_in_parent = osim.Vec3(0, 0, 0)
        orient_in_parent = osim.Vec3(0, 0, 0)
        loc_in_body = osim.Vec3(0, 0, 0)
        orient_in_body = osim.Vec3(0, 0, 0)
        print("creating Weld Joint..")
        joint = osim.WeldJoint("weld_joint",
                               a.getGround(),
                               loc_in_parent, orient_in_parent,
                               body,
                               loc_in_body, orient_in_parent)
        print("adding a body ..")
        a.addBody(body)
        print("adding a joint ..")
        a.addJoint(joint)

        print("Creating a ConstantDistanceConstraint..")
        constr = osim.ConstantDistanceConstraint()
        constr.setBody1ByName("ground")
        constr.setBody1PointLocation(osim.Vec3(0, 0, 0))
        constr.setBody2ByName("body")
        constr.setBody2PointLocation(osim.Vec3(1, 0, 0))
        constr.setConstantDistance(1)
        a.addConstraint(constr)

        f = osim.BushingForce("bushing", "ground", "body",
                              osim.Vec3(2, 2, 2), osim.Vec3(1, 1, 1),
                              osim.Vec3(0, 0, 0), osim.Vec3(0, 0, 0))
        a.addForce(f)

        f2 = osim.BushingForce()
        a.addForce(f2)

        f3 = osim.SpringGeneralizedForce()
        a.addForce(f3)

        # Loading a model from file and adding a force to it must also not
        # crash once ownership has been transferred.
        model = osim.Model(os.path.join(test_dir, "arm26.osim"))
        g = osim.CoordinateActuator('r_shoulder_elev')
        model.addForce(g)
def test_Joint(self):
a = osim.Model()
body = osim.Body('body',
1.0,
osim.Vec3(0, 0, 0),
osim.Inertia(0, 0, 0)
)
loc_in_parent = osim.Vec3(0, -0, 0)
orient_in_parent = osim.Vec3(0, 0, 0)
loc_in_body = osim.Vec3(0, 0, 0)
orient_in_body = osim.Vec3(0, 0, 0)
joint = osim.FreeJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
spatialTransform = osim.SpatialTransform()
joint = osim.CustomJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent, spatialTransform)
del joint
joint = osim.EllipsoidJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent, osim.Vec3(1, 1, 1))
del joint
joint = osim.BallJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.PinJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.SliderJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.WeldJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.GimbalJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.UniversalJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
joint = osim.PlanarJoint("joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
del joint
def test_markAdoptedSets(self):
# Set's.
fus = osim.FunctionSet()
fu1 = osim.Constant()
fus.adoptAndAppend(fu1)
del fus
del fu1
s = osim.ScaleSet()
o = osim.Scale()
s.adoptAndAppend(o)
del s
del o
s = osim.ControlSet()
o = osim.ControlLinear()
s.adoptAndAppend(o)
del s
del o
s = osim.BodyScaleSet()
o = osim.BodyScale()
s.adoptAndAppend(o)
del s
del o
s = osim.PathPointSet()
o = osim.PathPoint()
s.adoptAndAppend(o)
del s
del o
s = osim.IKTaskSet()
o = osim.IKMarkerTask()
s.adoptAndAppend(o)
del s
del o
s = osim.MarkerPairSet()
o = osim.MarkerPair()
s.adoptAndAppend(o)
del s
del o
s = osim.MeasurementSet()
o = osim.Measurement()
s.adoptAndAppend(o)
del s
del o
s = osim.ForceSet()
o = osim.CoordinateLimitForce()
s.adoptAndAppend(o)
del s
del o
s = osim.ForceSet()
o = osim.SpringGeneralizedForce()
s.append(o)
s = osim.ProbeSet()
o = osim.Umberger2010MuscleMetabolicsProbe()
s.adoptAndAppend(o)
del s
del o
a = osim.Model()
body = osim.Body('body',
1.0,
osim.Vec3(0, 0, 0),
osim.Inertia(0, 0, 0)
)
loc_in_parent = osim.Vec3(0, -0, 0)
orient_in_parent = osim.Vec3(0, 0, 0)
loc_in_body = osim.Vec3(0, 0, 0)
orient_in_body = osim.Vec3(0, 0, 0)
joint = osim.WeldJoint("weld_joint",
a.getGround(),
loc_in_parent, orient_in_parent,
body,
loc_in_body, orient_in_parent)
a.addBody(body)
constr = osim.ConstantDistanceConstraint()
constr.setBody1ByName("ground")
constr.setBody1PointLocation(osim.Vec3(0, 0, 0))
constr.setBody2ByName("body")
constr.setBody2PointLocation(osim.Vec3(1, 0, 0))
constr.setConstantDistance(1)
a.addConstraint(constr)
def test_PrescribedController_prescribeControlForActuator(self):
# Test memory management for
# PrescribedController::prescribeControlForActuator().
model = osim.Model()
# Body.
body = osim.Body('b1', 1.0, osim.Vec3(0, 0, 0), osim.Inertia(0, 0, 0))
model.addBody(body)
# Joint.
joint = osim.PinJoint('j1', model.getGround(), body)
model.addJoint(joint)
# Actuator.
actu = osim.CoordinateActuator()
actu.setName('actu')
actu.setCoordinate(joint.get_coordinates(0))
model.addForce(actu)
# Controller.
contr = osim.PrescribedController()
contr.addActuator(actu)
self.assertRaises(RuntimeError,
contr.prescribeControlForActuator, 1, osim.Constant(3))
# The following calls should not cause a memory leak:
contr.prescribeControlForActuator(0, osim.Constant(2))
contr.prescribeControlForActuator('actu', osim.Constant(4))
def test_set_iterator(self):
fs = osim.FunctionSet()
f1 = osim.Constant()
f1.setName("myfunc1")
fs.adoptAndAppend(f1)
f2 = osim.Constant()
f2.setName("myfunc2")
fs.adoptAndAppend(f2)
f3 = osim.Constant()
f3.setName("myfunc3")
fs.adoptAndAppend(f3)
names = ['myfunc1', 'myfunc2', 'myfunc3']
i = 0
for func in fs:
assert func.getName() == names[i]
i += 1
# Test key-value iterator.
j = 0
for k, v in fs.items():
assert k == names[j]
assert k == v.getName()
j += 1
|
epyatopal/geocoder-1 | refs/heads/master | geocoder/bing.py | 2 | #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
from geocoder.base import Base
from geocoder.keys import bing_key
import re
class Bing(Base):
    """Geocoder provider backed by the Bing Maps REST Services.

    The Bing(tm) Maps REST API exposes geocoding, static-map generation,
    imagery metadata and routing over plain HTTP.

    API reference:
        http://msdn.microsoft.com/en-us/library/ff701714.aspx
    Obtain an API key at:
        https://www.bingmapsportal.com/
    """
    provider = 'bing'
    method = 'geocode'

    def __init__(self, location, **kwargs):
        self.url = 'http://dev.virtualearth.net/REST/v1/Locations'
        self.location = location
        self.headers = {
            'Referer': "http://addxy.com/",
            'User-agent': 'Mozilla/5.0',
        }
        self.params = {
            'q': location,
            'o': 'json',
            'inclnb': 1,
            'key': kwargs.get('key', bing_key),
            'maxResults': 1,
        }
        self._initialize(**kwargs)

    def _catch_errors(self):
        # Bing reports request problems through statusDescription.
        status = self.parse['statusDescription']
        if status != 'OK':
            self.error = status

    def _exceptions(self):
        # Flatten the first resource (and its geocodePoints) into the
        # parse tree so the properties below can read it directly.
        result_sets = self.parse['resourceSets']
        if not result_sets:
            return
        resources = result_sets[0]['resources']
        if not resources:
            return
        self._build_tree(resources[0])
        for point in self.parse['geocodePoints']:
            self._build_tree(point)

    @property
    def lat(self):
        """Latitude of the geocode point, if one was returned."""
        coordinates = self.parse['point']['coordinates']
        return coordinates[0] if coordinates else None

    @property
    def lng(self):
        """Longitude of the geocode point, if one was returned."""
        coordinates = self.parse['point']['coordinates']
        return coordinates[1] if coordinates else None

    @property
    def address(self):
        """Full formatted address string."""
        return self.parse['address'].get('formattedAddress')

    @property
    def housenumber(self):
        """First run of digits found in the street line, if any."""
        if self.street:
            match = re.search(r'\d+', str(self.street))
            if match:
                return match.group(0)

    @property
    def street(self):
        return self.parse['address'].get('addressLine')

    @property
    def neighborhood(self):
        return self.parse['address'].get('neighborhood')

    @property
    def city(self):
        return self.parse['address'].get('locality')

    @property
    def state(self):
        return self.parse['address'].get('adminDistrict')

    @property
    def country(self):
        return self.parse['address'].get('countryRegion')

    @property
    def quality(self):
        return self.parse.get('entityType')

    @property
    def accuracy(self):
        return self.parse.get('calculationMethod')

    @property
    def postal(self):
        return self.parse['address'].get('postalCode')

    @property
    def bbox(self):
        """Bounding box as produced by ``Base._get_bbox``."""
        box = self.parse['bbox']
        if box:
            # Bing orders the box as [south, west, north, east].
            return self._get_bbox(box[0], box[1], box[2], box[3])
if __name__ == '__main__':
    # Manual smoke test; performs a live request against the Bing API.
    Bing('453 Booth Street, Ottawa Ontario').debug()
|
VikParuchuri/scan | refs/heads/master | core/tasks/__init__.py | 248 | __author__ = 'vik'
|
jenalgit/django | refs/heads/master | tests/expressions/models.py | 93 | """
Tests for F() query expression syntax.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Employee(models.Model):
    """Simple person record used as the target of F() expressions."""
    firstname = models.CharField(max_length=50)
    lastname = models.CharField(max_length=50)
    # Optional; may be NULL.
    salary = models.IntegerField(blank=True, null=True)

    def __str__(self):
        return '%s %s' % (self.firstname, self.lastname)
@python_2_unicode_compatible
class Company(models.Model):
    """Company with a required CEO and an optional point of contact,
    both referencing :class:`Employee`."""
    name = models.CharField(max_length=100)
    num_employees = models.PositiveIntegerField()
    num_chairs = models.PositiveIntegerField()
    ceo = models.ForeignKey(
        Employee,
        related_name='company_ceo_set')
    # Optional; may be NULL.
    point_of_contact = models.ForeignKey(
        Employee,
        related_name='company_point_of_contact_set',
        null=True)

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Number(models.Model):
    """Holds an integer and a nullable float, stored in custom-named
    columns (``the_integer`` / ``the_float``)."""
    integer = models.BigIntegerField(db_column='the_integer')
    # Field deliberately shares its name with the builtin ``float``;
    # it is only ever accessed as ``self.float``.
    float = models.FloatField(null=True, db_column='the_float')

    def __str__(self):
        # NOTE(review): raises TypeError when ``float`` is NULL (None);
        # presumably instances rendered via __str__ always have it set.
        return '%i, %.3f' % (self.integer, self.float)
class Experiment(models.Model):
    """Experiment with scheduling dates plus start/end timestamps and an
    estimated duration."""
    name = models.CharField(max_length=24)
    assigned = models.DateField()
    completed = models.DateField()
    estimated_time = models.DurationField()
    start = models.DateTimeField()
    end = models.DateTimeField()

    class Meta:
        # Deterministic default ordering for the tests.
        ordering = ('name',)

    def duration(self):
        """Return the actual elapsed time (``end - start``) as a timedelta."""
        return self.end - self.start
@python_2_unicode_compatible
class Time(models.Model):
    """Wraps a single nullable TimeField."""
    time = models.TimeField(null=True)

    def __str__(self):
        return "%s" % self.time
@python_2_unicode_compatible
class UUID(models.Model):
    """Wraps a single nullable UUIDField."""
    uuid = models.UUIDField(null=True)

    def __str__(self):
        return "%s" % self.uuid
|
philoniare/horizon | refs/heads/master | horizon/management/commands/startdash.py | 79 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
from optparse import make_option # noqa
import os
from django.core.management.base import CommandError # noqa
from django.core.management.templates import TemplateCommand # noqa
from django.utils.importlib import import_module # noqa
import horizon
class Command(TemplateCommand):
    """Management command that scaffolds a new Horizon dashboard from the
    bundled ``dash_template`` directory."""
    template = os.path.join(horizon.__path__[0], "conf", "dash_template")
    option_list = TemplateCommand.option_list + (
        make_option('--target',
                    dest='target',
                    action='store',
                    default=None,
                    help='The directory in which the panel '
                         'should be created. Defaults to the '
                         'current directory. The value "auto" '
                         'may also be used to automatically '
                         'create the panel inside the specified '
                         'dashboard module.'),)
    help = ("Creates a Django app directory structure for a new dashboard "
            "with the given name in the current directory or optionally in "
            "the given directory.")

    def handle(self, dash_name=None, **options):
        """Render the dashboard template into the target directory."""
        if dash_name is None:
            raise CommandError("You must provide a dashboard name.")

        # Fall back to the bundled dashboard template when none is given.
        if not options.get("template", None):
            options["template"] = self.template

        # The scaffold contains html/js/css template files as well.
        for extension in ("tmpl", "html", "js", "css"):
            options["extensions"].append(extension)

        # Refuse dashboard names that shadow an importable module.
        try:
            import_module(dash_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing "
                               "Python module and cannot be used as an app "
                               "name. Please try another name." % dash_name)

        super(Command, self).handle('dash', dash_name, **options)

        destination = options.pop("target", None)
        if not destination:
            destination = os.path.join(os.curdir, dash_name)

        # Strip the ".tmpl" suffix from the generated python files.
        for tmpl_path in glob.glob(os.path.join(destination, "*.py.tmpl")):
            os.rename(tmpl_path, tmpl_path[:-5])
|
ThiagoGarciaAlves/intellij-community | refs/heads/master | python/testData/completion/structuralType.after.py | 79 | def f(x):
x.foo
x.bar
x.bar
|
akatsoulas/coss | refs/heads/master | coss/opensource_clubs/migrations/0013_resourcespage.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-11 15:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
    """Auto-generated migration: adds the ``ResourcesPage`` Wagtail page
    model and the ``ClubProfile.is_mentor`` boolean flag.
    """

    dependencies = [
        ('wagtailimages', '0019_delete_filter'),
        ('wagtailcore', '0040_page_draft_title'),
        ('opensource_clubs', '0012_auto_20170911_1306'),
    ]

    operations = [
        migrations.CreateModel(
            name='ResourcesPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('heading_text', wagtail.wagtailcore.fields.RichTextField(blank=True, verbose_name='Text')),
                # Auto-generated StreamField definition; do not hand-edit.
                ('mentors_description', wagtail.wagtailcore.fields.StreamField((('info', wagtail.wagtailcore.blocks.StructBlock((('title', wagtail.wagtailcore.blocks.CharBlock(help_text='General content field, appropriate for questions, titles etc (max 150 chars)', max_length=150)), ('text', wagtail.wagtailcore.blocks.RichTextBlock(blank=True, default='', help_text='WYSIWYG Editor for general purpose content, (max 1000 chars)', max_length=1000, required=False)), ('link', wagtail.wagtailcore.blocks.URLBlock(help_text='Optional field - accepts a URL (max 200 chars)', max_length=200, required=False)), ('link_title', wagtail.wagtailcore.blocks.CharBlock(help_text='Optional field - Title of the link, (max 100 chars)', max_length=100, required=False))), required=True)),))),
                ('resources_title', models.CharField(blank=True, default='', max_length=50, verbose_name='Title')),
                ('resources_cta_text', models.CharField(blank=True, default='', max_length=50, verbose_name='CTA Text')),
                ('resources_cta_link', models.URLField(blank=True, default='', verbose_name='Link')),
                ('guides', wagtail.wagtailcore.fields.RichTextField(blank=True, verbose_name='Guides')),
                ('heading_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='Image')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.AddField(
            model_name='clubprofile',
            name='is_mentor',
            field=models.BooleanField(default=False),
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.