repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
JackNokia/robotframework | refs/heads/master | atest/testdata/keywords/resources/MyLibrary2.py | 22 | from robot.libraries.BuiltIn import BuiltIn, register_run_keyword
class MyLibrary2:
    """Test library whose keywords deliberately collide with keywords from
    resource files, test case files, other libraries and BuiltIn/String,
    to exercise Robot Framework's keyword search-order rules.

    Note: Python 2 source (statement-form ``print``).
    """

    def keyword_only_in_library_2(self):
        print "Keyword from library 2"

    def keyword_in_both_libraries(self):
        print "Keyword from library 2"

    def keyword_in_all_resources_and_libraries(self):
        print "Keyword from library 2"

    def keyword_everywhere(self):
        print "Keyword from library 2"

    def keyword_in_tc_file_overrides_others(self):
        # A same-named keyword in the test case file has higher priority,
        # so this implementation must never be reached.
        raise Exception("This keyword should not be called")

    def keyword_in_resource_overrides_libraries(self):
        # Resource-file keywords outrank library keywords; must not run.
        raise Exception("This keyword should not be called")

    def no_operation(self):
        print "Overrides keyword from BuiltIn library"

    def replace_string(self):
        print "Overrides keyword from String library"
        return "I replace nothing!"

    def run_keyword_if(self, expression, name, *args):
        # Delegate to BuiltIn so this behaves like the real Run Keyword If.
        return BuiltIn().run_keyword_if(expression, name, *args)


# Register as a run-keyword variant so Robot Framework does not resolve the
# inner keyword's arguments before execution.
register_run_keyword('MyLibrary2', MyLibrary2.run_keyword_if)
|
soldag/home-assistant | refs/heads/dev | tests/util/test_thread.py | 12 | """Test Home Assistant thread utils."""
import asyncio
import pytest
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.thread import ThreadWithException
async def test_thread_with_exception_invalid(hass):
    """Test throwing an invalid thread exception."""
    finish_event = asyncio.Event()

    def _do_nothing(*_):
        # Signal the event loop (thread-safely) that the thread body ran.
        run_callback_threadsafe(hass.loop, finish_event.set)

    test_thread = ThreadWithException(target=_do_nothing)
    test_thread.start()
    # Wait until the target has actually executed before raising into it.
    await asyncio.wait_for(finish_event.wait(), timeout=0.1)

    # raise_exc requires an exception type/instance; a plain object is a
    # TypeError.
    with pytest.raises(TypeError):
        test_thread.raise_exc(_EmptyClass())
    test_thread.join()
async def test_thread_not_started(hass):
    """Test throwing when the thread is not started."""
    test_thread = ThreadWithException(target=lambda *_: None)

    # Raising into a thread that was never started must fail fast.
    with pytest.raises(AssertionError):
        test_thread.raise_exc(TimeoutError)
async def test_thread_fails_raise(hass):
    """Test throwing after already ended."""
    finish_event = asyncio.Event()

    def _do_nothing(*_):
        run_callback_threadsafe(hass.loop, finish_event.set)

    test_thread = ThreadWithException(target=_do_nothing)
    test_thread.start()
    await asyncio.wait_for(finish_event.wait(), timeout=0.1)
    test_thread.join()

    # The thread has terminated, so raising into it is a SystemError.
    with pytest.raises(SystemError):
        test_thread.raise_exc(ValueError)
# Plain object (not an exception) used to exercise raise_exc's type check.
class _EmptyClass:
    """An empty class."""
|
saurabh6790/test-erp | refs/heads/develop | erpnext/projects/doctype/activity_cost/test_activity_cost.py | 22 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.projects.doctype.activity_cost.activity_cost import DuplicationError
class TestActivityCost(unittest.TestCase):
    """Unit tests for the Activity Cost doctype."""

    def test_duplication(self):
        """Inserting a second Activity Cost for the same employee and
        activity type must raise DuplicationError.
        """
        # Start from a clean table so the first insert cannot itself collide.
        frappe.db.sql("delete from `tabActivity Cost`")
        activity_cost1 = frappe.new_doc('Activity Cost')
        activity_cost1.update({
            "employee": "_T-Employee-0001",
            "employee_name": "_Test Employee",
            "activity_type": "_Test Activity Type",
            "billing_rate": 100,
            "costing_rate": 50
        })
        activity_cost1.insert()
        # A verbatim copy duplicates the (employee, activity type) pair.
        activity_cost2 = frappe.copy_doc(activity_cost1)
        self.assertRaises(DuplicationError, activity_cost2.insert)
|
Udayraj123/dashboard_IITG | refs/heads/master | Binder/discussions/migrations/0023_auto_20160711_2352.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.10a1 on 2016-07-11 18:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: constrains the post "option" field to a
    # fixed set of categories and makes it nullable.

    dependencies = [
        ('discussions', '0022_remove_post_score'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='option',
            field=models.CharField(choices=[('career', 'career'), ('relationship', 'relationship'), ('education', 'education'), ('other', 'other')], max_length=20, null=True),
        ),
    ]
|
Tchanders/socorro | refs/heads/master | socorro/lib/httpclient.py | 11 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import httplib
class HttpClient(object):
    """Class for doing HTTP requests to any server. Encapsulates python's
    httplib.

    Intended to be used as a context manager::

        with HttpClient(host, port) as client:
            client.get("/some/url")
    """

    def __init__(self, host, port, timeout=None):
        """Set the host, port and optional timeout (in seconds) for all
        HTTP requests ran by this client.
        """
        self.host = host
        self.port = port
        self.timeout = timeout

    def __enter__(self):
        # The connection is opened lazily, on context entry.
        self.conn = httplib.HTTPConnection(self.host, self.port,
                                           timeout=self.timeout)
        # Bug fix: the context-manager protocol binds the return value of
        # __enter__ to the `as` target. Without this return,
        # `with HttpClient(...) as client:` bound `client` to None.
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Always close the connection, even if the `with` body raised.
        # (Parameters renamed from `type`/`value`/`traceback` so the
        # builtin `type` is not shadowed.)
        self.conn.close()

    def _process_response(self):
        """Return a JSON result after an HTTP Request.

        Process the response of an HTTP Request and make it a JSON error
        dict if it failed. Otherwise return the response's raw content.
        """
        response = self.conn.getresponse()
        if response.status in (200, 201):
            data = response.read()
        else:
            # Non-success statuses are wrapped in a JSON-style error dict.
            data = {
                "error": {
                    "code": response.status,
                    "reason": response.reason,
                    "data": response.read()
                }
            }
        return data

    def get(self, url):
        """Send a HTTP GET request to a URL and return the result."""
        self.conn.request("GET", url)
        return self._process_response()

    def post(self, url, data):
        """Send a HTTP POST request to a URL and return the result."""
        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept": "text/json"
        }
        self.conn.request("POST", url, data, headers)
        return self._process_response()

    def put(self, url, data=None):
        """Send a HTTP PUT request to a URL and return the result."""
        self.conn.request("PUT", url, data)
        return self._process_response()

    def delete(self, url):
        """Send a HTTP DELETE request to a URL and return the result."""
        self.conn.request("DELETE", url)
        return self._process_response()
|
ixiom/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/TestResultServer/handlers/__init__.py | 6014 | # Required for Python to search this directory for module files
|
nicoTrombon/DjangoPolls | refs/heads/master | env/Lib/site-packages/django/contrib/gis/db/backends/postgis/creation.py | 87 | from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
class PostGISCreation(DatabaseCreation):
    """Database-creation helpers for the PostGIS spatial backend.

    Adds the extra SQL needed for geometry columns and their spatial
    indexes on top of the standard PostgreSQL creation SQL.
    """
    geom_index_type = 'GIST'
    geom_index_ops = 'GIST_GEOMETRY_OPS'
    geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'

    def sql_indexes_for_field(self, model, f, style):
        "Return any spatial index creation SQL for the field."
        from django.contrib.gis.db.models.fields import GeometryField

        output = super(PostGISCreation, self).sql_indexes_for_field(model, f, style)

        if isinstance(f, GeometryField):
            gqn = self.connection.ops.geo_quote_name
            qn = self.connection.ops.quote_name
            db_table = model._meta.db_table

            if f.geography or self.connection.ops.geometry:
                # Geography and Geometry (PostGIS 2.0+) columns are
                # created normally.
                pass
            else:
                # Geometry columns are created by `AddGeometryColumn`
                # stored procedure.
                output.append(style.SQL_KEYWORD('SELECT ') +
                              style.SQL_TABLE('AddGeometryColumn') + '(' +
                              style.SQL_TABLE(gqn(db_table)) + ', ' +
                              style.SQL_FIELD(gqn(f.column)) + ', ' +
                              style.SQL_FIELD(str(f.srid)) + ', ' +
                              style.SQL_COLTYPE(gqn(f.geom_type)) + ', ' +
                              style.SQL_KEYWORD(str(f.dim)) + ');')

                if not f.null:
                    # Add a NOT NULL constraint to the field
                    # (AddGeometryColumn creates nullable columns).
                    output.append(style.SQL_KEYWORD('ALTER TABLE ') +
                                  style.SQL_TABLE(qn(db_table)) +
                                  style.SQL_KEYWORD(' ALTER ') +
                                  style.SQL_FIELD(qn(f.column)) +
                                  style.SQL_KEYWORD(' SET NOT NULL') + ';')

            if f.spatial_index:
                # Spatial indexes created the same way for both Geometry and
                # Geography columns.
                # PostGIS 2.0 does not support GIST_GEOMETRY_OPS. So, on 1.5
                # we use GIST_GEOMETRY_OPS, on 2.0 we use either "nd" ops
                # which are fast on multidimensional cases, or just plain
                # gist index for the 2d case.
                if f.geography:
                    index_ops = ''
                elif self.connection.ops.geometry:
                    if f.dim > 2:
                        index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops_nd)
                    else:
                        index_ops = ''
                else:
                    index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops)
                output.append(style.SQL_KEYWORD('CREATE INDEX ') +
                              style.SQL_TABLE(qn('%s_%s_id' % (db_table, f.column))) +
                              style.SQL_KEYWORD(' ON ') +
                              style.SQL_TABLE(qn(db_table)) +
                              style.SQL_KEYWORD(' USING ') +
                              style.SQL_COLTYPE(self.geom_index_type) + ' ( ' +
                              style.SQL_FIELD(qn(f.column)) + index_ops + ' );')
        return output

    def sql_table_creation_suffix(self):
        # Clone the spatial template database when one is configured.
        if self.connection.template_postgis is not None:
            return ' TEMPLATE %s' % (
                self.connection.ops.quote_name(self.connection.template_postgis),)
        return ''
|
bestdpf/2dbarcode | refs/heads/master | cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/mssdk.py | 34 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mssdk.py 5023 2010/06/14 22:05:46 scons"
"""engine.SCons.Tool.mssdk
Tool-specific initialization for Microsoft SDKs, both Platform
SDKs and Windows SDKs.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
from MSCommon import mssdk_exists, \
mssdk_setup_env
def generate(env):
    """Add construction variables for an MS SDK to an Environment."""
    # All the heavy lifting (registry lookup, env vars) lives in MSCommon.
    mssdk_setup_env(env)
def exists(env):
    # The tool is usable whenever an MS SDK installation can be located.
    return mssdk_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
gylian/Sick-Beard | refs/heads/master | lib/beets/util/artresizer.py | 15 | # This file is part of beets.
# Copyright 2013, Fabrice Laporte
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Abstraction layer to resize images using PIL, ImageMagick, or a
public resizing proxy if neither is available.
"""
import urllib
import subprocess
import os
from tempfile import NamedTemporaryFile
import logging
from beets import util
# Resizing methods (identifiers for the available backends).
PIL = 1
IMAGEMAGICK = 2
WEBPROXY = 3

# Base URL of the public image-resizing proxy used when no local backend
# is available.
PROXY_URL = 'http://images.weserv.nl/'

log = logging.getLogger('beets')
def resize_url(url, maxwidth):
    """Return a proxied image URL that resizes the original image to
    maxwidth (preserving aspect ratio).
    """
    # The proxy takes the scheme-less source URL and the target width as
    # query parameters.
    params = urllib.urlencode({
        'url': url.replace('http://',''),
        'w': str(maxwidth),
    })
    return '{0}?{1}'.format(PROXY_URL, params)
def temp_file_for(path):
    """Return the name of an unused temporary file carrying the same
    extension as the given path.
    """
    _, extension = os.path.splitext(path)
    # delete=False keeps the file around after closing so the caller can
    # write to it.
    handle = NamedTemporaryFile(suffix=extension, delete=False)
    try:
        return handle.name
    finally:
        handle.close()
def pil_resize(maxwidth, path_in, path_out=None):
    """Resize using Python Imaging Library (PIL). Return the output path
    of resized image.
    """
    if not path_out:
        path_out = temp_file_for(path_in)
    from PIL import Image

    log.debug(u'artresizer: PIL resizing {0} to {1}'.format(
        util.displayable_path(path_in), util.displayable_path(path_out)
    ))

    try:
        image = Image.open(util.syspath(path_in))
        # thumbnail() shrinks in place, preserving the aspect ratio.
        image.thumbnail((maxwidth, maxwidth), Image.ANTIALIAS)
        image.save(path_out)
    except IOError:
        log.error(u"PIL cannot create thumbnail for '{0}'".format(
            util.displayable_path(path_in)
        ))
        return path_in
    return path_out
def im_resize(maxwidth, path_in, path_out=None):
    """Resize using ImageMagick's ``convert`` tool. Return the output
    path of the resized image (or `path_in` when conversion fails).
    """
    path_out = path_out or temp_file_for(path_in)
    log.debug(u'artresizer: ImageMagick resizing {0} to {1}'.format(
        util.displayable_path(path_in), util.displayable_path(path_out)
    ))

    # "-resize widthxheight>" shrinks images with dimension(s) larger
    # than the corresponding width and/or height dimension(s). The >
    # "only shrink" flag is prefixed by ^ escape char for Windows
    # compatibility.
    try:
        util.command_output([
            'convert', util.syspath(path_in),
            '-resize', '{0}x^>'.format(maxwidth), path_out
        ])
    except subprocess.CalledProcessError:
        # Best-effort: fall back to the original file on failure.
        log.warn(u'artresizer: IM convert failed for {0}'.format(
            util.displayable_path(path_in)
        ))
        return path_in
    return path_out
# Maps the local backend identifiers to their implementation functions;
# WEBPROXY is deliberately absent (it is not a local method).
BACKEND_FUNCS = {
    PIL: pil_resize,
    IMAGEMAGICK: im_resize,
}
class Shareable(type):
    """A pseudo-singleton metaclass that allows both shared and
    non-shared instances. The ``MyClass.shared`` property holds a
    lazily-created shared instance of ``MyClass`` while calling
    ``MyClass()`` to construct a new object works as usual.
    """

    def __init__(cls, name, bases, namespace):
        super(Shareable, cls).__init__(name, bases, namespace)
        # Per-class cache for the shared instance; created on first access.
        cls._instance = None

    @property
    def shared(cls):
        instance = cls._instance
        if instance is None:
            instance = cls._instance = cls()
        return instance
class ArtResizer(object):
    """A singleton class that performs image resizes.

    The backend (PIL, ImageMagick, or a web proxy) is chosen once per
    instance; ``ArtResizer.shared`` (via the Shareable metaclass) gives a
    process-wide shared instance.
    """
    # Python 2 metaclass declaration (has no effect on Python 3).
    __metaclass__ = Shareable

    def __init__(self, method=None):
        """Create a resizer object for the given method or, if none is
        specified, with an inferred method.
        """
        self.method = method or self._guess_method()
        log.debug(u"artresizer: method is {0}".format(self.method))

    def resize(self, maxwidth, path_in, path_out=None):
        """Manipulate an image file according to the method, returning a
        new path. For PIL or IMAGEMAGIC methods, resizes the image to a
        temporary file. For WEBPROXY, returns `path_in` unmodified.
        """
        if self.local:
            func = BACKEND_FUNCS[self.method]
            return func(maxwidth, path_in, path_out)
        else:
            return path_in

    def proxy_url(self, maxwidth, url):
        """Modifies an image URL according the method, returning a new
        URL. For WEBPROXY, a URL on the proxy server is returned.
        Otherwise, the URL is returned unmodified.
        """
        if self.local:
            return url
        else:
            return resize_url(url, maxwidth)

    @property
    def local(self):
        """A boolean indicating whether the resizing method is performed
        locally (i.e., PIL or IMAGEMAGICK).
        """
        return self.method in BACKEND_FUNCS

    @staticmethod
    def _guess_method():
        """Determine which resizing method to use. Returns PIL,
        IMAGEMAGICK, or WEBPROXY depending on available dependencies.
        """
        # Try importing PIL.
        try:
            __import__('PIL', fromlist=['Image'])
            return PIL
        except ImportError:
            pass

        # Try invoking ImageMagick's "convert".
        try:
            out = util.command_output(['convert', '--version'])
            if 'imagemagick' in out.lower():
                # system32/convert.exe may be interfering
                return IMAGEMAGICK
        except (subprocess.CalledProcessError, OSError):
            pass

        # Fall back to Web proxy method.
        return WEBPROXY
|
page-io/Cactus | refs/heads/master | cactus/utils/network.py | 9 | #coding:utf-8
import logging
import time
import urllib2
from cactus.utils.parallel import multiMap
logger = logging.getLogger(__name__)
def retry(exceptions, tries=4, delay=3, backoff=2):
    """Decorator: retry the wrapped callable when it raises *exceptions*.

    Up to ``tries - 1`` attempts are made inside a try/except with
    exponential back-off between them (``delay`` seconds, then
    ``delay * backoff``, ...). The final attempt is made outside the
    try/except so its exception propagates to the caller.

    :param exceptions: exception class (or tuple of classes) to retry on.
    :param tries: total number of attempts, including the final one.
    :param delay: initial sleep between attempts, in seconds.
    :param backoff: multiplier applied to the delay after each failure.
    """
    # Local import keeps the module's import block unchanged.
    from functools import wraps

    def deco_retry(f):
        @wraps(f)  # preserve the wrapped function's name/docstring
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except exceptions as e:
                    logger.warning("%s, Retrying in %.1f seconds..." % (str(e), mdelay))
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # Last attempt: let any exception propagate to the caller.
            # (The original guarded this with a `try_one_last_time` flag
            # that was always True -- dead code, removed.)
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
def internetWorking():
    """Return True if at least one of two well-known sites is reachable.

    Each probe uses a 1-second timeout; the probes run via multiMap.
    """
    def check(url):
        try:
            response = urllib2.urlopen(url, timeout = 1)
            return True
        except urllib2.URLError as err:
            # Unreachable/misconfigured network: treat as a failed probe.
            pass
        return False

    return True in multiMap(check, [
        'http://www.google.com',
        'http://www.apple.com'])
BorisJeremic/Real-ESSI-Examples | refs/heads/master | parallel/test_cases/8NodeBrick/cantilever_different_Poisson/NumberOfDivision2/PoissonRatio0.20/compare_txt.py | 637 | #!/usr/bin/python
import h5py
import sys
import numpy as np
import os
import re
import random

# find the path to my own python function:
# (everything above the 'test_cases' directory, plus 'compare_function')
cur_dir=os.getcwd()
sep='test_cases'
test_DIR=cur_dir.split(sep,1)[0]
scriptDIR=test_DIR+'compare_function'
sys.path.append(scriptDIR)

# import my own function for color and comparator
from mycomparator import *
from mycolor_fun import *

# analytic_solution = sys.argv[1]
# numeric_result = sys.argv[2]
analytic_solution = 'analytic_solution.txt'
numeric_result = 'numeric_result.txt'

# Both files are expected to hold a single scalar each.
analytic_sol = np.loadtxt(analytic_solution)
numeric_res = np.loadtxt(numeric_result)
abs_error = abs(analytic_sol - numeric_res)
rel_error = abs_error/analytic_sol
analytic_sol = float(analytic_sol)
numeric_res = float(numeric_res)
rel_error = float(rel_error)

# print the results (Python 2 print statements)
# NOTE(review): rel_error is a raw fraction but the column header says
# "error[%]" -- confirm whether a *100 factor was intended.
case_flag=1
print headrun() , "-----------Testing results-----------------"
print headstep() ,'{0} {1} {2} '.format('analytic_solution ','numeric_result ','error[%]')
print headOK() ,'{0:+e} {1:+e} {2:+0.2f} '.format(analytic_sol, numeric_res, rel_error )
if(case_flag==1):
    print headOKCASE(),"-----------Done this case!-----------------"
# legacy backup
# find . -name 'element.fei' -exec bash -c 'mv $0 ${0/element.fei/add_element.include}' {} \;
# find . -name 'constraint.fei' -exec bash -c 'mv $0 ${0/constraint.fei/add_constraint.include}' {} \;
# find . -name 'node.fei' -exec bash -c 'mv $0 ${0/node.fei/add_node.include}' {} \;
# find . -name 'add_node.fei' -exec bash -c 'mv $0 ${0/add_node.fei/add_node.include}' {} \;
# find . -name 'elementLT.fei' -exec bash -c 'mv $0 ${0/elementLT.fei/add_elementLT.include}' {} \;
# sed -i "s/node\.fei/add_node.include/" main.fei
# sed -i "s/add_node\.fei/add_node.include/" main.fei
# sed -i "s/element\.fei/add_element.include/" main.fei
# sed -i "s/elementLT\.fei/add_elementLT.include/" main.fei
# sed -i "s/constraint\.fei/add_constraint.include/" main.fei
# find . -name '*_bak.h5.feioutput' -exec bash -c 'mv $0 ${0/\_bak.h5.feioutput/\_original\.h5.feioutput}' {} \;
|
cristiana214/cristianachavez214-cristianachavez | refs/heads/master | python/src/Lib/plat-mac/Carbon/OSAconst.py | 81 | # Generated from 'OSA.h'
def FOUR_CHAR_CODE(x):
    """Identity stand-in for the C ``FOUR_CHAR_CODE`` macro: these Python
    bindings represent four-character codes directly as strings.
    """
    return x
from Carbon.AppleEvents import *
# Constants generated from Apple's OSA.h (Open Scripting Architecture).
# Python 2 source: note the long literal ``0L`` below.

kAEUseStandardDispatch = -1

# Component, subtype and file types.
kOSAComponentType = FOUR_CHAR_CODE('osa ')
kOSAGenericScriptingComponentSubtype = FOUR_CHAR_CODE('scpt')
kOSAFileType = FOUR_CHAR_CODE('osas')
kOSASuite = FOUR_CHAR_CODE('ascr')
kOSARecordedText = FOUR_CHAR_CODE('recd')

# Script-info selectors.
kOSAScriptIsModified = FOUR_CHAR_CODE('modi')
kOSAScriptIsTypeCompiledScript = FOUR_CHAR_CODE('cscr')
kOSAScriptIsTypeScriptValue = FOUR_CHAR_CODE('valu')
kOSAScriptIsTypeScriptContext = FOUR_CHAR_CODE('cntx')
kOSAScriptBestType = FOUR_CHAR_CODE('best')
kOSACanGetSource = FOUR_CHAR_CODE('gsrc')

# Dialect-info descriptor type and keys.
typeOSADialectInfo = FOUR_CHAR_CODE('difo')
keyOSADialectName = FOUR_CHAR_CODE('dnam')
keyOSADialectCode = FOUR_CHAR_CODE('dcod')
keyOSADialectLangCode = FOUR_CHAR_CODE('dlcd')
keyOSADialectScriptCode = FOUR_CHAR_CODE('dscd')

kOSANullScript = 0L
kOSANullMode = 0
kOSAModeNull = 0

# Scripting-component capability flags.
kOSASupportsCompiling = 0x0002
kOSASupportsGetSource = 0x0004
kOSASupportsAECoercion = 0x0008
kOSASupportsAESending = 0x0010
kOSASupportsRecording = 0x0020
kOSASupportsConvenience = 0x0040
kOSASupportsDialects = 0x0080
kOSASupportsEventHandling = 0x0100

# Component-routine selectors.
kOSASelectLoad = 0x0001
kOSASelectStore = 0x0002
kOSASelectExecute = 0x0003
kOSASelectDisplay = 0x0004
kOSASelectScriptError = 0x0005
kOSASelectDispose = 0x0006
kOSASelectSetScriptInfo = 0x0007
kOSASelectGetScriptInfo = 0x0008
kOSASelectSetActiveProc = 0x0009
kOSASelectGetActiveProc = 0x000A
kOSASelectScriptingComponentName = 0x0102
kOSASelectCompile = 0x0103
kOSASelectCopyID = 0x0104
kOSASelectCopyScript = 0x0105
kOSASelectGetSource = 0x0201
kOSASelectCoerceFromDesc = 0x0301
kOSASelectCoerceToDesc = 0x0302
kOSASelectSetSendProc = 0x0401
kOSASelectGetSendProc = 0x0402
kOSASelectSetCreateProc = 0x0403
kOSASelectGetCreateProc = 0x0404
kOSASelectSetDefaultTarget = 0x0405
kOSASelectStartRecording = 0x0501
kOSASelectStopRecording = 0x0502
kOSASelectLoadExecute = 0x0601
kOSASelectCompileExecute = 0x0602
kOSASelectDoScript = 0x0603
kOSASelectSetCurrentDialect = 0x0701
kOSASelectGetCurrentDialect = 0x0702
kOSASelectAvailableDialects = 0x0703
kOSASelectGetDialectInfo = 0x0704
kOSASelectAvailableDialectCodeList = 0x0705
kOSASelectSetResumeDispatchProc = 0x0801
kOSASelectGetResumeDispatchProc = 0x0802
kOSASelectExecuteEvent = 0x0803
kOSASelectDoEvent = 0x0804
kOSASelectMakeContext = 0x0805

# Debugger selectors.
kOSADebuggerCreateSession = 0x0901
kOSADebuggerGetSessionState = 0x0902
kOSADebuggerSessionStep = 0x0903
kOSADebuggerDisposeSession = 0x0904
kOSADebuggerGetStatementRanges = 0x0905
kOSADebuggerGetBreakpoint = 0x0910
kOSADebuggerSetBreakpoint = 0x0911
kOSADebuggerGetDefaultBreakpoint = 0x0912
kOSADebuggerGetCurrentCallFrame = 0x0906
kOSADebuggerGetCallFrameState = 0x0907
kOSADebuggerGetVariable = 0x0908
kOSADebuggerSetVariable = 0x0909
kOSADebuggerGetPreviousCallFrame = 0x090A
kOSADebuggerDisposeCallFrame = 0x090B
kOSADebuggerCountVariables = 0x090C
kOSASelectComponentSpecificStart = 0x1001

# Execution-mode flags.
kOSAModePreventGetSource = 0x00000001
kOSAModeNeverInteract = kAENeverInteract
kOSAModeCanInteract = kAECanInteract
kOSAModeAlwaysInteract = kAEAlwaysInteract
kOSAModeDontReconnect = kAEDontReconnect
kOSAModeCantSwitchLayer = 0x00000040
kOSAModeDoRecord = 0x00001000
kOSAModeCompileIntoContext = 0x00000002
kOSAModeAugmentContext = 0x00000004
kOSAModeDisplayForHumans = 0x00000008
kOSAModeDontStoreParent = 0x00010000
kOSAModeDispatchToDirectObject = 0x00020000
kOSAModeDontGetDataForArguments = 0x00040000

kOSAScriptResourceType = kOSAGenericScriptingComponentSubtype
typeOSAGenericStorage = kOSAScriptResourceType

# Script-error selectors.
kOSAErrorNumber = keyErrorNumber
kOSAErrorMessage = keyErrorString
kOSAErrorBriefMessage = FOUR_CHAR_CODE('errb')
kOSAErrorApp = FOUR_CHAR_CODE('erap')
kOSAErrorPartialResult = FOUR_CHAR_CODE('ptlr')
kOSAErrorOffendingObject = FOUR_CHAR_CODE('erob')
kOSAErrorExpectedType = FOUR_CHAR_CODE('errt')
kOSAErrorRange = FOUR_CHAR_CODE('erng')
typeOSAErrorRange = FOUR_CHAR_CODE('erng')
keyOSASourceStart = FOUR_CHAR_CODE('srcs')
keyOSASourceEnd = FOUR_CHAR_CODE('srce')

# Event-dispatch modes.
kOSAUseStandardDispatch = kAEUseStandardDispatch
kOSANoDispatch = kAENoDispatch
kOSADontUsePhac = 0x0001

# Debugger program states.
eNotStarted = 0
eRunnable = 1
eRunning = 2
eStopped = 3
eTerminated = 4

# Debugger stepping modes.
eStepOver = 0
eStepIn = 1
eStepOut = 2
eRun = 3

# Variable scopes.
eLocal = 0
eGlobal = 1
eProperties = 2

# Debugger descriptor keys/types.
keyProgramState = FOUR_CHAR_CODE('dsps')
typeStatementRange = FOUR_CHAR_CODE('srng')
keyProcedureName = FOUR_CHAR_CODE('dfnm')
keyStatementRange = FOUR_CHAR_CODE('dfsr')
keyLocalsNames = FOUR_CHAR_CODE('dfln')
keyGlobalsNames = FOUR_CHAR_CODE('dfgn')
keyParamsNames = FOUR_CHAR_CODE('dfpn')
|
virgree/odoo | refs/heads/8.0 | addons/l10n_at/__init__.py | 438 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) conexus.at
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
OKThess/website | refs/heads/master | main/migrations/0039_auto_20170926_1634.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-26 16:34
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames Team.description to
    # description_en (English-specific field).

    dependencies = [
        ('main', '0038_auto_20170926_1624'),
    ]

    operations = [
        migrations.RenameField(
            model_name='team',
            old_name='description',
            new_name='description_en',
        ),
    ]
|
orchidinfosys/odoo | refs/heads/master | addons/mail/models/update.py | 28 | # -*- coding: utf-8 -*-
from ast import literal_eval
import datetime
from functools import partial
import logging
import werkzeug.urls
import urllib2
from openerp import release, SUPERUSER_ID
from openerp.models import AbstractModel
from openerp.tools.translate import _
from openerp.tools.config import config
from openerp.tools import misc
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
class publisher_warranty_contract(AbstractModel):
    """Phone-home model for Odoo's publisher warranty service.

    Sends anonymous usage statistics to the publisher server (normally
    from a cron job) and posts any returned notification messages to the
    company-wide mail channel.
    """
    _name = "publisher_warranty.contract"

    def _get_message(self, cr, uid):
        """Build the statistics payload dict sent to the publisher server."""
        Users = self.pool['res.users']
        user_count = partial(Users.search_count, cr, uid)
        get_param = partial(self.pool['ir.config_parameter'].get_param, cr, SUPERUSER_ID)

        dbuuid = get_param('database.uuid')
        db_create_date = get_param('database.create_date')
        # A user counts as "active" if logged in within the last 15 days.
        limit_date = datetime.datetime.now()
        limit_date = limit_date - datetime.timedelta(15)
        limit_date_str = limit_date.strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT)
        nbr_users = user_count([])
        nbr_active_users = user_count([("login_date", ">=", limit_date_str)])
        nbr_share_users = 0
        nbr_active_share_users = 0
        if "share" in Users._fields:
            nbr_share_users = user_count([("share", "=", True)])
            nbr_active_share_users = user_count([("share", "=", True), ("login_date", ">=", limit_date_str)])
        user = Users.browse(cr, uid, uid)
        domain = [('application', '=', True), ('state', 'in', ['installed', 'to upgrade', 'to remove'])]
        apps = self.pool['ir.module.module'].search_read(cr, SUPERUSER_ID, domain, ['name'])

        enterprise_code = get_param('database.enterprise_code')

        web_base_url = get_param('web.base.url')
        msg = {
            "dbuuid": dbuuid,
            "nbr_users": nbr_users,
            "nbr_active_users": nbr_active_users,
            "nbr_share_users": nbr_share_users,
            "nbr_active_share_users": nbr_active_share_users,
            "dbname": cr.dbname,
            "db_create_date": db_create_date,
            "version": release.version,
            "language": user.lang,
            "web_base_url": web_base_url,
            "apps": [app['name'] for app in apps],
            "enterprise_code": enterprise_code,
        }
        # Attach company contact details when the current user has one.
        if user.partner_id.company_id:
            company_id = user.partner_id.company_id.id
            msg.update(self.pool["res.company"].read(cr, uid, [company_id], ["name", "email", "phone"])[0])
        return msg

    def _get_sys_logs(self, cr, uid):
        """
        Utility method to send a publisher warranty get logs messages.
        Returns the server's reply parsed with ast.literal_eval.
        """
        msg = self._get_message(cr, uid)
        arguments = {'arg0': msg, "action": "update"}
        arguments_raw = werkzeug.urls.url_encode(arguments)

        url = config.get("publisher_warranty_url")

        uo = urllib2.urlopen(url, arguments_raw, timeout=30)
        try:
            submit_result = uo.read()
            return literal_eval(submit_result)
        finally:
            uo.close()

    def update_notification(self, cr, uid, ids, cron_mode=True, context=None):
        """
        Send a message to OpenERP's publisher warranty server to check the
        validity of the contracts, get notifications, etc...

        @param cron_mode: If true, catch all exceptions (appropriate for usage in a cron).
        @type cron_mode: boolean
        """
        try:
            try:
                result = self._get_sys_logs(cr, uid)
            except Exception:
                if cron_mode:   # we don't want to see any stack trace in cron
                    return False
                _logger.debug("Exception while sending a get logs messages", exc_info=1)
                raise UserError(_("Error during communication with the publisher warranty server."))
            # old behavior based on res.log; now on mail.message, that is not necessarily installed
            IMD = self.pool['ir.model.data']
            user = self.pool['res.users'].browse(cr, SUPERUSER_ID, SUPERUSER_ID)
            poster = IMD.xmlid_to_object(cr, SUPERUSER_ID, 'mail.channel_all_employees', context=context)
            if not (poster and poster.exists()):
                if not user.exists():
                    return True
                # Fall back to posting on the superuser's own record.
                poster = user
            for message in result["messages"]:
                try:
                    poster.message_post(body=message, subtype='mt_comment', partner_ids=[user.partner_id.id])
                except Exception:
                    # Best-effort: a failed notification must not abort the cron.
                    pass
            if result.get('enterprise_info'):
                # Update expiration date
                self.pool['ir.config_parameter'].set_param(cr, SUPERUSER_ID, 'database.expiration_date', result['enterprise_info'].get('expiration_date'), ['base.group_user'])
                self.pool['ir.config_parameter'].set_param(cr, SUPERUSER_ID, 'database.expiration_reason', result['enterprise_info'].get('expiration_reason', 'trial'), ['base.group_system'])
                self.pool['ir.config_parameter'].set_param(cr, SUPERUSER_ID, 'database.enterprise_code', result['enterprise_info'].get('enterprise_code'), ['base.group_user'])
        except Exception:
            if cron_mode:
                return False    # we don't want to see any stack trace in cron
            else:
                raise
        return True
|
Lafunamor/ns3 | refs/heads/master | src/applications/bindings/callbacks_list.py | 331 | callback_classes = [
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
toolforger/sympy | refs/heads/master | sympy/categories/__init__.py | 122 | """
Category Theory module.
Provides some of the fundamental category-theory-related classes,
including categories, morphisms, diagrams. Functors are not
implemented yet.
The general reference work this module tries to follow is
[JoyOfCats] J. Adamek, H. Herrlich. G. E. Strecker: Abstract and
Concrete Categories. The Joy of Cats.
The latest version of this book should be available for free download
from
katmat.math.uni-bremen.de/acc/acc.pdf
"""
from .baseclasses import (Object, Morphism, IdentityMorphism,
NamedMorphism, CompositeMorphism, Category,
Diagram)
from .diagram_drawing import (DiagramGrid, XypicDiagramDrawer,
xypic_draw_diagram, preview_diagram)
|
sve-odoo/odoo | refs/heads/master | addons/website_sale_delivery/controllers/main.py | 214 | # -*- coding: utf-8 -*-
import openerp
from openerp import http
from openerp.http import request
import openerp.addons.website_sale.controllers.main
class website_sale(openerp.addons.website_sale.controllers.main.website_sale):
    """Extends the website shop checkout with delivery-carrier handling."""

    @http.route(['/shop/payment'], type='http', auth="public", website=True)
    def payment(self, **post):
        cr, uid, context = request.cr, request.uid, request.context
        order = request.website.sale_get_order(context=context)

        carrier_id = post.get('carrier_id')
        if carrier_id:
            carrier_id = int(carrier_id)
        if order:
            # Recompute the order's delivery line for the chosen carrier
            # before rendering/confirming the payment page.
            request.registry['sale.order']._check_carrier_quotation(cr, uid, order, force_carrier_id=carrier_id, context=context)
        if carrier_id:
            # Redirect so the refreshed page reflects the applied carrier.
            return request.redirect("/shop/payment")

        res = super(website_sale, self).payment(**post)
        return res

    def order_lines_2_google_api(self, order_lines):
        """ Transforms a list of order lines into a dict for google analytics """
        # Delivery lines are excluded from e-commerce tracking.
        order_lines_not_delivery = [line for line in order_lines if not line.is_delivery]
        return super(website_sale, self).order_lines_2_google_api(order_lines_not_delivery)
|
postrational/django | refs/heads/master | tests/middleware_exceptions/tests.py | 63 | import sys
from django.conf import settings
from django.core.signals import got_request_exception
from django.http import HttpResponse
from django.template.response import TemplateResponse
from django.template import Template
from django.test import TestCase
class TestException(Exception):
    """Exception raised deliberately by the misbehaving test middleware."""
# A middleware base class that tracks which methods have been called
class TestMiddleware(object):
    """Middleware base that records which of its hook methods ran.

    Each ``process_*`` hook flips the matching ``process_*_called`` flag to
    True; tests inspect the flags to verify which hooks the handler invoked.
    """

    def __init__(self):
        # Every hook starts out as not-yet-called.
        for hook in ('request', 'view', 'template_response',
                     'response', 'exception'):
            setattr(self, 'process_%s_called' % hook, False)

    def process_request(self, request):
        self.process_request_called = True

    def process_view(self, request, view_func, view_args, view_kwargs):
        self.process_view_called = True

    def process_template_response(self, request, response):
        self.process_template_response_called = True
        return response

    def process_response(self, request, response):
        self.process_response_called = True
        return response

    def process_exception(self, request, exception):
        self.process_exception_called = True
# Middleware examples that do the right thing
class RequestMiddleware(TestMiddleware):
    """Well-behaved middleware that answers at the process_request stage."""

    def process_request(self, request):
        # Record the call on the tracking base, then short-circuit.
        TestMiddleware.process_request(self, request)
        return HttpResponse('Request Middleware')
class ViewMiddleware(TestMiddleware):
    """Well-behaved middleware that answers at the process_view stage."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Record the call on the tracking base, then short-circuit.
        TestMiddleware.process_view(self, request, view_func, view_args, view_kwargs)
        return HttpResponse('View Middleware')
class ResponseMiddleware(TestMiddleware):
    """Well-behaved middleware that swaps in a fresh response at the
    process_response stage."""

    def process_response(self, request, response):
        # Record the call on the tracking base, then replace the response.
        TestMiddleware.process_response(self, request, response)
        return HttpResponse('Response Middleware')
class TemplateResponseMiddleware(TestMiddleware):
    """Well-behaved middleware that swaps in a fresh TemplateResponse at the
    process_template_response stage."""

    def process_template_response(self, request, response):
        # Record the call on the tracking base, then replace the response.
        TestMiddleware.process_template_response(self, request, response)
        return TemplateResponse(request, Template('Template Response Middleware'))
class ExceptionMiddleware(TestMiddleware):
    """Well-behaved middleware that converts an exception into a response."""

    def process_exception(self, request, exception):
        # Record the call on the tracking base, then swallow the exception.
        TestMiddleware.process_exception(self, request, exception)
        return HttpResponse('Exception Middleware')
# Sample middlewares that raise exceptions
class BadRequestMiddleware(TestMiddleware):
    """Misbehaving middleware whose process_request hook always raises."""

    def process_request(self, request):
        # Record the call on the tracking base, then blow up.
        TestMiddleware.process_request(self, request)
        raise TestException('Test Request Exception')
class BadViewMiddleware(TestMiddleware):
    """Misbehaving middleware whose process_view hook always raises."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Record the call on the tracking base, then blow up.
        TestMiddleware.process_view(self, request, view_func, view_args, view_kwargs)
        raise TestException('Test View Exception')
class BadTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving middleware whose process_template_response hook always
    raises."""

    def process_template_response(self, request, response):
        # Record the call on the tracking base, then blow up.
        TestMiddleware.process_template_response(self, request, response)
        raise TestException('Test Template Response Exception')
class BadResponseMiddleware(TestMiddleware):
    """Misbehaving middleware whose process_response hook always raises."""

    def process_response(self, request, response):
        # Record the call on the tracking base, then blow up.
        TestMiddleware.process_response(self, request, response)
        raise TestException('Test Response Exception')
class BadExceptionMiddleware(TestMiddleware):
    """Misbehaving middleware whose process_exception hook itself raises."""

    def process_exception(self, request, exception):
        # Record the call on the tracking base, then blow up.
        TestMiddleware.process_exception(self, request, exception)
        raise TestException('Test Exception Exception')
class BaseMiddlewareExceptionTest(TestCase):
    """Common plumbing for the middleware exception tests.

    Listens on the ``got_request_exception`` signal so exceptions raised
    while handling a request can be captured and asserted on, and provides
    helpers for injecting middleware instances into the test client's
    handler and for checking which middleware hooks were invoked.
    """
    # URLconf used by every test in the subclasses.
    urls = 'middleware_exceptions.urls'
    def setUp(self):
        self.exceptions = []
        got_request_exception.connect(self._on_request_exception)
        # Populate the handler's per-phase middleware lists so
        # _add_middleware() has something to insert into.
        self.client.handler.load_middleware()
    def tearDown(self):
        got_request_exception.disconnect(self._on_request_exception)
        self.exceptions = []
    def _on_request_exception(self, sender, request, **kwargs):
        # Capture the full (type, value, traceback) triple for later checks.
        self.exceptions.append(sys.exc_info())
    def _add_middleware(self, middleware):
        # Splice the middleware's hooks into the handler's per-phase lists:
        # request/view hooks go at the front of their lists, the
        # template-response/response/exception hooks at the back.
        self.client.handler._request_middleware.insert(0, middleware.process_request)
        self.client.handler._view_middleware.insert(0, middleware.process_view)
        self.client.handler._template_response_middleware.append(middleware.process_template_response)
        self.client.handler._response_middleware.append(middleware.process_response)
        self.client.handler._exception_middleware.append(middleware.process_exception)
    def assert_exceptions_handled(self, url, errors, extra_error=None):
        """GET *url* and assert the captured exception messages match *errors*.

        *extra_error* is an exception instance whose type (besides
        TestException) is allowed to propagate out of the test client.
        """
        try:
            response = self.client.get(url)
        except TestException:
            # Test client intentionally re-raises any exceptions being raised
            # during request handling. Hence actual testing that exception was
            # properly handled is done by relying on got_request_exception
            # signal being sent.
            pass
        except Exception as e:
            # Only the declared extra_error type may escape; anything else
            # is a genuine test failure.
            if type(extra_error) != type(e):
                self.fail("Unexpected exception: %s" % e)
        self.assertEqual(len(self.exceptions), len(errors))
        for i, error in enumerate(errors):
            exception, value, tb = self.exceptions[i]
            # Each captured exception carries its message as args[0].
            self.assertEqual(value.args, (error, ))
    def assert_middleware_usage(self, middleware, request, view, template_response, response, exception):
        """Assert exactly which of *middleware*'s hooks were invoked."""
        self.assertEqual(middleware.process_request_called, request)
        self.assertEqual(middleware.process_view_called, view)
        self.assertEqual(middleware.process_template_response_called, template_response)
        self.assertEqual(middleware.process_response_called, response)
        self.assertEqual(middleware.process_exception_called, exception)
class MiddlewareTests(BaseMiddlewareExceptionTest):
    """Hook-invocation checks for well-behaved middleware across normal,
    404, error-raising, None-returning and permission-denied views."""
    def test_process_request_middleware(self):
        """A response from process_request skips later request/view hooks."""
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)
    def test_process_view_middleware(self):
        """A response from process_view skips later view hooks."""
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)
    def test_process_response_middleware(self):
        """Replacing the response in process_response leaves hook usage intact."""
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)
    def test_process_template_response_middleware(self):
        """Every middleware's template-response hook runs for a TemplateResponse."""
        pre_middleware = TestMiddleware()
        middleware = TemplateResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/template_response/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, True, True, False)
        self.assert_middleware_usage(middleware, True, True, True, True, False)
        self.assert_middleware_usage(post_middleware, True, True, True, True, False)
    def test_process_exception_middleware(self):
        """process_exception is never invoked when the view succeeds."""
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)
    def test_process_request_middleware_not_found(self):
        """Request-stage short-circuit on a 404 URL."""
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)
    def test_process_view_middleware_not_found(self):
        """View-stage short-circuit on a 404 URL."""
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)
    def test_process_template_response_middleware_not_found(self):
        """On a 404, exception hooks run and no template-response hook does."""
        pre_middleware = TestMiddleware()
        middleware = TemplateResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_response_middleware_not_found(self):
        """On a 404, exception and response hooks all run."""
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_exception_middleware_not_found(self):
        """ExceptionMiddleware answers the 404; the outer exception hook is skipped."""
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_request_middleware_exception(self):
        """Request-stage short-circuit prevents the error view from running."""
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)
    def test_process_view_middleware_exception(self):
        """View-stage short-circuit prevents the error view from running."""
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)
    def test_process_response_middleware_exception(self):
        """The view's error is captured; response and exception hooks all run."""
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view'], Exception())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_exception_middleware_exception(self):
        """ExceptionMiddleware answers the error; the outer exception hook is skipped."""
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_request_middleware_null_view(self):
        """Request-stage short-circuit prevents the None-returning view from running."""
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)
    def test_process_view_middleware_null_view(self):
        """View-stage short-circuit prevents the None-returning view from running."""
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)
    def test_process_response_middleware_null_view(self):
        """A None-returning view raises ValueError; response hooks still run."""
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
                "The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
            ],
            ValueError())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)
    def test_process_exception_middleware_null_view(self):
        """The null-view ValueError is not routed to exception middleware."""
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
                "The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
            ],
            ValueError())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)
    def test_process_request_middleware_permission_denied(self):
        """Request-stage short-circuit prevents the PermissionDenied view from running."""
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)
    def test_process_view_middleware_permission_denied(self):
        """View-stage short-circuit prevents the PermissionDenied view from running."""
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)
    def test_process_response_middleware_permission_denied(self):
        """PermissionDenied triggers every exception and response hook."""
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_exception_middleware_permission_denied(self):
        """ExceptionMiddleware answers PermissionDenied; the outer exception hook is skipped."""
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)
    def test_process_template_response_error(self):
        """A template-response view that errors while rendering still runs all hooks."""
        middleware = TestMiddleware()
        self._add_middleware(middleware)
        self.assert_exceptions_handled('/middleware_exceptions/template_response_error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(middleware, True, True, True, True, False)
class BadMiddlewareTests(BaseMiddlewareExceptionTest):
def test_process_request_bad_middleware(self):
    """Raising in process_request skips later request hooks; response hooks still run."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadRequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Request Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware(self):
    """Raising in process_view skips later view hooks; response hooks still run."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test View Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_template_response_bad_middleware(self):
    """Raising in process_template_response aborts the remaining template-response hooks."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadTemplateResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/template_response/', ['Test Template Response Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, True, True, False)
    self.assert_middleware_usage(post_middleware, True, True, True, True, False)
def test_process_response_bad_middleware(self):
    """Raising in process_response aborts the outer response hooks."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Response Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware(self):
    """A bad process_exception hook is harmless when the view succeeds."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_not_found(self):
    """Raising in process_request on a 404 URL: later request hooks are skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadRequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Request Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_not_found(self):
    """Raising in process_view on a 404 URL: later view hooks are skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test View Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_not_found(self):
    """Raising in process_response on a 404: exception hooks run, outer response hook skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Response Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_not_found(self):
    """Raising in process_exception on a 404: the outer exception hook is skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Exception Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_exception(self):
    """Raising in process_request prevents the error view from ever running."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadRequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Request Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_exception(self):
    """Raising in process_view prevents the error view from ever running."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test View Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_exception(self):
    """Both the view error and the response-hook error are captured."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view', 'Test Response Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_exception(self):
    """The exception-hook's own error replaces the view error; outer exception hook skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Exception Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_null_view(self):
    """Raising in process_request prevents the None-returning view from running."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadRequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test Request Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_null_view(self):
    """Raising in process_view prevents the None-returning view from running."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test View Exception'])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_null_view(self):
    """Both the null-view error and the response-hook error are captured."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
            'Test Response Exception'
        ])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware_null_view(self):
    """The null-view ValueError is not routed to the bad exception hook."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
        ],
        ValueError())
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_permission_denied(self):
    """A middleware whose process_request raises short-circuits the
    permission_denied view; later middleware never sees the request."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadRequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Request Exception'])

    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_permission_denied(self):
    """A middleware whose process_view raises short-circuits the
    permission_denied view; later middleware's process_view is skipped."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test View Exception'])

    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_permission_denied(self):
    """The permission_denied view plus a middleware whose process_response
    raises: only the response error is reported."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Response Exception'])

    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_permission_denied(self):
    """The permission_denied view plus a middleware whose process_exception
    raises: the exception-handler's own error is what gets reported."""
    pre_middleware = TestMiddleware()
    bad_middleware = BadExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(bad_middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Exception Exception'])

    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
_missing = object()
class RootUrlconfTests(TestCase):
    """Tests for request handling when settings.ROOT_URLCONF is missing."""
    urls = 'middleware_exceptions.urls'

    def test_missing_root_urlconf(self):
        """Deleting ROOT_URLCONF makes request handling raise AttributeError.

        The original code only restored the setting after a successful
        assertion; if assertRaises failed, every later test ran without
        ROOT_URLCONF.  Restore it in a finally block instead.
        """
        try:
            original_ROOT_URLCONF = settings.ROOT_URLCONF
            del settings.ROOT_URLCONF
        except AttributeError:
            # The setting was already absent; remember that with a sentinel
            # so we don't create it on restore.
            original_ROOT_URLCONF = _missing
        try:
            self.assertRaises(AttributeError,
                self.client.get, "/middleware_exceptions/view/"
            )
        finally:
            if original_ROOT_URLCONF is not _missing:
                settings.ROOT_URLCONF = original_ROOT_URLCONF
|
wuzheng-sjtu/FastFPN | refs/heads/master | libs/layers/roi.py | 2 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import libs.boxes.cython_bbox as cython_bbox
import libs.configs.config_v1 as cfg
from libs.boxes.bbox_transform import bbox_transform, bbox_transform_inv, clip_boxes
from libs.logs.log import LOG
# FLAGS = tf.app.flags.FLAGS
_DEBUG = False
def encode(gt_boxes, rois, num_classes):
    """Matching and Encoding groundtruth boxes (gt_boxes) into learning targets to boxes

    Sampling

    Parameters
    ---------
    gt_boxes an array of shape (G x 5), [x1, y1, x2, y2, class]
    rois an array of shape (R x 4), [x1, y1, x2, y2]
    num_classes: scalar, number of classes

    Returns
    --------
    labels: N array; -1 means "ignore", 0 is background, values in
        [1, num_classes) are foreground classes
    bbox_targets: of shape (N, Kx4) regression targets
    bbox_inside_weights: of shape (N, Kx4), in {0, 1} indicating which class is assigned.
    """
    all_rois = rois
    num_rois = rois.shape[0]
    if gt_boxes.size > 0:
        # R x G matrix
        # NOTE(review): np.float is removed in modern NumPy; use float or
        # np.float64 when upgrading.
        overlaps = cython_bbox.bbox_overlaps(
            np.ascontiguousarray(all_rois[:, 0:4], dtype=np.float),
            np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float))
        gt_assignment = overlaps.argmax(axis=1)  # R
        # max_overlaps = overlaps.max(axis=1) # R
        max_overlaps = overlaps[np.arange(rois.shape[0]), gt_assignment]
        # note: this will assign every rois with a positive label
        # labels = gt_boxes[gt_assignment, 4]
        # Start with every roi marked "ignore" (-1).
        labels = np.zeros([num_rois], dtype=np.float32)
        labels[:] = -1

        # if _DEBUG:
        #     print ('gt_assignment')
        #     print (gt_assignment)

        # sample rois as to 1:3
        # Foreground: rois overlapping some gt box by at least fg_threshold,
        # randomly subsampled down to the configured fraction per image.
        fg_inds = np.where(max_overlaps >= cfg.FLAGS.fg_threshold)[0]
        fg_rois = int(min(fg_inds.size, cfg.FLAGS.rois_per_image * cfg.FLAGS.fg_roi_fraction))
        if fg_inds.size > 0 and fg_rois < fg_inds.size:
            fg_inds = np.random.choice(fg_inds, size=fg_rois, replace=False)
        labels[fg_inds] = gt_boxes[gt_assignment[fg_inds], 4]

        # TODO: sampling strategy
        # Background: rois below bg_threshold, sampled to roughly 3x the
        # number of foregrounds but at least 64.
        bg_inds = np.where((max_overlaps < cfg.FLAGS.bg_threshold))[0]
        bg_rois = max(min(cfg.FLAGS.rois_per_image - fg_rois, fg_rois * 3), 64)
        if bg_inds.size > 0 and bg_rois < bg_inds.size:
            bg_inds = np.random.choice(bg_inds, size=bg_rois, replace=False)
        labels[bg_inds] = 0

        # ignore rois with overlaps between fg_threshold and bg_threshold
        ignore_inds = np.where(((max_overlaps > cfg.FLAGS.bg_threshold) &\
                                (max_overlaps < cfg.FLAGS.fg_threshold)))[0]
        labels[ignore_inds] = -1

        keep_inds = np.append(fg_inds, bg_inds)
        if _DEBUG:
            print ('keep_inds')
            print (keep_inds)
            print ('fg_inds')
            print (fg_inds)
            print ('bg_inds')
            print (bg_inds)
            print ('bg_rois:', bg_rois)
            print ('cfg.FLAGS.bg_threshold:', cfg.FLAGS.bg_threshold)
            # print (max_overlaps)

        LOG('ROIEncoder: %d positive rois, %d negative rois' % (len(fg_inds), len(bg_inds)))

        # Compute regression targets only for the kept rois, then scatter
        # them back to the full roi list (non-kept rows stay zero).
        bbox_targets, bbox_inside_weights = _compute_targets(
            rois[keep_inds, 0:4], gt_boxes[gt_assignment[keep_inds], :4], labels[keep_inds], num_classes)
        bbox_targets = _unmap(bbox_targets, num_rois, keep_inds, 0)
        bbox_inside_weights = _unmap(bbox_inside_weights, num_rois, keep_inds, 0)
    else:
        # there is no gt
        # Without groundtruth everything is background; keep only bg_rois of
        # them and mark the rest "ignore".
        labels = np.zeros((num_rois, ), np.float32)
        bbox_targets = np.zeros((num_rois, 4 * num_classes), np.float32)
        bbox_inside_weights = np.zeros((num_rois, 4 * num_classes), np.float32)
        bg_rois = min(int(cfg.FLAGS.rois_per_image * (1 - cfg.FLAGS.fg_roi_fraction)), 64)
        if bg_rois < num_rois:
            bg_inds = np.arange(num_rois)
            ignore_inds = np.random.choice(bg_inds, size=num_rois - bg_rois, replace=False)
            labels[ignore_inds] = -1

    return labels, bbox_targets, bbox_inside_weights
def decode(boxes, scores, rois, ih, iw):
    """Decode per-class box regressions into one final box per roi.

    Parameters
    ---------
    boxes: array of shape (R, Kx4), per-class regression deltas
    scores: array of shape (R, K), per-class scores
    rois: array of shape (R, 4), [x1, y1, x2, y2]
    ih, iw: image height and width used to clip the decoded boxes

    Returns
    --------
    final_boxes: (R, 4) float32 boxes for each roi's best class
    classes: (R,) int32 argmax class per roi
    scores: (R,) best score per roi
    """
    # Apply the predicted deltas to the rois (one 4-vector per class).
    decoded = bbox_transform_inv(rois, deltas=boxes)

    # Keep only the highest-scoring class per roi.
    classes = np.argmax(scores, axis=1).astype(np.int32)
    scores = np.max(scores, axis=1)

    num_rois = decoded.shape[0]
    final_boxes = np.zeros((num_rois, 4), dtype=np.float32)
    for row, cls in enumerate(classes):
        col = cls * 4
        final_boxes[row, :] = decoded[row, col:col + 4]

    final_boxes = clip_boxes(final_boxes, (ih, iw))
    return final_boxes, classes, scores
def _compute_targets(ex_rois, gt_rois, labels, num_classes):
    """
    This function expands those targets into the 4-of-4*K representation used
    by the network (i.e. only one class has non-zero targets).

    Returns:
        bbox_target (ndarray): N x 4K blob of regression targets
        bbox_inside_weights (ndarray): N x 4K blob of loss weights
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4

    deltas = bbox_transform(ex_rois, gt_rois)

    bbox_targets = np.zeros((labels.size, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32)

    # Only foreground rois (label > 0) contribute regression targets; each
    # writes into its own class's 4-column slot.
    for row in np.where(labels > 0)[0]:
        col = 4 * int(labels[row])
        bbox_targets[row, col:col + 4] = deltas[row, 0:4]
        bbox_inside_weights[row, col:col + 4] = 1
    return bbox_targets, bbox_inside_weights
def _unmap(data, count, inds, fill=0):
""" Unmap a subset of item (data) back to the original set of items (of
size count) """
if len(data.shape) == 1:
ret = np.empty((count,), dtype=np.float32)
ret.fill(fill)
ret[inds] = data
else:
ret = np.empty((count,) + data.shape[1:], dtype=np.float32)
ret.fill(fill)
ret[inds, :] = data
return ret
if __name__ == '__main__':
    # Smoke test: generate random groundtruth boxes, jitter them into rois,
    # then check that encode() followed by decode() roughly recovers them.
    cfg.FLAGS.fg_threshold = 0.1
    classes = np.random.randint(0, 3, (10, 1))
    boxes = np.random.randint(10, 50, (10, 2))
    s = np.random.randint(10, 20, (10, 2))
    s = boxes + s
    boxes = np.concatenate((boxes, s), axis=1)
    gt_boxes = np.hstack((boxes, classes))
    noise = np.random.randint(-3, 3, (10, 4))
    rois = gt_boxes[:, :4] + noise
    # Bug fix: encode() returns 3 values (labels, bbox_targets,
    # bbox_inside_weights); the original unpacked 4 and crashed with a
    # ValueError before printing anything.
    labels, bbox_targets, bbox_inside_weights = encode(gt_boxes, rois, num_classes=3)
    print(labels)
    print(bbox_inside_weights)
    ls = np.zeros((labels.shape[0], 3))
    for i in range(labels.shape[0]):
        # Bug fix: labels is float32; cast before using it as a column index.
        ls[i, int(labels[i])] = 1
    final_boxes, classes, scores = decode(bbox_targets, ls, rois, 100, 100)
    print('gt_boxes:\n', gt_boxes)
    print('final boxes:\n', np.hstack((final_boxes, np.expand_dims(classes, axis=1))).astype(np.int32))
    # print (final_boxes.astype(np.int32))
|
yxxyun/shadowsocks | refs/heads/master | shadowsocks/asyncdns.py | 655 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
    """Encode a dotted hostname (bytes) into DNS wire format:
    length-prefixed labels terminated by a zero byte, or None when any
    single label exceeds 63 octets."""
    address = address.strip(b'.')
    parts = []
    for label in address.split(b'.'):
        length = len(label)
        if length > 63:
            # a single DNS label may not exceed 63 octets
            return None
        parts.append(common.chr(length))
        parts.append(label)
    parts.append(b'\0')
    return b''.join(parts)
def build_request(address, qtype):
    """Build a DNS query packet for `address`/`qtype` with a random id."""
    request_id = os.urandom(2)
    # Flags byte 0x01 sets only RD (recursion desired); QDCOUNT=1,
    # no answer/authority/additional records.
    header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
    question = build_address(address) + struct.pack('!HH', qtype, QCLASS_IN)
    return request_id + header + question
def parse_ip(addrtype, data, length, offset):
    """Decode the RDATA of a resource record: dotted string for A/AAAA,
    a parsed domain name for CNAME/NS, raw bytes otherwise."""
    rdata = data[offset:offset + length]
    if addrtype == QTYPE_A:
        return socket.inet_ntop(socket.AF_INET, rdata)
    if addrtype == QTYPE_AAAA:
        return socket.inet_ntop(socket.AF_INET6, rdata)
    if addrtype in (QTYPE_CNAME, QTYPE_NS):
        # Names may use compression pointers, so parse against the whole
        # packet rather than the sliced RDATA.
        return parse_name(data, offset)[1]
    return rdata
def parse_name(data, offset):
    """Parse a (possibly compressed) domain name starting at `offset`.

    Returns (consumed, name): the number of bytes consumed at `offset`
    and the dotted name as bytes.
    """
    p = offset
    labels = []
    l = common.ord(data[p])
    while l > 0:
        if (l & (128 + 64)) == (128 + 64):
            # pointer
            # Two high bits set (0xC0) mark a compression pointer; the
            # remaining 14 bits are the offset of the rest of the name.
            pointer = struct.unpack('!H', data[p:p + 2])[0]
            pointer &= 0x3FFF
            r = parse_name(data, pointer)
            labels.append(r[1])
            p += 2
            # pointer is the end
            return p - offset, b'.'.join(labels)
        else:
            # Plain label: one length byte followed by that many bytes.
            labels.append(data[p + 1:p + 1 + l])
            p += 1 + l
            l = common.ord(data[p])
    return p - offset + 1, b'.'.join(labels)
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
    """Parse one resource record (or, with question=True, one question
    entry) at `offset`.

    Returns (bytes_consumed, record).  Note: answer records are 5-tuples
    (name, rdata, type, class, ttl) while question entries are 6-tuples
    padded with Nones; callers here only index elements 0-3.
    """
    nlen, name = parse_name(data, offset)
    if not question:
        record_type, record_class, record_ttl, record_rdlength = struct.unpack(
            '!HHiH', data[offset + nlen:offset + nlen + 10]
        )
        ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
        return nlen + 10 + record_rdlength, \
            (name, ip, record_type, record_class, record_ttl)
    else:
        # Question entries carry no TTL/RDATA — just QTYPE and QCLASS.
        record_type, record_class = struct.unpack(
            '!HH', data[offset + nlen:offset + nlen + 4]
        )
        return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
    """Parse the fixed 12-byte DNS header.

    Returns (id, qr, tc, ra, rcode, qdcount, ancount, nscount, arcount),
    or None when fewer than 12 bytes are available.  Flag fields keep
    their raw masked bit values (e.g. qr is 0 or 128), matching how the
    callers test them for truthiness.
    """
    if len(data) < 12:
        return None
    (res_id, flags_hi, flags_lo,
     res_qdcount, res_ancount,
     res_nscount, res_arcount) = struct.unpack('!HBBHHHH', data[:12])
    res_qr = flags_hi & 128     # response (QR) bit
    res_tc = flags_hi & 2       # truncated (TC) bit
    res_ra = flags_lo & 128     # recursion-available (RA) bit
    res_rcode = flags_lo & 15   # response code
    # assert res_tc == 0
    # assert res_rcode in [0, 3]
    return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
            res_ancount, res_nscount, res_arcount)
def parse_response(data):
    """Parse a full DNS response packet into a DNSResponse.

    Returns None for short or malformed input; any exception raised while
    parsing is printed and swallowed.
    """
    try:
        if len(data) >= 12:
            header = parse_header(data)
            if not header:
                return None
            res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
                res_ancount, res_nscount, res_arcount = header

            qds = []
            ans = []
            offset = 12
            for i in range(0, res_qdcount):
                l, r = parse_record(data, offset, True)
                offset += l
                if r:
                    qds.append(r)
            for i in range(0, res_ancount):
                l, r = parse_record(data, offset)
                offset += l
                if r:
                    ans.append(r)
            # Authority and additional sections are consumed to advance the
            # offset but their contents are discarded.
            for i in range(0, res_nscount):
                l, r = parse_record(data, offset)
                offset += l
            for i in range(0, res_arcount):
                l, r = parse_record(data, offset)
                offset += l
            response = DNSResponse()
            if qds:
                response.hostname = qds[0][0]
            for an in qds:
                response.questions.append((an[1], an[2], an[3]))
            for an in ans:
                response.answers.append((an[1], an[2], an[3]))
            return response
    except Exception as e:
        shell.print_exception(e)
        return None
def is_valid_hostname(hostname):
    """Return True if `hostname` (bytes) is a syntactically valid hostname.

    Checks total length (<= 255), strips a single trailing dot, and
    validates every label against VALID_HOSTNAME (1-63 alphanumeric or
    hyphen characters, no leading/trailing hyphen).
    """
    if len(hostname) > 255:
        return False
    # Bug fix: `hostname[-1] == b'.'` is always False on Python 3 because
    # indexing bytes yields an int; compare a one-byte slice instead.
    # (This also avoids an IndexError on empty input.)
    if hostname[-1:] == b'.':
        hostname = hostname[:-1]
    return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
    """Parsed DNS response: queried hostname plus question/answer lists."""

    def __init__(self):
        # each entry in questions/answers: (addr, type, class)
        self.hostname = None
        self.questions = []
        self.answers = []

    def __str__(self):
        return '{0}: {1}'.format(self.hostname, self.answers)
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
    """Asynchronous DNS resolver driven by an external event loop.

    Tries an A query first and falls back to AAAA when no A record comes
    back; results are cached in an LRU cache and /etc/hosts entries are
    served without touching the network.
    """

    def __init__(self, server_list=None):
        self._loop = None
        self._hosts = {}            # hostname -> ip parsed from hosts file
        self._hostname_status = {}  # hostname -> STATUS_IPV4 / STATUS_IPV6
        self._hostname_to_cb = {}   # hostname -> [callback, ...]
        self._cb_to_hostname = {}   # reverse map for remove_callback()
        self._cache = lru_cache.LRUCache(timeout=300)
        self._sock = None
        if server_list is None:
            self._servers = None
            self._parse_resolv()
        else:
            self._servers = server_list
        self._parse_hosts()
        # TODO monitor hosts change and reload hosts
        # TODO parse /etc/gai.conf and follow its rules

    def _parse_resolv(self):
        """Collect IPv4 nameservers from /etc/resolv.conf; fall back to
        Google public DNS when none are found or the file is unreadable."""
        self._servers = []
        try:
            with open('/etc/resolv.conf', 'rb') as f:
                content = f.readlines()
                for line in content:
                    line = line.strip()
                    if line:
                        if line.startswith(b'nameserver'):
                            parts = line.split()
                            if len(parts) >= 2:
                                server = parts[1]
                                if common.is_ip(server) == socket.AF_INET:
                                    if type(server) != str:
                                        server = server.decode('utf8')
                                    self._servers.append(server)
        except IOError:
            pass
        if not self._servers:
            self._servers = ['8.8.4.4', '8.8.8.8']

    def _parse_hosts(self):
        """Load hostname -> ip mappings from the system hosts file; on
        failure only 'localhost' is mapped."""
        etc_path = '/etc/hosts'
        if 'WINDIR' in os.environ:
            etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
        try:
            with open(etc_path, 'rb') as f:
                for line in f.readlines():
                    line = line.strip()
                    parts = line.split()
                    if len(parts) >= 2:
                        ip = parts[0]
                        if common.is_ip(ip):
                            for i in range(1, len(parts)):
                                hostname = parts[i]
                                if hostname:
                                    self._hosts[hostname] = ip
        except IOError:
            self._hosts['localhost'] = '127.0.0.1'

    def add_to_loop(self, loop):
        """Create the UDP socket and register it (plus the periodic cache
        sweep) on `loop`; may only be called once."""
        if self._loop:
            raise Exception('already add to loop')
        self._loop = loop
        # TODO when dns server is IPv6
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                   socket.SOL_UDP)
        self._sock.setblocking(False)
        loop.add(self._sock, eventloop.POLL_IN, self)
        loop.add_periodic(self.handle_periodic)

    def _call_callback(self, hostname, ip, error=None):
        """Invoke and clear all callbacks registered for `hostname`."""
        callbacks = self._hostname_to_cb.get(hostname, [])
        for callback in callbacks:
            if callback in self._cb_to_hostname:
                del self._cb_to_hostname[callback]
            if ip or error:
                callback((hostname, ip), error)
            else:
                callback((hostname, None),
                         Exception('unknown hostname %s' % hostname))
        if hostname in self._hostname_to_cb:
            del self._hostname_to_cb[hostname]
        if hostname in self._hostname_status:
            del self._hostname_status[hostname]

    def _handle_data(self, data):
        """Process one DNS response packet: cache the first A/AAAA answer,
        or retry with an AAAA query when the A query returned nothing."""
        response = parse_response(data)
        if response and response.hostname:
            hostname = response.hostname
            ip = None
            for answer in response.answers:
                if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
                        answer[2] == QCLASS_IN:
                    ip = answer[0]
                    break
            if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
                    == STATUS_IPV4:
                # No address from the A query: escalate to AAAA.
                self._hostname_status[hostname] = STATUS_IPV6
                self._send_req(hostname, QTYPE_AAAA)
            else:
                if ip:
                    self._cache[hostname] = ip
                    self._call_callback(hostname, ip)
                elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
                    # AAAA also failed: report resolution failure.
                    for question in response.questions:
                        if question[1] == QTYPE_AAAA:
                            self._call_callback(hostname, None)
                            break

    def handle_event(self, sock, fd, event):
        """Event-loop hook: recreate the socket on POLL_ERR, otherwise read
        one UDP packet and dispatch it (packets from unknown peers are
        dropped)."""
        if sock != self._sock:
            return
        if event & eventloop.POLL_ERR:
            logging.error('dns socket err')
            self._loop.remove(self._sock)
            self._sock.close()
            # TODO when dns server is IPv6
            self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                       socket.SOL_UDP)
            self._sock.setblocking(False)
            self._loop.add(self._sock, eventloop.POLL_IN, self)
        else:
            data, addr = sock.recvfrom(1024)
            if addr[0] not in self._servers:
                logging.warn('received a packet other than our dns')
                return
            self._handle_data(data)

    def handle_periodic(self):
        """Periodic hook: expire stale cache entries."""
        self._cache.sweep()

    def remove_callback(self, callback):
        """Unregister a pending callback (e.g. when its owner is closed)."""
        hostname = self._cb_to_hostname.get(callback)
        if hostname:
            del self._cb_to_hostname[callback]
            arr = self._hostname_to_cb.get(hostname, None)
            if arr:
                arr.remove(callback)
                if not arr:
                    del self._hostname_to_cb[hostname]
                    if hostname in self._hostname_status:
                        del self._hostname_status[hostname]

    def _send_req(self, hostname, qtype):
        """Send one query for `hostname`/`qtype` to every configured
        nameserver."""
        req = build_request(hostname, qtype)
        for server in self._servers:
            logging.debug('resolving %s with type %d using server %s',
                          hostname, qtype, server)
            self._sock.sendto(req, (server, 53))

    def resolve(self, hostname, callback):
        """Resolve `hostname`, invoking callback((hostname, ip), error).

        Literal IPs, hosts-file entries and cached results are answered
        synchronously; otherwise an A query is sent and the callback is
        queued until a response (or failure) arrives.
        """
        if type(hostname) != bytes:
            hostname = hostname.encode('utf8')
        if not hostname:
            callback(None, Exception('empty hostname'))
        elif common.is_ip(hostname):
            callback((hostname, hostname), None)
        elif hostname in self._hosts:
            logging.debug('hit hosts: %s', hostname)
            ip = self._hosts[hostname]
            callback((hostname, ip), None)
        elif hostname in self._cache:
            logging.debug('hit cache: %s', hostname)
            ip = self._cache[hostname]
            callback((hostname, ip), None)
        else:
            if not is_valid_hostname(hostname):
                callback(None, Exception('invalid hostname: %s' % hostname))
                return
            arr = self._hostname_to_cb.get(hostname, None)
            if not arr:
                self._hostname_status[hostname] = STATUS_IPV4
                self._send_req(hostname, QTYPE_A)
                self._hostname_to_cb[hostname] = [callback]
                self._cb_to_hostname[callback] = hostname
            else:
                arr.append(callback)
                # TODO send again only if waited too long
                self._send_req(hostname, QTYPE_A)

    def close(self):
        """Tear down the socket and deregister from the event loop."""
        if self._sock:
            if self._loop:
                self._loop.remove_periodic(self.handle_periodic)
                self._loop.remove(self._sock)
            self._sock.close()
            self._sock = None
def test():
    """Manual smoke test: resolve a mix of valid, invalid and over-long
    hostnames, stopping the loop once all 9 callbacks have fired."""
    dns_resolver = DNSResolver()
    loop = eventloop.EventLoop()
    dns_resolver.add_to_loop(loop)

    global counter
    counter = 0

    def make_callback():
        global counter

        def callback(result, error):
            global counter
            # TODO: what can we assert?
            print(result, error)
            counter += 1
            if counter == 9:
                dns_resolver.close()
                loop.stop()
        a_callback = callback
        return a_callback

    # Each call must hand back a distinct callback object, since callbacks
    # are used as dictionary keys inside the resolver.
    assert(make_callback() != make_callback())

    dns_resolver.resolve(b'google.com', make_callback())
    dns_resolver.resolve('google.com', make_callback())
    dns_resolver.resolve('example.com', make_callback())
    dns_resolver.resolve('ipv6.google.com', make_callback())
    dns_resolver.resolve('www.facebook.com', make_callback())
    dns_resolver.resolve('ns2.google.com', make_callback())
    dns_resolver.resolve('invalid.@!#$%^&$@.hostname', make_callback())
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())

    loop.run()
if __name__ == '__main__':
test()
|
txm/make-good | refs/heads/master | django/views/generic/__init__.py | 493 | from django.views.generic.base import View, TemplateView, RedirectView
from django.views.generic.dates import (ArchiveIndexView, YearArchiveView, MonthArchiveView,
WeekArchiveView, DayArchiveView, TodayArchiveView,
DateDetailView)
from django.views.generic.detail import DetailView
from django.views.generic.edit import FormView, CreateView, UpdateView, DeleteView
from django.views.generic.list import ListView
class GenericViewError(Exception):
    """A problem in a generic view."""
|
Sun-Wukong/CokeNRum | refs/heads/master | crblog/migrations/0001_initial.py | 12 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: creates the blog Post model (title, text,
    created/published timestamps, author FK to the user model)."""

    dependencies = [
        # The author FK targets whatever AUTH_USER_MODEL points to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                # published_date stays NULL until the post is published.
                ('published_date', models.DateTimeField(null=True, blank=True)),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
aferr/LatticeMemCtl | refs/heads/master | src/arch/x86/isa/insts/simd64/floating_point/compare/compare_and_write_mask.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Placeholder microcode blob: the packed-FP compare instructions listed
# below (PFCMPEQ/PFCMPGT/PFCMPGE) are only named in comments, i.e. no
# implementation is provided yet.
microcode = '''
# PFCMPEQ
# PFCMPGT
# PFCMPGE
'''
|
hujiajie/chromium-crosswalk | refs/heads/master | tools/telemetry/telemetry/internal/platform/power_monitor/powermetrics_power_monitor.py | 8 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import logging
import os
import plistlib
import shutil
import tempfile
import xml.parsers.expat
from telemetry.core import os_version
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.platform import power_monitor
# TODO: rename this class (seems like this is used by mac)
class PowerMetricsPowerMonitor(power_monitor.PowerMonitor):
def __init__(self, backend):
    """Create a monitor that drives /usr/bin/powermetrics through
    `backend` (platform backend used to launch privileged processes)."""
    super(PowerMetricsPowerMonitor, self).__init__()
    self._powermetrics_process = None
    self._backend = backend
    self._output_filename = None
    self._output_directory = None
@property
def binary_path(self):
    """Absolute path of the powermetrics executable."""
    return '/usr/bin/powermetrics'
def StartMonitoringPower(self, browser):
    """Launch powermetrics (elevated) writing plist samples to a temp file;
    blocks until the output file exists so the start is synchronous."""
    self._CheckStart()
    # Empirically powermetrics creates an empty output file immediately upon
    # starting. We detect file creation as a signal that measurement has
    # started. In order to avoid various race conditions in tempfile creation
    # we create a temp directory and have powermetrics create it's output
    # there rather than say, creating a tempfile, deleting it and reusing its
    # name.
    self._output_directory = tempfile.mkdtemp()
    self._output_filename = os.path.join(self._output_directory,
                                         'powermetrics.output')
    args = ['-f', 'plist',
            '-u', self._output_filename,
            '-i0',
            '--show-usage-summary']
    self._powermetrics_process = self._backend.LaunchApplication(
        self.binary_path, args, elevate_privilege=True)

    # Block until output file is written to ensure this function call is
    # synchronous in respect to powermetrics starting.
    def _OutputFileExists():
        return os.path.isfile(self._output_filename)
    util.WaitFor(_OutputFileExists, 1)
@decorators.Cache
def CanMonitorPower(self):
    """Power monitoring needs OS X Mavericks or later plus a launchable
    powermetrics binary; the result is cached by the decorator."""
    mavericks_or_later = (
        self._backend.GetOSVersionName() >= os_version.MAVERICKS)
    binary_path = self.binary_path
    return mavericks_or_later and self._backend.CanLaunchApplication(
        binary_path)
@staticmethod
def _ParsePlistString(plist_string):
    """Wrapper to parse a plist from a string and catch any errors.

    Sometimes powermetrics will exit in the middle of writing it's output,
    empirically it seems that it always writes at least one sample in it's
    entirety so we can safely ignore any errors in it's output.

    Returns:
      Parser output on successful parse, None on parse error.
    """
    try:
        return plistlib.readPlistFromString(plist_string)
    except xml.parsers.expat.ExpatError:
        return None
@staticmethod
def ParsePowerMetricsOutput(powermetrics_output):
    """Parse output of powermetrics command line utility.

    Returns:
      Dictionary in the format returned by StopMonitoringPower(), or an
      empty dict when |powermetrics_output| is empty or unusable (invalid
      plist, VM without power data) - crbug.com/353250 .
    """
    if len(powermetrics_output) == 0:
        logging.warning('powermetrics produced zero length output')
        return {}

    # Container to collect samples for running averages.
    # out_path - list containing the key path in the output dictionary.
    # src_path - list containing the key path to get the data from in
    # powermetrics' output.
    def ConstructMetric(out_path, src_path):
        RunningAverage = collections.namedtuple('RunningAverage', [
            'out_path', 'src_path', 'samples'])
        return RunningAverage(out_path, src_path, [])

    # List of RunningAverage objects specifying metrics we want to aggregate.
    metrics = [
        ConstructMetric(
            ['platform_info', 'average_frequency_hz'],
            ['processor', 'freq_hz']),
        ConstructMetric(
            ['platform_info', 'idle_percent'],
            ['processor', 'packages', 0, 'c_state_ratio'])]

    def DataWithMetricKeyPath(metric, powermetrics_output):
        """Retrieve the sample from powermetrics' output for a given metric.

        Args:
          metric: The RunningAverage object we want to collect a new sample
              for.
          powermetrics_output: Dictionary containing powermetrics output.

        Returns:
          The sample corresponding to |metric|'s keypath."""
        # Get actual data corresponding to key path.
        out_data = powermetrics_output
        for k in metric.src_path:
            out_data = out_data[k]

        assert type(out_data) in [int, float], (
            'Was expecting a number: %s (%s)' % (type(out_data), out_data))
        return float(out_data)

    sample_durations = []
    total_energy_consumption_mwh = 0
    # powermetrics outputs multiple plists separated by null terminators.
    raw_plists = powermetrics_output.split('\0')
    raw_plists = [x for x in raw_plists if len(x) > 0]
    assert len(raw_plists) == 1

    # -------- Examine contents of the plist for system specs. --------
    # Bug fix: the original implementation parsed raw_plists[0] a second
    # time further down ("Parse Data Out of Plists"), duplicating both the
    # work and the error handling; parse once and reuse the result.
    plist = PowerMetricsPowerMonitor._ParsePlistString(raw_plists[0])
    if not plist:
        logging.warning('powermetrics produced invalid output, output length: '
                        '%d', len(powermetrics_output))
        return {}

    # Powermetrics doesn't record power usage when running on a VM.
    hw_model = plist.get('hw_model')
    if hw_model and hw_model.startswith('VMware'):
        return {}

    if 'GPU' in plist:
        metrics.extend([
            ConstructMetric(
                ['component_utilization', 'gpu', 'average_frequency_hz'],
                ['GPU', 0, 'freq_hz']),
            ConstructMetric(
                ['component_utilization', 'gpu', 'idle_percent'],
                ['GPU', 0, 'c_state_ratio'])])

    # There's no way of knowing ahead of time how many cpus and packages the
    # current system has. Iterate over cores and cpus - construct metrics for
    # each one.
    if 'processor' in plist:
        core_dict = plist['processor']['packages'][0]['cores']
        num_cores = len(core_dict)

        cpu_num = 0
        for core_idx in xrange(num_cores):
            num_cpus = len(core_dict[core_idx]['cpus'])
            base_src_path = ['processor', 'packages', 0, 'cores', core_idx]
            for cpu_idx in xrange(num_cpus):
                base_out_path = ['component_utilization', 'cpu%d' % cpu_num]
                # C State ratio is per-package, component CPUs of that package
                # may have different frequencies.
                metrics.append(ConstructMetric(
                    base_out_path + ['average_frequency_hz'],
                    base_src_path + ['cpus', cpu_idx, 'freq_hz']))
                metrics.append(ConstructMetric(
                    base_out_path + ['idle_percent'],
                    base_src_path + ['c_state_ratio']))
                cpu_num += 1

    # -------- Parse Data Out of the Plist --------
    # Duration of this sample.
    sample_duration_ms = int(plist['elapsed_ns']) / 10 ** 6
    sample_durations.append(sample_duration_ms)

    if 'processor' not in plist:
        logging.error("'processor' field not found in plist.")
        return {}
    processor = plist['processor']

    # Convert joules to milliwatt-hours: J / 3600 = Wh, * 10**3 = mWh.
    total_energy_consumption_mwh = (
        (float(processor.get('package_joules', 0)) / 3600.) * 10 ** 3)

    for m in metrics:
        try:
            m.samples.append(DataWithMetricKeyPath(m, plist))
        except KeyError:
            # Old CPUs don't have c-states, so if data is missing, just
            # ignore it.
            logging.info('Field missing from powermetrics output: %s',
                         m.src_path)
            continue

    # -------- Collect and Process Data --------
    out_dict = {}
    out_dict['identifier'] = 'powermetrics'
    out_dict['energy_consumption_mwh'] = total_energy_consumption_mwh

    def StoreMetricAverage(metric, sample_durations, out):
        """Calculate the duration-weighted average of a metric's samples and
        store it in the output at the key path given by metric.out_path.

        Args:
          metric: A RunningAverage object containing samples to average.
          sample_durations: A list which parallels the samples list
              containing the time slice for each sample.
          out: The output dict; the average is stored at metric.out_path.
        """
        if len(metric.samples) == 0:
            return

        assert len(metric.samples) == len(sample_durations)
        avg = 0
        for i in xrange(len(metric.samples)):
            avg += metric.samples[i] * sample_durations[i]
        avg /= sum(sample_durations)

        # Store data in output, creating empty dictionaries as we go.
        for k in metric.out_path[:-1]:
            if not out.has_key(k):
                out[k] = {}
            out = out[k]
        out[metric.out_path[-1]] = avg

    for m in metrics:
        StoreMetricAverage(m, sample_durations, out_dict)
    return out_dict
def _KillPowerMetricsProcess(self):
"""Kill a running powermetrics process."""
try:
if self._powermetrics_process.poll() is None:
self._powermetrics_process.terminate()
except OSError as e:
logging.warning(
'Error when trying to terminate powermetric process: %s', repr(e))
if self._powermetrics_process.poll() is None:
# terminate() can fail when Powermetrics does not have the SetUID set.
self._backend.LaunchApplication(
'/usr/bin/pkill',
['-SIGTERM', os.path.basename(self.binary_path)],
elevate_privilege=True)
  def StopMonitoringPower(self):
    """Stop powermetrics sampling and parse the collected output.

    Returns:
      A dict of power metrics parsed from the powermetrics plist output,
      or the result of parsing the empty string if collection failed.
    """
    self._CheckStop()
    # Tell powermetrics to take an immediate sample.
    try:
      self._KillPowerMetricsProcess()
      (power_stdout, power_stderr) = self._powermetrics_process.communicate()
      returncode = self._powermetrics_process.returncode
      # -15 is SIGTERM, the expected result of our own kill above.
      assert returncode in [0, -15], (
          """powermetrics error
          return code=%d
          stdout=(%s)
          stderr=(%s)""" % (returncode, power_stdout, power_stderr))
      with open(self._output_filename, 'rb') as output_file:
        powermetrics_output = output_file.read()
      return PowerMetricsPowerMonitor.ParsePowerMetricsOutput(
          powermetrics_output)
    except Exception as e:
      # Best-effort: log and fall back to an empty parse rather than crash.
      logging.warning(
          'Error when trying to collect power monitoring data: %s', repr(e))
      return PowerMetricsPowerMonitor.ParsePowerMetricsOutput('')
    finally:
      # Always reclaim the temp directory and reset monitoring state.
      shutil.rmtree(self._output_directory)
      self._output_directory = None
      self._output_filename = None
      self._powermetrics_process = None
|
louyihua/edx-platform | refs/heads/master | cms/djangoapps/contentstore/features/course_import.py | 176 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
import os
from lettuce import world, step
from django.conf import settings
def import_file(filename):
    """Upload a course archive from the test fixtures and run the import.

    After submitting the import form, navigates to the course outline so
    subsequent steps can assert on the imported content.
    """
    # The file input is hidden by default; reveal it so the browser driver
    # can attach a file to it.
    world.browser.execute_script("$('input.file-input').css('display', 'block')")
    path = os.path.join(settings.COMMON_TEST_DATA_ROOT, "imports", filename)
    world.browser.attach_file('course-data', os.path.abspath(path))
    world.css_click('input.submit-button')
    # Go to course outline
    world.click_course_content()
    outline_css = 'li.nav-course-courseware-outline a'
    world.css_click(outline_css)
@step('I go to the import page$')
def go_to_import(step):
    """Lettuce step: open the course import page via the Tools menu."""
    menu_css = 'li.nav-course-tools'
    import_css = 'li.nav-course-tools-import a'
    world.css_click(menu_css)
    world.css_click(import_css)
|
AllMyChanges/allmychanges.com | refs/heads/master | allmychanges/management/commands/load_migrated.py | 1 | # coding: utf-8
import os
import shutil
import json
from django.core.management.base import BaseCommand
from django.conf import settings
from twiggy_goodies.threading import log
from twiggy_goodies.django import LogMixin
from allmychanges.downloaders import guess_downloaders
from allmychanges.models import (
Changelog)
from allmychanges.utils import (
update_fields)
from clint.textui import progress
# Maps the legacy downloader identifiers stored on Changelog.downloader to
# the namespaced downloader names used after the migration. Note both
# 'http' and 'rechttp' collapse to 'http'; recursion becomes a setting.
DOWNLOADERS_MAP = {
    u'feed': 'feed',
    u'git': 'vcs.git',
    u'github_releases': 'github_releases',
    u'git_commits': 'vcs.git_commits',
    u'hg': 'vcs.hg',
    u'http': 'http',
    u'rechttp': 'http',
    u'google_play': 'google_play',
    u'itunes': 'appstore',
}
def parse_package_name(name):
    """Parse a package identifier string into Changelog lookup kwargs.

    A purely numeric string is treated as a primary key, a string with a
    slash as "namespace/name", and anything else as a bare name.
    """
    try:
        pk = int(name)
    except ValueError:
        pass
    else:
        return {'pk': pk}
    if '/' not in name:
        return {'name': name}
    namespace, _, rest = name.partition('/')
    return {'namespace': namespace, 'name': rest}
def cleanup_source(source):
    """Strip a downloader prefix like ``git+`` from a source URL, if any."""
    head, sep, tail = source.partition('+')
    return tail if sep else head
def migrate_settings(downloader, ch):
    """Split a changelog's search/ignore lists into downloader settings.

    For the ``http`` downloader, absolute http(s) URLs from the changelog's
    search and ignore lists are moved into the per-downloader settings dict
    (and ``recursive`` is set when the previous downloader was ``rechttp``);
    the remaining lines stay in the generic lists.

    Args:
        downloader: new downloader name (e.g. ``'http'``), possibly unicode.
        ch: Changelog-like object; reads ``downloader``, ``search_list``,
            ``ignore_list`` and ``xslt``.

    Returns:
        Tuple ``(downloader_settings, search_list, ignore_list, xslt)``
        where both lists are newline-joined strings.
    """
    import re
    downloader_settings = {}
    is_http = re.compile(r'^https?://.*')
    search_list = ch.search_list.split('\n')
    ignore_list = ch.ignore_list.split('\n')
    # BUG FIX: this was `downloader is 'http'`, an identity check that is
    # False for unicode downloader names (the DOWNLOADERS_MAP values), so
    # the branch never ran; `==` is the intended comparison.
    if downloader == 'http':
        # Absolute URLs belong in the downloader-specific settings.
        downloader_search_list = [l for l in search_list if is_http.match(l)]
        if downloader_search_list:
            downloader_settings['search_list'] = downloader_search_list
        downloader_ignore_list = [l for l in ignore_list if is_http.match(l)]
        if downloader_ignore_list:
            downloader_settings['ignore_list'] = downloader_ignore_list
        if ch.downloader == 'rechttp':
            downloader_settings['recursive'] = True
        # Keep only the non-URL lines in the generic lists.
        search_list = [l for l in search_list if not is_http.match(l)]
        ignore_list = [l for l in ignore_list if not is_http.match(l)]
    # BUG FIX: previously the non-http path returned the raw split lists
    # instead of strings; joining here keeps the return type consistent
    # (for non-http input, join(split(x)) == x).
    return (downloader_settings,
            u'\n'.join(search_list),
            u'\n'.join(ignore_list),
            ch.xslt)
def migrate(ch):
    """Migrate one Changelog to the multi-downloader schema.

    Returns a short status string describing what happened; used by the
    calling management command for reporting.
    """
    with log.name_and_fields('migrator',
                             changelog=u'{0.namespace}/{0.name}'.format(ch)):
        # Stale git caches can confuse guess_downloaders; clear them first.
        cache_dir = os.path.join(settings.TEMP_DIR, 'git-cache')
        if os.path.exists(cache_dir):
            log.info('Removing cache_dir')
            shutil.rmtree(cache_dir)
        if not ch.name:
            log.info('Has no name')
            return 'has no name'
        # An already-populated `downloaders` field means a previous run
        # migrated this changelog.
        if not ch.downloaders:
            log.info('Migrating')
            downloaders = list(guess_downloaders(ch.source))
            downloader = DOWNLOADERS_MAP.get(ch.downloader)
            if downloader and downloaders:
                # Only migrate when the mapped downloader is one the
                # guesser also considers plausible for this source.
                downloader_names = set(d['name'] for d in downloaders)
                if downloader not in downloader_names:
                    log.info('Downloader "{0}" is not in the list "{1}"'.format(
                        downloader,
                        ', '.join(downloader_names)))
                    log.info('Done 1')
                    return 'downloader not in guessed'
                if downloader == 'git':
                    # Changelogs whose versions were built from VCS history
                    # should use the commit-based downloader instead.
                    versions_sources = set(
                        v.source.lower()
                        for v in ch.versions.all())
                    if 'vcs' in versions_sources:
                        downloader = 'vcs.git_commits'
                source = cleanup_source(ch.source)
                # NOTE(review): `xslt` is unpacked but never passed to
                # update_fields below — confirm whether that is intentional.
                (downloader_settings,
                 search_list,
                 ignore_list,
                 xslt) = migrate_settings(downloader, ch)
                try:
                    update_fields(ch,
                                  source=source,
                                  downloaders=downloaders,
                                  downloader=downloader,
                                  downloader_settings=downloader_settings,
                                  search_list=search_list,
                                  ignore_list=ignore_list)
                except Exception as e:
                    # Unique-constraint clashes are reported, not fatal.
                    if 'Duplicate entry' in str(e):
                        log.trace().error('Duplicate error')
                        return 'duplicate error'
                    raise
                log.info('Downloader is "{0}"'.format(downloader))
            else:
                log.info(('No downloader or downloaders '
                          'and original downloader is "{0}"').format(
                    ch.downloader))
        else:
            log.info('Seems that changelog already migrated')
            return 'already was migrated'
        log.info('Done 2')
        return 'migrated'
class Command(LogMixin, BaseCommand):
    help = u"""Download package sources into a temporary directory."""
    def handle(self, *args, **options):
        """Apply field updates from migration.json to existing Changelogs."""
        filename = 'migration.json'
        with open(filename, 'r') as f:
            data = json.load(f)
        for item in progress.bar(data):
            try:
                ch = Changelog.objects.get(pk=item['pk'])
            except Changelog.DoesNotExist:
                print 'does not exists', item['pk']
                continue
            # NOTE(review): this compares ch.name twice; given the message
            # below, the second clause was probably meant to compare
            # ch.namespace against item['namespace'] — confirm and fix.
            if ch.name != item['name'] or ch.name != item['name']:
                print 'name or namespace are not equal to the database for {0}'.format(item['pk'])
                continue
            try:
                if 'pk' in item:
                    item.pop('pk')
                update_fields(ch, **item)
            except Exception as e:
                # NOTE(review): non-"Duplicate entry" exceptions are
                # silently swallowed here (no re-raise), unlike migrate();
                # confirm this is intentional.
                if 'Duplicate entry' in str(e):
                    log.trace().error('Duplicate error')
                    print 'duplicate error', ch.id
                    continue
|
damonkohler/sl4a | refs/heads/master | python/src/Lib/test/relimport.py | 400 | from .test_import import *
|
dsdinter/learning-spark-examples | refs/heads/master | src/python/BasicAvg.py | 42 | """
>>> from pyspark.context import SparkContext
>>> sc = SparkContext('local', 'test')
>>> b = sc.parallelize([1, 2, 3, 4])
>>> basicAvg(b)
2.5
"""
import sys
from pyspark import SparkContext
def basicAvg(nums):
    """Compute the average of an RDD of numbers via a (sum, count) fold."""
    to_pair = lambda value: (value, 1)
    add_pairs = lambda left, right: (left[0] + right[0], left[1] + right[1])
    total, count = nums.map(to_pair).fold((0, 0), add_pairs)
    return total / float(count)
if __name__ == "__main__":
    # Optional first CLI argument selects the Spark master (default: local).
    master = "local"
    if len(sys.argv) == 2:
        master = sys.argv[1]
    sc = SparkContext(master, "Sum")
    nums = sc.parallelize([1, 2, 3, 4])
    avg = basicAvg(nums)
    print avg
|
curtacircuitos/pcb-tools | refs/heads/master | gerber/tests/test_cam.py | 1 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Hamilton Kibbe <ham@hamiltonkib.be>
import pytest
from ..cam import CamFile, FileSettings
def test_filesettings_defaults():
    """FileSettings attributes carry the documented default values."""
    settings = FileSettings()
    defaults = {
        "format": (2, 5),
        "notation": "absolute",
        "zero_suppression": "trailing",
        "units": "inch",
    }
    for attr, value in defaults.items():
        assert getattr(settings, attr) == value
def test_filesettings_dict():
    """FileSettings exposes the same defaults through dict-style lookup."""
    settings = FileSettings()
    defaults = {
        "format": (2, 5),
        "notation": "absolute",
        "zero_suppression": "trailing",
        "units": "inch",
    }
    for key, value in defaults.items():
        assert settings[key] == value
def test_filesettings_assign():
    """Plain attribute assignment on FileSettings is unvalidated."""
    settings = FileSettings()
    assignments = [
        ("units", "test1"),
        ("notation", "test2"),
        ("zero_suppression", "test3"),
        ("format", "test4"),
    ]
    for attr, value in assignments:
        setattr(settings, attr, value)
    for attr, value in assignments:
        assert getattr(settings, attr) == value
def test_filesettings_dict_assign():
    """Dict-style assignment accepts valid values and updates attributes."""
    settings = FileSettings()
    updates = [
        ("units", "metric"),
        ("notation", "incremental"),
        ("zero_suppression", "leading"),
        ("format", (1, 2)),
    ]
    for key, value in updates:
        settings[key] = value
    for key, value in updates:
        assert getattr(settings, key) == value
def test_camfile_init():
    """ Smoke test CamFile test
    """
    # CamFile() must be constructible with no arguments.
    cf = CamFile()
def test_camfile_settings():
    """ Test CamFile Default Settings
    """
    # A freshly constructed CamFile carries default FileSettings.
    cf = CamFile()
    assert cf.settings == FileSettings()
def test_bounds_override_smoketest():
    """Accessing .bounds on a bare CamFile must not raise."""
    cf = CamFile()
    cf.bounds
def test_zeros():
    """ Test zero/zero_suppression interaction
    """
    # Defaults: suppressing trailing zeros implies keeping leading ones.
    fs = FileSettings()
    assert fs.zero_suppression == "trailing"
    assert fs.zeros == "leading"
    # Setting zero_suppression flips the derived `zeros` value...
    fs["zero_suppression"] = "leading"
    assert fs.zero_suppression == "leading"
    assert fs.zeros == "trailing"
    fs.zero_suppression = "trailing"
    assert fs.zero_suppression == "trailing"
    assert fs.zeros == "leading"
    # ...and setting `zeros` flips zero_suppression.
    fs["zeros"] = "trailing"
    assert fs.zeros == "trailing"
    assert fs.zero_suppression == "leading"
    fs.zeros = "leading"
    assert fs.zeros == "leading"
    assert fs.zero_suppression == "trailing"
    # Constructor keyword combinations must be mutually consistent too.
    fs = FileSettings(zeros="leading")
    assert fs.zeros == "leading"
    assert fs.zero_suppression == "trailing"
    fs = FileSettings(zero_suppression="leading")
    assert fs.zeros == "trailing"
    assert fs.zero_suppression == "leading"
    fs = FileSettings(zeros="leading", zero_suppression="trailing")
    assert fs.zeros == "leading"
    assert fs.zero_suppression == "trailing"
    fs = FileSettings(zeros="trailing", zero_suppression="leading")
    assert fs.zeros == "trailing"
    assert fs.zero_suppression == "leading"
def test_filesettings_validation():
    """ Test FileSettings constructor argument validation
    """
    # absolute-ish is not a valid notation
    pytest.raises(ValueError, FileSettings, "absolute-ish", "inch", None, (2, 5), None)
    # degrees kelvin isn't a valid unit for a CAM file
    pytest.raises(
        ValueError, FileSettings, "absolute", "degrees kelvin", None, (2, 5), None
    )
    # zeros and zero_suppression must not contradict each other
    pytest.raises(
        ValueError, FileSettings, "absolute", "inch", "leading", (2, 5), "leading"
    )
    # Technnically this should be an error, but Eangle files often do this incorrectly so we
    # allow it
    # pytest.raises(ValueError, FileSettings, 'absolute',
    # 'inch', 'following', (2, 5), None)
    pytest.raises(
        ValueError, FileSettings, "absolute", "inch", None, (2, 5), "following"
    )
    # format must be a 2-tuple
    pytest.raises(ValueError, FileSettings, "absolute", "inch", None, (2, 5, 6), None)
def test_key_validation():
    """Unknown keys and invalid values are rejected by item access."""
    fs = FileSettings()
    pytest.raises(KeyError, fs.__getitem__, "octopus")
    pytest.raises(KeyError, fs.__setitem__, "octopus", "do not care")
    pytest.raises(ValueError, fs.__setitem__, "notation", "absolute-ish")
    pytest.raises(ValueError, fs.__setitem__, "units", "degrees kelvin")
    pytest.raises(ValueError, fs.__setitem__, "zero_suppression", "following")
    pytest.raises(ValueError, fs.__setitem__, "zeros", "following")
    pytest.raises(ValueError, fs.__setitem__, "format", (2, 5, 6))
|
3manuek/scikit-learn | refs/heads/master | sklearn/cluster/tests/test_dbscan.py | 114 | """
Tests for DBSCAN clustering algorithm
"""
import pickle
import numpy as np
from scipy.spatial import distance
from scipy import sparse
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_in
from sklearn.utils.testing import assert_not_in
from sklearn.cluster.dbscan_ import DBSCAN
from sklearn.cluster.dbscan_ import dbscan
from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.metrics.pairwise import pairwise_distances
n_clusters = 3
X = generate_clustered_data(n_clusters=n_clusters)
def test_dbscan_similarity():
    """DBSCAN on a precomputed distance matrix recovers the clusters."""
    # Tests the DBSCAN algorithm with a similarity array.
    # Parameters chosen specifically for this task.
    eps = 0.15
    min_samples = 10
    # Compute similarities
    D = distance.squareform(distance.pdist(X))
    D /= np.max(D)
    # Compute DBSCAN
    core_samples, labels = dbscan(D, metric="precomputed", eps=eps,
                                  min_samples=min_samples)
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - (1 if -1 in labels else 0)
    assert_equal(n_clusters_1, n_clusters)
    # The estimator API must agree with the function API.
    db = DBSCAN(metric="precomputed", eps=eps, min_samples=min_samples)
    labels = db.fit(D).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_2, n_clusters)
def test_dbscan_feature():
    """DBSCAN on raw feature vectors recovers the clusters."""
    # Tests the DBSCAN algorithm with a feature vector array.
    # Parameters chosen specifically for this task.
    # Different eps to other test, because distance is not normalised.
    eps = 0.8
    min_samples = 10
    metric = 'euclidean'
    # Compute DBSCAN
    # parameters chosen for task
    core_samples, labels = dbscan(X, metric=metric, eps=eps,
                                  min_samples=min_samples)
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_1, n_clusters)
    # The estimator API must agree with the function API.
    db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples)
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_2, n_clusters)
def test_dbscan_sparse():
    """DBSCAN gives identical results on sparse and dense inputs."""
    eps, min_samples = .8, 10
    core_dense, labels_dense = dbscan(X, eps=eps, min_samples=min_samples)
    core_sparse, labels_sparse = dbscan(sparse.lil_matrix(X), eps=eps,
                                        min_samples=min_samples)
    assert_array_equal(core_dense, core_sparse)
    assert_array_equal(labels_dense, labels_sparse)
def test_dbscan_no_core_samples():
    """When no point qualifies as core, everything is labeled noise (-1)."""
    rng = np.random.RandomState(0)
    # Mostly-zero data so no neighborhood reaches min_samples.
    X = rng.rand(40, 10)
    X[X < .8] = 0
    for X_ in [X, sparse.csr_matrix(X)]:
        db = DBSCAN(min_samples=6).fit(X_)
        assert_array_equal(db.components_, np.empty((0, X_.shape[1])))
        assert_array_equal(db.labels_, -1)
        assert_equal(db.core_sample_indices_.shape, (0,))
def test_dbscan_callable():
    """DBSCAN accepts a callable metric and recovers the clusters."""
    # Tests the DBSCAN algorithm with a callable metric.
    # Parameters chosen specifically for this task.
    # Different eps to other test, because distance is not normalised.
    eps = 0.8
    min_samples = 10
    # metric is the function reference, not the string key.
    metric = distance.euclidean
    # Compute DBSCAN
    # parameters chosen for task
    core_samples, labels = dbscan(X, metric=metric, eps=eps,
                                  min_samples=min_samples,
                                  algorithm='ball_tree')
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_1, n_clusters)
    # The estimator API must agree with the function API.
    db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples,
                algorithm='ball_tree')
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_2, n_clusters)
def test_dbscan_balltree():
    """Tree-based neighbor algorithms and Minkowski params all agree."""
    # Tests the DBSCAN algorithm with balltree for neighbor calculation.
    eps = 0.8
    min_samples = 10
    D = pairwise_distances(X)
    core_samples, labels = dbscan(D, metric="precomputed", eps=eps,
                                  min_samples=min_samples)
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_1, n_clusters)
    db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='ball_tree')
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_2, n_clusters)
    db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='kd_tree')
    labels = db.fit(X).labels_
    n_clusters_3 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_3, n_clusters)
    # p=1 (Manhattan) still recovers the same cluster count on this data.
    db = DBSCAN(p=1.0, eps=eps, min_samples=min_samples, algorithm='ball_tree')
    labels = db.fit(X).labels_
    n_clusters_4 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_4, n_clusters)
    # leaf_size must not affect the result.
    db = DBSCAN(leaf_size=20, eps=eps, min_samples=min_samples,
                algorithm='ball_tree')
    labels = db.fit(X).labels_
    n_clusters_5 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_5, n_clusters)
def test_input_validation():
    """DBSCAN.fit accepts plain Python lists of lists."""
    data = [[1., 2.], [3., 4.]]
    DBSCAN().fit(data)  # must not raise exception
def test_dbscan_badargs():
    """Invalid parameter values all raise ValueError."""
    bad_kwargs = [
        dict(eps=-1.0),
        dict(algorithm='blah'),
        dict(metric='blah'),
        dict(leaf_size=-1),
        dict(p=-1),
    ]
    for kwargs in bad_kwargs:
        assert_raises(ValueError, dbscan, X, **kwargs)
def test_pickle():
    """A DBSCAN estimator survives a pickle round-trip."""
    estimator = DBSCAN()
    restored = pickle.loads(pickle.dumps(estimator))
    assert_equal(type(restored), estimator.__class__)
def test_boundaries():
    """min_samples and eps are inclusive bounds for core-point detection."""
    # ensure min_samples is inclusive of core point
    core, _ = dbscan([[0], [1]], eps=2, min_samples=2)
    assert_in(0, core)
    # ensure eps is inclusive of circumference
    core, _ = dbscan([[0], [1], [1]], eps=1, min_samples=2)
    assert_in(0, core)
    # just inside the radius is not enough
    core, _ = dbscan([[0], [1], [1]], eps=.99, min_samples=2)
    assert_not_in(0, core)
def test_weighted_dbscan():
    """sample_weight validation and its equivalence to sample repetition."""
    # ensure sample_weight is validated
    assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2])
    assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2, 3, 4])
    # ensure sample_weight has an effect
    assert_array_equal([], dbscan([[0], [1]], sample_weight=None,
                                  min_samples=6)[0])
    assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 5],
                                  min_samples=6)[0])
    assert_array_equal([0], dbscan([[0], [1]], sample_weight=[6, 5],
                                   min_samples=6)[0])
    assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 6],
                                      min_samples=6)[0])
    # points within eps of each other:
    assert_array_equal([0, 1], dbscan([[0], [1]], eps=1.5,
                                      sample_weight=[5, 1], min_samples=6)[0])
    # and effect of non-positive and non-integer sample_weight:
    assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 0],
                                  eps=1.5, min_samples=6)[0])
    assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[5.9, 0.1],
                                      eps=1.5, min_samples=6)[0])
    assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 0],
                                      eps=1.5, min_samples=6)[0])
    assert_array_equal([], dbscan([[0], [1]], sample_weight=[6, -1],
                                  eps=1.5, min_samples=6)[0])
    # for non-negative sample_weight, cores should be identical to repetition
    rng = np.random.RandomState(42)
    sample_weight = rng.randint(0, 5, X.shape[0])
    core1, label1 = dbscan(X, sample_weight=sample_weight)
    assert_equal(len(label1), len(X))
    X_repeated = np.repeat(X, sample_weight, axis=0)
    core_repeated, label_repeated = dbscan(X_repeated)
    core_repeated_mask = np.zeros(X_repeated.shape[0], dtype=bool)
    core_repeated_mask[core_repeated] = True
    core_mask = np.zeros(X.shape[0], dtype=bool)
    core_mask[core1] = True
    assert_array_equal(np.repeat(core_mask, sample_weight), core_repeated_mask)
    # sample_weight should work with precomputed distance matrix
    D = pairwise_distances(X)
    core3, label3 = dbscan(D, sample_weight=sample_weight,
                           metric='precomputed')
    assert_array_equal(core1, core3)
    assert_array_equal(label1, label3)
    # sample_weight should work with estimator
    est = DBSCAN().fit(X, sample_weight=sample_weight)
    core4 = est.core_sample_indices_
    label4 = est.labels_
    assert_array_equal(core1, core4)
    assert_array_equal(label1, label4)
    est = DBSCAN()
    label5 = est.fit_predict(X, sample_weight=sample_weight)
    core5 = est.core_sample_indices_
    assert_array_equal(core1, core5)
    assert_array_equal(label1, label5)
    assert_array_equal(label1, est.labels_)
def test_dbscan_core_samples_toy():
    """Exhaustive check of core/edge/noise labeling on a tiny 1-D dataset."""
    X = [[0], [2], [3], [4], [6], [8], [10]]
    n_samples = len(X)
    for algorithm in ['brute', 'kd_tree', 'ball_tree']:
        # Degenerate case: every sample is a core sample, either with its own
        # cluster or including other close core samples.
        core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
                                      min_samples=1)
        assert_array_equal(core_samples, np.arange(n_samples))
        assert_array_equal(labels, [0, 1, 1, 1, 2, 3, 4])
        # With eps=1 and min_samples=2 only the 3 samples from the denser area
        # are core samples. All other points are isolated and considered noise.
        core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
                                      min_samples=2)
        assert_array_equal(core_samples, [1, 2, 3])
        assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1])
        # Only the sample in the middle of the dense area is core. Its two
        # neighbors are edge samples. Remaining samples are noise.
        core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
                                      min_samples=3)
        assert_array_equal(core_samples, [2])
        assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1])
        # It's no longer possible to extract core samples with eps=1:
        # everything is noise.
        core_samples, labels = dbscan(X, algorithm=algorithm, eps=1,
                                      min_samples=4)
        assert_array_equal(core_samples, [])
        assert_array_equal(labels, -np.ones(n_samples))
def test_dbscan_precomputed_metric_with_degenerate_input_arrays():
    """Constant precomputed matrices collapse into a single cluster.

    Regression test; see
    https://github.com/scikit-learn/scikit-learn/issues/4641 for
    more details.
    """
    for degenerate in (np.ones((10, 2)), np.zeros((10, 2))):
        labels = DBSCAN(eps=0.5, metric='precomputed').fit(degenerate).labels_
        assert_equal(len(set(labels)), 1)
|
harshilasu/LinkurApp | refs/heads/master | y/google-cloud-sdk/platform/gcutil/lib/google_compute_engine/gcutil_lib/thread_pool_test.py | 4 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for thread_pool."""
import path_initializer
path_initializer.InitSysPath()
import time
import unittest
from gcutil_lib import thread_pool
class TestOperation(thread_pool.Operation):
  """Operation stub for the tests: optionally sleeps, then optionally raises."""
  def __init__(self, raise_exception=False, sleep_time=0):
    thread_pool.Operation.__init__(self)
    # Whether Run() should raise instead of returning (any truthy value).
    self.raise_exception = raise_exception
    # Seconds to sleep before finishing, to simulate long operations.
    self.sleep_time = sleep_time
  def Run(self):
    """Sleep if configured, then raise or return the sentinel value 42."""
    if self.sleep_time:
      time.sleep(self.sleep_time)
    if self.raise_exception:
      raise Exception('Exception!')
    return 42
class ThreadPoolTest(unittest.TestCase):
  """Exercises thread_pool.ThreadPool with fast, slow and failing ops."""
  def testBasic(self):
    """Test basic start up and shutdown."""
    tp = thread_pool.ThreadPool(3, 0.2)
    tp.WaitShutdown()
  def testSubmit(self):
    """Fast operations all complete with their return value recorded."""
    tp = thread_pool.ThreadPool(3, 0.2)
    ops = []
    for _ in xrange(20):
      op = TestOperation()
      ops.append(op)
      tp.Add(op)
    tp.WaitShutdown()
    for op in ops:
      self.assertEqual(op.Result(), 42)
      self.assertFalse(op.RaisedException())
  def testLongOps(self):
    """Sleeping operations still complete before pool shutdown."""
    tp = thread_pool.ThreadPool(3, 0.2)
    ops = []
    for _ in xrange(10):
      op = TestOperation(sleep_time=0.1)
      ops.append(op)
      tp.Add(op)
    tp.WaitShutdown()
    for op in ops:
      self.assertEqual(op.Result(), 42)
      self.assertFalse(op.RaisedException())
  def testExceptionOps(self):
    """Raising operations record the exception as their result."""
    tp = thread_pool.ThreadPool(3, 0.2)
    ops = []
    for _ in xrange(20):
      # NOTE(review): raise_exception=0.1 looks like a copy-paste from
      # sleep_time; it still triggers the raise (truthy) but was probably
      # meant to be raise_exception=True — confirm.
      op = TestOperation(raise_exception=0.1)
      ops.append(op)
      tp.Add(op)
    tp.WaitShutdown()
    for op in ops:
      self.assertEqual(str(op.Result()), 'Exception!')
      self.assertTrue(op.RaisedException())
if __name__ == '__main__':
unittest.main()
|
dvliman/jaikuengine | refs/heads/master | .google_appengine/google/appengine/api/prospective_search/error_pb.py | 27 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class Error(ProtocolBuffer.ProtocolMessage):
  """Empty prospective-search Error message (only carries an ErrorCode enum).

  Machine-generated by the protocol buffer compiler; do not edit by hand —
  regenerate from the .proto definition instead.
  """
  BAD_REQUEST = 1
  INTERNAL_ERROR = 2
  _ErrorCode_NAMES = {
    1: "BAD_REQUEST",
    2: "INTERNAL_ERROR",
  }
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)
  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    if x is self: return 1
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    return n
  def ByteSizePartial(self):
    n = 0
    return n
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def OutputPartial(self, out):
    pass
  def TryMerge(self, d):
    # Message has no fields: skip any unknown data, rejecting tag 0.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.Error'
if _extension_runtime:
pass
__all__ = ['Error']
|
feliam/opaf | refs/heads/master | tests/filters_tst.py | 1 | import unittest
from opaflib import filters
class FiltersTest(unittest.TestCase):
    """Table-driven and round-trip tests for the opaflib PDF stream filters."""
    random_strings = [ "AAAAAAAAA", "$#@%!#TYU$&#%^!@%THJDTKE%I$U^", "X"*65537 ]
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def _basicTest(self, flt, decode_tbl=[], encode_tbl=[], random_strings=[], decode_exception=[], encode_exception=[]):
        """Exercise a filter against expectation tables.

        Args:
            flt: filter object exposing encode()/decode().
            decode_tbl: (coded, clear) pairs; decode(coded) must equal clear.
            encode_tbl: (clear, coded) pairs; encode(clear) must equal coded.
            random_strings: strings checked for decode(encode(x)) == x.
            decode_exception: inputs whose decoding must raise.
            encode_exception: inputs whose encoding must raise.
        """
        # test specific decoding pairs
        for coded, clear in decode_tbl:
            self.assertEqual(clear, flt.decode(coded))
        # test specific encoding pairs
        for clear, coded in encode_tbl:
            # BUG FIX: this previously called flt.decode(clear); encode_tbl
            # entries must be checked with encode(), as the comment and the
            # parameter name intend (no current caller passes encode_tbl,
            # so this was latent).
            self.assertEqual(flt.encode(clear), coded)
        # test x = dec(enc(x))
        for clear in random_strings:
            self.assertEqual(clear, flt.decode(flt.encode(clear)))
        # Test strings that should not decode
        for coded in decode_exception:
            self.assertRaises(Exception, flt.decode, coded)
        # Test strings that should not encode
        for clear in encode_exception:
            self.assertRaises(Exception, flt.encode, clear)
    def testASCIIHexDecode(self):
        """ASCIIHex: whitespace is ignored, odd final digit implies '0'."""
        decode_tbl = [
            ('61 62 2e6364 65', 'ab.cde'),
            ('61 62 2e6364 657', 'ab.cdep'),
            ('7', 'p')
        ]
        decode_exception = ['61 62 2e6364 R 657', '$1' , '<><><><><><><' ]
        flt = filters.ASCIIHexDecode()
        self._basicTest(flt,decode_tbl,decode_exception=decode_exception,random_strings=self.random_strings)
    def testASCII85Decode(self):
        """ASCII85: known vectors plus random round-trips."""
        decode_tbl = [
            ('9jqo^BlbD-BleB1DJ+*+F(f,q', 'Man is distinguished'),
            ('E,9)oF*2M7/c~>', 'pleasure.')
        ]
        flt = filters.ASCII85Decode()
        self._basicTest(flt,decode_tbl,random_strings=self.random_strings)
    def testFlatedecode(self):
        """Flate: zlib-compressed data decodes back to the original."""
        decode_tbl = [
            ('Man is distinguished'.encode('zlib'), 'Man is distinguished')
        ]
        flt = filters.FlateDecode()
        self._basicTest(flt,decode_tbl,random_strings=self.random_strings)
    def testRunLengthDecode(self):
        """RunLength: literal runs, repeats, and the 0x80 end marker."""
        decode_tbl = [
            ('\x05123456\xfa7\x04abcde\x80junk', '1234567777777abcde')
        ]
        flt = filters.RunLengthDecode()
        self._basicTest(flt,decode_tbl,random_strings=self.random_strings)
|
SwagColoredKitteh/servo | refs/heads/master | python/tidy/servo_tidy/tidy.py | 7 | # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import contextlib
import fnmatch
import imp
import itertools
import json
import os
import re
import StringIO
import subprocess
import sys
import colorama
import toml
import yaml
from licenseck import MPL, APACHE, COPYRIGHT, licenses_toml, licenses_dep_toml
CONFIG_FILE_PATH = os.path.join(".", "servo-tidy.toml")
# Default configs
config = {
"skip-check-length": False,
"skip-check-licenses": False,
"check-ordered-json-keys": [],
"lint-scripts": [],
"blocked-packages": {},
"ignore": {
"files": [
os.path.join(".", "."), # ignore hidden files
],
"directories": [
os.path.join(".", "."), # ignore hidden directories
],
"packages": [],
},
"check_ext": {}
}
COMMENTS = ["// ", "# ", " *", "/* "]
# File patterns to include in the non-WPT tidy check.
FILE_PATTERNS_TO_CHECK = ["*.rs", "*.rc", "*.cpp", "*.c",
"*.h", "Cargo.lock", "*.py", "*.sh",
"*.toml", "*.webidl", "*.json", "*.html",
"*.yml"]
# File patterns that are ignored for all tidy and lint checks.
FILE_PATTERNS_TO_IGNORE = ["*.#*", "*.pyc", "fake-ld.sh"]
SPEC_BASE_PATH = "components/script/dom/"
WEBIDL_STANDARDS = [
"//www.khronos.org/registry/webgl/specs",
"//developer.mozilla.org/en-US/docs/Web/API",
"//dev.w3.org/2006/webapi",
"//dev.w3.org/csswg",
"//dev.w3.org/fxtf",
"//dvcs.w3.org/hg",
"//dom.spec.whatwg.org",
"//domparsing.spec.whatwg.org",
"//drafts.csswg.org",
"//drafts.fxtf.org",
"//encoding.spec.whatwg.org",
"//fetch.spec.whatwg.org",
"//html.spec.whatwg.org",
"//url.spec.whatwg.org",
"//xhr.spec.whatwg.org",
"//w3c.github.io",
"//heycam.github.io/webidl",
"//webbluetoothcg.github.io/web-bluetooth/",
"//svgwg.org/svg2-draft",
# Not a URL
"// This interface is entirely internal to Servo, and should not be" +
" accessible to\n// web pages."
]
def is_iter_empty(iterator):
    """Check whether *iterator* yields anything, without losing its items.

    Returns:
        Tuple ``(non_empty, iterator)``: the boolean result plus an iterator
        with the peeked item chained back on the front, so callers can keep
        iterating from the start.
    """
    try:
        # Use the next() builtin (as FileList.__init__ does) instead of the
        # Python 2-only .next() method; works on 2.6+ and 3.x alike.
        first = next(iterator)
    except StopIteration:
        return False, iterator
    return True, itertools.chain((first,), iterator)
def normilize_paths(paths):
    """Convert '/'-separated path string(s) to OS-native separators.

    Accepts either a single path string or a list of them; the return
    value mirrors the input shape.
    """
    # NOTE(review): the name is a long-standing typo for "normalize_paths";
    # kept for compatibility with existing callers.
    if isinstance(paths, basestring):  # Python 2 str/unicode check
        return os.path.join(*paths.split('/'))
    else:
        return [os.path.join(*path.split('/')) for path in paths]
# A simple wrapper for iterators to show progress
# (Note that it's inefficient for giant iterators, since it iterates once to get the upper bound)
def progress_wrapper(iterator):
    """Yield items from *iterator* while printing a percentage progress line.

    Materializes the whole iterator up front to learn its length, so avoid
    using it on very large inputs.
    """
    items = list(iterator)
    count = len(items)
    for position, item in enumerate(items, start=1):
        percent = int(float(position) / count * 100)
        sys.stdout.write('\r Progress: %s%% (%d/%d)' % (percent, position, count))
        sys.stdout.flush()
        yield item
class FileList(object):
    """Iterator over files under a directory, with optional filtering.

    Depending on the flags, iterates either every file, only files changed
    since the last git merge, or every file outside excluded directories.
    Raises StopIteration from the constructor if no file matches.
    """
    def __init__(self, directory, only_changed_files=False, exclude_dirs=[], progress=True):
        # NOTE(review): exclude_dirs uses a mutable default; harmless here
        # because it is only read, but worth cleaning up.
        self.directory = directory
        self.excluded = exclude_dirs
        iterator = self._git_changed_files() if only_changed_files else \
            self._filter_excluded() if exclude_dirs else self._default_walk()
        # Raise `StopIteration` if the iterator is empty
        obj = next(iterator)
        self.generator = itertools.chain((obj,), iterator)
        if progress:
            self.generator = progress_wrapper(self.generator)
    def _default_walk(self):
        """Yield every file path under self.directory, unfiltered."""
        for root, _, files in os.walk(self.directory):
            for f in files:
                yield os.path.join(root, f)
    def _git_changed_files(self):
        """Yield files changed since the last merge commit, minus excluded dirs."""
        args = ["git", "log", "-n1", "--merges", "--format=%H"]
        last_merge = subprocess.check_output(args).strip()
        args = ["git", "diff", "--name-only", last_merge, self.directory]
        file_list = normilize_paths(subprocess.check_output(args).splitlines())
        for f in file_list:
            if not any(os.path.join('.', os.path.dirname(f)).startswith(path) for path in self.excluded):
                yield os.path.join('.', f)
    def _filter_excluded(self):
        """Yield files under self.directory, pruning excluded directories."""
        for root, dirs, files in os.walk(self.directory, topdown=True):
            # modify 'dirs' in-place so that we don't do unnecessary traversals in excluded directories
            dirs[:] = [d for d in dirs if not any(os.path.join(root, d).startswith(name) for name in self.excluded)]
            for rel_path in files:
                yield os.path.join(root, rel_path)
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol method.
        return next(self.generator)
def filter_file(file_name):
    """Return False when *file_name* is excluded from linting.

    A file is excluded when it lives under an ignored path prefix from the
    config, or its basename matches one of FILE_PATTERNS_TO_IGNORE.
    """
    if any(file_name.startswith(ignored) for ignored in config["ignore"]["files"]):
        return False
    base = os.path.basename(file_name)
    return not any(fnmatch.fnmatch(base, pattern) for pattern in FILE_PATTERNS_TO_IGNORE)
def filter_files(start_dir, only_changed_files, progress):
    """Yield the lintable files beneath *start_dir*.

    Files must match one of FILE_PATTERNS_TO_CHECK and survive the
    per-file exclusion rules enforced by filter_file().
    """
    candidates = FileList(start_dir, only_changed_files=only_changed_files,
                          exclude_dirs=config["ignore"]["directories"], progress=progress)
    for file_name in candidates:
        base_name = os.path.basename(file_name)
        checkable = any(fnmatch.fnmatch(base_name, pattern) for pattern in FILE_PATTERNS_TO_CHECK)
        if checkable and filter_file(file_name):
            yield file_name
def uncomment(line):
    """Strip a leading comment marker (from COMMENTS) off *line*.

    Returns the stripped comment text, or None when the line does not start
    with any known comment marker.
    """
    for c in COMMENTS:
        if line.startswith(c):
            if line.endswith("*/"):
                # Drop the two-character "*/" terminator.  The previous
                # `len(line) - 3` was an off-by-one that also dropped the
                # last content character (e.g. "/* foo*/" -> " fo").
                return line[len(c):(len(line) - 2)].strip()
            return line[len(c):].strip()
def is_apache_licensed(header):
    """Return True when *header* contains the Apache license text plus one
    of the known copyright lines.

    Previously returned an implicit None when APACHE was absent; now returns
    an explicit False — identical truthiness for all callers.
    """
    if APACHE in header:
        return any(c in header for c in COPYRIGHT)
    return False
def check_license(file_name, lines):
    """Yield (line, message) when a file lacks a valid MPL/Apache header.

    Data-format files and files covered by the skip-check-licenses config
    flag are exempt.  "xfail-license" in the header acknowledges a known
    bad license and suppresses the error.
    """
    if any(file_name.endswith(ext) for ext in (".yml", ".toml", ".lock", ".json", ".html")) or \
       config["skip-check-licenses"]:
        # `return` (not `raise StopIteration`) ends a generator safely under
        # PEP 479; identical behavior on Python 2.
        return
    # Guard len(lines) > 1: a one-line shebang file used to raise IndexError.
    if lines[0].startswith("#!") and len(lines) > 1 and lines[1].strip():
        yield (1, "missing blank line after shebang")
    blank_lines = 0
    # A shebang line allows one extra blank line before the license block.
    max_blank_lines = 2 if lines[0].startswith("#!") else 1
    license_block = []
    for l in lines:
        l = l.rstrip('\n')
        if not l.strip():
            blank_lines += 1
            if blank_lines >= max_blank_lines:
                break
            continue
        line = uncomment(l)
        if line is not None:
            license_block.append(line)
    header = " ".join(license_block)
    valid_license = MPL in header or is_apache_licensed(header)
    acknowledged_bad_license = "xfail-license" in header
    if not (valid_license or acknowledged_bad_license):
        yield (1, "incorrect license")
def check_modeline(file_name, lines):
    """Flag editor modelines (vi/vim/ex or Emacs file variables) found in
    the first five lines of a file.  Yields (line_number, message).
    """
    head = lines[:5]
    for line_number, text in enumerate(head, start=1):
        if re.search('^.*[ \t](vi:|vim:|ex:)[ \t]', text):
            yield (line_number, "vi modeline present")
        elif re.search('-\*-.*-\*-', text, re.IGNORECASE):
            yield (line_number, "emacs file variables present")
def check_length(file_name, idx, line):
    """Yield (line_number, message) when *line* exceeds the length limit.

    Data-format files and files covered by the skip-check-length config
    flag are exempt.
    """
    if any(file_name.endswith(ext) for ext in (".yml", ".lock", ".json", ".html", ".toml")) or \
       config["skip-check-length"]:
        # `return` (not `raise StopIteration`) ends a generator safely under
        # PEP 479; identical behavior on Python 2.
        return
    # Prefer shorter lines when shell scripting.
    max_length = 80 if file_name.endswith(".sh") else 120
    if len(line.rstrip('\n')) > max_length:
        yield (idx + 1, "Line is longer than %d characters" % max_length)
def check_whatwg_specific_url(idx, line):
    """Flag links to a specific WHATWG multipage file; such page names can
    change, so the fragment-only multipage form is preferred.
    """
    found = re.search(r"https://html\.spec\.whatwg\.org/multipage/[\w-]+\.html#([\w\:-]+)", line)
    if found is None:
        return
    preferred_link = "https://html.spec.whatwg.org/multipage/#{}".format(found.group(1))
    yield (idx + 1, "link to WHATWG may break in the future, use this format instead: {}".format(preferred_link))
def check_whatwg_single_page_url(idx, line):
    """Flag links to the WHATWG single-page spec; the multipage form loads
    faster and is preferred.
    """
    found = re.search(r"https://html\.spec\.whatwg\.org/#([\w\:-]+)", line)
    if found is None:
        return
    preferred_link = "https://html.spec.whatwg.org/multipage/#{}".format(found.group(1))
    yield (idx + 1, "links to WHATWG single-page url, change to multi page: {}".format(preferred_link))
def check_whitespace(idx, line):
    """Yield (line_number, message) for whitespace problems on *line*:
    missing trailing newline, trailing spaces, tabs, and carriage returns.

    Fix: use endswith() instead of indexing line[-1], which raised
    IndexError for an empty string.
    """
    if line.endswith("\n"):
        line = line[:-1]
    else:
        yield (idx + 1, "no newline at EOF")
    if line.endswith(" "):
        yield (idx + 1, "trailing whitespace")
    if "\t" in line:
        yield (idx + 1, "tab on line")
    if "\r" in line:
        yield (idx + 1, "CR on line")
def check_by_line(file_name, lines):
    """Run every per-line check over *lines*, yielding each error tuple in
    the order the checks are listed.
    """
    line_checks = (
        lambda idx, line: check_length(file_name, idx, line),
        check_whitespace,
        check_whatwg_specific_url,
        check_whatwg_single_page_url,
    )
    for idx, line in enumerate(lines):
        for check in line_checks:
            for error in check(idx, line):
                yield error
def check_flake8(file_name, contents):
    """Run flake8 over a Python source file, yielding (line, message).

    Fixes: the extension check now runs before the (comparatively costly)
    flake8 import, and the generator exits with `return` instead of
    `raise StopIteration` (PEP 479-safe; identical behavior on Python 2).
    """
    if not file_name.endswith(".py"):
        return
    from flake8.main import check_code
    @contextlib.contextmanager
    def stdout_redirect(where):
        # Temporarily capture everything flake8 prints to stdout.
        sys.stdout = where
        try:
            yield where
        finally:
            sys.stdout = sys.__stdout__
    ignore = {
        "W291",  # trailing whitespace; the standard tidy process will enforce no trailing whitespace
        "E501",  # 80 character line length; the standard tidy process will enforce line length
    }
    output = StringIO.StringIO()
    with stdout_redirect(output):
        check_code(contents, ignore=ignore)
    # flake8 prints "path:line:col: message" lines; re-parse them.
    for error in output.getvalue().splitlines():
        _, line_num, _, message = error.split(":", 3)
        yield line_num, message.strip()
def check_lock(file_name, contents):
    """Lint a Cargo.lock file.

    Yields (line, message) for (a) packages vendored at more than one
    version, listing which packages pull in each version, and (b) packages
    that transitively depend on a blocked package without being
    whitelisted for it.
    """
    def find_reverse_dependencies(name, content):
        # Yield (package_name, dependency_string) for every package whose
        # dependency list mentions *name*.
        for package in itertools.chain([content["root"]], content["package"]):
            for dependency in package.get("dependencies", []):
                if dependency.startswith("{} ".format(name)):
                    yield package["name"], dependency
    if not file_name.endswith(".lock"):
        raise StopIteration
    # Package names to be neglected (as named by cargo)
    exceptions = config["ignore"]["packages"]
    content = toml.loads(contents)
    # Map package name -> [(version, source), ...] to spot duplicates.
    packages_by_name = {}
    for package in content.get("package", []):
        source = package.get("source", "")
        if source == r"registry+https://github.com/rust-lang/crates.io-index":
            source = "crates.io"
        packages_by_name.setdefault(package["name"], []).append((package["version"], source))
    # .iteritems() is Python 2 only.
    for (name, packages) in packages_by_name.iteritems():
        if name in exceptions or len(packages) <= 1:
            continue
        message = "duplicate versions for package `{}`".format(name)
        packages.sort()
        packages_dependencies = list(find_reverse_dependencies(name, content))
        for version, source in packages:
            short_source = source.split("#")[0].replace("git+", "")
            message += "\n\t\033[93mThe following packages depend on version {} from '{}':\033[0m" \
                       .format(version, short_source)
            for name, dependency in packages_dependencies:
                if version in dependency and short_source in dependency:
                    message += "\n\t\t" + name
        yield (1, message)
    # Check to see if we are transitively using any blocked packages
    for package in content.get("package", []):
        package_name = package.get("name")
        package_version = package.get("version")
        for dependency in package.get("dependencies", []):
            dependency = dependency.split()
            dependency_name = dependency[0]
            whitelist = config['blocked-packages'].get(dependency_name)
            if whitelist is not None:
                if package_name not in whitelist:
                    fmt = "Package {} {} depends on blocked package {}."
                    message = fmt.format(package_name, package_version, dependency_name)
                    yield (1, message)
def check_toml(file_name, lines):
    """Lint a Cargo.toml: flag wildcard version requirements and require a
    recognized license line.  Workspace manifests are exempt.

    Fix: generator early-exits use `return` instead of `raise
    StopIteration` (PEP 479-safe; identical behavior on Python 2).
    """
    if not file_name.endswith("Cargo.toml"):
        return
    ok_licensed = False
    for idx, line in enumerate(lines):
        if idx == 0 and "[workspace]" in line:
            return
        if line.find("*") != -1:
            yield (idx + 1, "found asterisk instead of minimum version number")
        for license_line in licenses_toml:
            ok_licensed |= (license_line in line)
    if not ok_licensed:
        yield (0, ".toml file should contain a valid license.")
def check_shell(file_name, lines):
    """Lint a shell script: require the bash shebang and the errexit /
    nounset / pipefail options before the first real statement, and flag
    backticks, single-bracket tests, and bare `$VAR` substitutions.

    Fix: exits with `return` instead of `raise StopIteration`
    (PEP 479-safe; identical behavior on Python 2).
    """
    if not file_name.endswith(".sh"):
        return
    shebang = "#!/usr/bin/env bash"
    required_options = {"set -o errexit", "set -o nounset", "set -o pipefail"}
    did_shebang_check = False
    if not lines:
        yield (0, 'script is an empty file')
        return
    if lines[0].rstrip() != shebang:
        yield (1, 'script does not have shebang "{}"'.format(shebang))
    for idx in range(1, len(lines)):
        stripped = lines[idx].rstrip()
        # Comments or blank lines are ignored. (Trailing whitespace is caught with a separate linter.)
        if lines[idx].startswith("#") or stripped == "":
            continue
        if not did_shebang_check:
            if stripped in required_options:
                required_options.remove(stripped)
            else:
                # The first non-comment, non-whitespace, non-option line is the first "real" line of the script.
                # The shebang, options, etc. must come before this.
                if required_options:
                    formatted = ['"{}"'.format(opt) for opt in required_options]
                    yield (idx + 1, "script is missing options {}".format(", ".join(formatted)))
                did_shebang_check = True
        if "`" in stripped:
            yield (idx + 1, "script should not use backticks for command substitution")
        if " [ " in stripped or stripped.startswith("[ "):
            yield (idx + 1, "script should use `[[` instead of `[` for conditional testing")
        for dollar in re.finditer('\$', stripped):
            next_idx = dollar.end()
            if next_idx < len(stripped):
                next_char = stripped[next_idx]
                if not (next_char == '{' or next_char == '('):
                    yield(idx + 1, "variable substitutions should use the full \"${VAR}\" form")
def check_rust(file_name, lines):
    """Lint Rust source lines, yielding (line_number, message).

    Checks operator/brace spacing via a table of regex rules, plus
    alphabetical ordering of `extern crate`, `#![feature(...)]`, `use` and
    `mod` declarations (tracked per indentation level).

    Fixes: generator early-exit uses `return` instead of `raise
    StopIteration` (PEP 479-safe), and the feature list materializes
    `map()` with `list()` so the `!=` comparison against a list works on
    both Python 2 and 3.
    """
    if not file_name.endswith(".rs") or \
       file_name.endswith(".mako.rs") or \
       file_name.endswith(os.path.join("style", "build.rs")) or \
       file_name.endswith(os.path.join("geckolib", "build.rs")) or \
       file_name.endswith(os.path.join("unit", "style", "stylesheets.rs")):
        return
    comment_depth = 0
    merged_lines = ''
    import_block = False
    whitespace = False
    is_lib_rs_file = file_name.endswith("lib.rs")
    prev_use = None
    prev_open_brace = False
    current_indent = 0
    prev_crate = {}
    prev_mod = {}
    prev_feature_name = ""
    decl_message = "{} is not in alphabetical order"
    decl_expected = "\n\t\033[93mexpected: {}\033[0m"
    decl_found = "\n\t\033[91mfound: {}\033[0m"
    for idx, original_line in enumerate(lines):
        # simplify the analysis
        line = original_line.strip()
        is_attribute = re.search(r"#\[.*\]", line)
        is_comment = re.search(r"^//|^/\*|^\*", line)
        # Simple heuristic to avoid common case of no comments.
        if '/' in line:
            comment_depth += line.count('/*')
            comment_depth -= line.count('*/')
        if line.endswith('\\'):
            merged_lines += line[:-1]
            continue
        if comment_depth:
            merged_lines += line
            continue
        if merged_lines:
            line = merged_lines + line
            merged_lines = ''
        # Ignore attributes, comments, and imports
        # Keep track of whitespace to enable checking for a merged import block
        if import_block:
            if not (is_comment or is_attribute or line.startswith("use ")):
                whitespace = line == ""
                if not whitespace:
                    import_block = False
        # get rid of strings and chars because cases like regex expression, keep attributes
        if not is_attribute:
            line = re.sub(r'"(\\.|[^\\"])*?"', '""', line)
            line = re.sub(r"'(\\.|[^\\'])*?'", "''", line)
        # get rid of comments
        line = re.sub('//.*?$|/\*.*?$|^\*.*?$', '//', line)
        # get rid of attributes that do not contain =
        line = re.sub('^#[A-Za-z0-9\(\)\[\]_]*?$', '#[]', line)
        # flag this line if it matches one of the following regular expressions
        # tuple format: (pattern, format_message, filter_function(match, line))
        no_filter = lambda match, line: True
        regex_rules = [
            (r",[^\s]", "missing space after ,",
             lambda match, line: '$' not in line and not is_attribute),
            (r"([A-Za-z0-9_]+) (\()", "extra space after {0}",
             lambda match, line: not (
                 is_attribute or
                 re.match(r"\bmacro_rules!\s+", line[:match.start()]) or
                 re.search(r"[^']'[A-Za-z0-9_]+ \($", line[:match.end()]) or
                 match.group(1) in ['const', 'fn', 'for', 'if', 'in',
                                    'let', 'match', 'mut', 'return'])),
            (r"[A-Za-z0-9\"]=", "missing space before =",
             lambda match, line: is_attribute),
            (r"=[A-Za-z0-9\"]", "missing space after =",
             lambda match, line: is_attribute),
            (r"^=\s", "no = in the beginning of line",
             lambda match, line: not is_comment),
            # ignore scientific notation patterns like 1e-6
            (r"[A-DF-Za-df-z0-9]-", "missing space before -",
             lambda match, line: not is_attribute),
            (r"[A-Za-z0-9]([\+/\*%=])", "missing space before {0}",
             lambda match, line: (not is_attribute and
                                  not is_associated_type(match, line))),
            # * not included because of dereferencing and casting
            # - not included because of unary negation
            (r'([\+/\%=])[A-Za-z0-9"]', "missing space after {0}",
             lambda match, line: (not is_attribute and
                                  not is_associated_type(match, line))),
            (r"\)->", "missing space before ->", no_filter),
            (r"->[A-Za-z]", "missing space after ->", no_filter),
            (r"[^ ]=>", "missing space before =>", lambda match, line: match.start() != 0),
            (r"=>[^ ]", "missing space after =>", lambda match, line: match.end() != len(line)),
            (r"=>  ", "extra space after =>", no_filter),
            # ignore " ::crate::mod" and "trait Foo : Bar"
            (r" :[^:]", "extra space before :",
             lambda match, line: 'trait ' not in line[:match.start()]),
            # ignore "crate::mod" and ignore flagging macros like "$t1:expr"
            (r"[^:]:[A-Za-z0-9\"]", "missing space after :",
             lambda match, line: '$' not in line[:match.end()]),
            (r"[A-Za-z0-9\)]{", "missing space before {{", no_filter),
            # ignore cases like "{}", "}`", "}}" and "use::std::{Foo, Bar}"
            (r"[^\s{}]}[^`]", "missing space before }}",
             lambda match, line: not re.match(r'^(pub )?use', line)),
            # ignore cases like "{}", "`{", "{{" and "use::std::{Foo, Bar}"
            (r"[^`]{[^\s{}]", "missing space after {{",
             lambda match, line: not re.match(r'^(pub )?use', line)),
            # There should not be any extra pointer dereferencing
            (r": &Vec<", "use &[T] instead of &Vec<T>", no_filter),
            # No benefit over using &str
            (r": &String", "use &str instead of &String", no_filter),
            # No benefit to using &Root<T>
            (r": &Root<", "use &T instead of &Root<T>", no_filter),
            (r"^&&", "operators should go at the end of the first line", no_filter),
            (r"\{[A-Za-z0-9_]+\};", "use statement contains braces for single import",
             lambda match, line: line.startswith('use ')),
            (r"^\s*else {", "else braces should be on the same line", no_filter),
            (r"[^$ ]\([ \t]", "extra space after (", no_filter),
            # This particular pattern is not reentrant-safe in script_thread.rs
            (r"match self.documents.borrow", "use a separate variable for the match expression",
             lambda match, line: file_name.endswith('script_thread.rs')),
        ]
        for pattern, message, filter_func in regex_rules:
            for match in re.finditer(pattern, line):
                if filter_func(match, line):
                    yield (idx + 1, message.format(*match.groups(), **match.groupdict()))
        if prev_open_brace and not line:
            yield (idx + 1, "found an empty line following a {")
        prev_open_brace = line.endswith("{")
        # check alphabetical order of extern crates
        if line.startswith("extern crate "):
            # strip "extern crate " from the begin and ";" from the end
            crate_name = line[13:-1]
            indent = len(original_line) - len(line)
            if indent not in prev_crate:
                prev_crate[indent] = ""
            if prev_crate[indent] > crate_name:
                yield(idx + 1, decl_message.format("extern crate declaration")
                      + decl_expected.format(prev_crate[indent])
                      + decl_found.format(crate_name))
            prev_crate[indent] = crate_name
        # check alphabetical order of feature attributes in lib.rs files
        if is_lib_rs_file:
            match = re.search(r"#!\[feature\((.*)\)\]", line)
            if match:
                features = list(map(lambda w: w.strip(), match.group(1).split(',')))
                sorted_features = sorted(features)
                if sorted_features != features:
                    yield(idx + 1, decl_message.format("feature attribute")
                          + decl_expected.format(tuple(sorted_features))
                          + decl_found.format(tuple(features)))
                if prev_feature_name > sorted_features[0]:
                    yield(idx + 1, decl_message.format("feature attribute")
                          + decl_expected.format(prev_feature_name + " after " + sorted_features[0])
                          + decl_found.format(prev_feature_name + " before " + sorted_features[0]))
                prev_feature_name = sorted_features[0]
            else:
                # not a feature attribute line, so empty previous name
                prev_feature_name = ""
        # imports must be in the same line, alphabetically sorted, and merged
        # into a single import block
        if line.startswith("use "):
            import_block = True
            indent = len(original_line) - len(line)
            if not line.endswith(";") and '{' in line:
                yield (idx + 1, "use statement spans multiple lines")
            # strip "use" from the begin and ";" from the end
            current_use = line[4:-1]
            if prev_use:
                current_use_cut = current_use.replace("{self,", ".").replace("{", ".")
                prev_use_cut = prev_use.replace("{self,", ".").replace("{", ".")
                if indent == current_indent and current_use_cut < prev_use_cut:
                    yield(idx + 1, decl_message.format("use statement")
                          + decl_expected.format(prev_use)
                          + decl_found.format(current_use))
            prev_use = current_use
            current_indent = indent
        if whitespace or not import_block:
            current_indent = 0
        # do not allow blank lines in an import block
        if import_block and whitespace and line.startswith("use "):
            whitespace = False
            # `idx` (not idx + 1) points at the blank line preceding this one.
            yield(idx, "encountered whitespace following a use statement")
        # modules must be in the same line and alphabetically sorted
        if line.startswith("mod ") or line.startswith("pub mod "):
            indent = len(original_line) - len(line)
            # strip /(pub )?mod/ from the left and ";" from the right
            mod = line[4:-1] if line.startswith("mod ") else line[8:-1]
            if (idx - 1) < 0 or "#[macro_use]" not in lines[idx - 1]:
                match = line.find(" {")
                if indent not in prev_mod:
                    prev_mod[indent] = ""
                if match == -1 and not line.endswith(";"):
                    yield (idx + 1, "mod declaration spans multiple lines")
                if prev_mod[indent] and mod < prev_mod[indent]:
                    yield(idx + 1, decl_message.format("mod declaration")
                          + decl_expected.format(prev_mod[indent])
                          + decl_found.format(mod))
                prev_mod[indent] = mod
        else:
            # we now erase previous entries
            prev_mod = {}
def is_associated_type(match, line):
    """Return True when *match* (an '=' hit from the spacing rules) sits
    inside angle brackets, i.e. an associated-type bound like <Item=Foo>,
    which must not be flagged as a missing-space error.
    """
    if match.group(1) != '=':
        return False
    prefix = line[0:match.end()]
    open_angle = prefix.rfind('<')
    if open_angle == -1:
        return False
    close_angle = line[open_angle:].find('>')
    if close_angle == -1:
        return False
    # The '<' must open before the match and the '>' must close at or after it.
    return open_angle < match.start() and close_angle + open_angle >= match.end()
def check_webidl_spec(file_name, contents):
    """Require every .webidl file to contain at least one recognized
    specification link (from WEBIDL_STANDARDS).

    Fix: generator exits use `return` instead of `raise StopIteration`
    (PEP 479-safe; identical behavior on Python 2).
    """
    # Sorted by this function (in pseudo-Rust). The idea is to group the same
    # organization together.
    # fn sort_standards(a: &Url, b: &Url) -> Ordering {
    #     let a_domain = a.domain().split(".");
    #     a_domain.pop();
    #     a_domain.reverse();
    #     let b_domain = b.domain().split(".");
    #     b_domain.pop();
    #     b_domain.reverse();
    #     for i in a_domain.into_iter().zip(b_domain.into_iter()) {
    #         match i.0.cmp(b.0) {
    #             Less => return Less,
    #             Greater => return Greater,
    #             _ => (),
    #         }
    #     }
    #     a_domain.path().cmp(b_domain.path())
    # }
    if not file_name.endswith(".webidl"):
        return
    for i in WEBIDL_STANDARDS:
        if contents.find(i) != -1:
            return
    yield (0, "No specification link found.")
def duplicate_key_yaml_constructor(loader, node, deep=False):
    """YAML mapping constructor that raises KeyError on duplicate keys.

    The YAML spec tolerates duplicate keys, but they are almost certainly a
    mistake in buildbot_steps.yml, so construct each key up front and fail
    fast before delegating to the stock mapping constructor.
    """
    seen = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        if key in seen:
            raise KeyError(key)
        seen[key] = loader.construct_object(value_node, deep=deep)
    return loader.construct_mapping(node, deep)
def lint_buildbot_steps_yaml(mapping):
    """Validate that *mapping* (parsed buildbot_steps.yml) maps each key to
    a list of strings.  Raises ValueError on the first violation.

    Fix: replaced Python-2-only itertools.ifilter with a plain loop —
    identical behavior (raises on the first non-string element) and works
    on both Python 2 and 3.
    """
    # Check for well-formedness of contents
    # A well-formed buildbot_steps.yml should be a map to list of strings
    for k in mapping.keys():
        if not isinstance(mapping[k], list):
            raise ValueError("Key '{}' maps to type '{}', but list expected".format(k, type(mapping[k]).__name__))
        # check if value is a list of strings
        for item in mapping[k]:
            if not isinstance(item, str):
                raise ValueError("List mapped to '{}' contains non-string element".format(k))
# check_yaml() registers a duplicate-key constructor on this subclass so
# the stock yaml.SafeLoader stays untouched for other users of the library.
class SafeYamlLoader(yaml.SafeLoader):
    """Subclass of yaml.SafeLoader to avoid mutating the global SafeLoader."""
    pass
def check_yaml(file_name, contents):
    """Lint buildbot_steps.yml: reject malformed YAML, duplicate keys, and
    values that are not lists of strings.  Yields (line_or_None, message).

    Fixes: `return` instead of `raise StopIteration` (PEP 479-safe), and
    `e.args[0]` instead of the Python-2-only `e.message` (same value on
    Python 2 for single-argument exceptions).
    """
    if not file_name.endswith("buildbot_steps.yml"):
        return
    # YAML specification doesn't explicitly disallow
    # duplicate keys, but they shouldn't be allowed in
    # buildbot_steps.yml as it could lead to confusion
    SafeYamlLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        duplicate_key_yaml_constructor
    )
    try:
        contents = yaml.load(contents, Loader=SafeYamlLoader)
        lint_buildbot_steps_yaml(contents)
    except yaml.YAMLError as e:
        line = e.problem_mark.line + 1 if hasattr(e, 'problem_mark') else None
        yield (line, e)
    except KeyError as e:
        yield (None, "Duplicated Key ({})".format(e.args[0]))
    except ValueError as e:
        yield (None, e.args[0])
def check_for_possible_duplicate_json_keys(key_value_pairs):
    """Raise KeyError on the first key that appears more than once in
    *key_value_pairs* (a list of (key, value) tuples).
    """
    seen = set()
    for key, _ in key_value_pairs:
        if key in seen:
            raise KeyError("Duplicated Key (%s)" % key)
        seen.add(key)
def check_for_alphabetical_sorted_json_keys(key_value_pairs):
    """Raise KeyError at the first adjacent pair of keys that is out of
    ascending order in *key_value_pairs* (a list of (key, value) tuples).
    """
    keys = [pair[0] for pair in key_value_pairs]
    for previous, current in zip(keys, keys[1:]):
        if previous > current:
            raise KeyError("Unordered key (found %s before %s)" % (previous, current))
def check_json_requirements(filename):
    """Build the object_pairs_hook used by check_json for *filename*:
    always rejects duplicate keys, and additionally enforces alphabetical
    key order for files listed in config["check-ordered-json-keys"].
    """
    def validate(key_value_pairs):
        check_for_possible_duplicate_json_keys(key_value_pairs)
        if filename in normilize_paths(config["check-ordered-json-keys"]):
            check_for_alphabetical_sorted_json_keys(key_value_pairs)
    return validate
def check_json(filename, contents):
    """Parse a .json file, yielding (line_or_None, message) for syntax
    errors, duplicate keys, and (for configured files) unordered keys.

    Fixes: `return` instead of `raise StopIteration` (PEP 479-safe), and
    `str(e)` / `e.args[0]` instead of the Python-2-only `e.message` (same
    text on Python 2 for single-argument exceptions).
    """
    if not filename.endswith(".json"):
        return
    try:
        json.loads(contents, object_pairs_hook=check_json_requirements(filename))
    except ValueError as e:
        # json error messages embed "line N"; surface it when present.
        match = re.search(r"line (\d+) ", str(e))
        line_no = match and match.group(1)
        yield (line_no, str(e))
    except KeyError as e:
        yield (None, e.args[0])
def check_spec(file_name, lines):
    """Require every WebIDL-generated method implementation (inside the
    `impl FooMethods for Foo` block) to be preceded by a comment containing
    a specification link.  Yields (line_number, message).

    Fix: exits with `return` instead of `raise StopIteration`
    (PEP 479-safe; identical behavior on Python 2).
    """
    if SPEC_BASE_PATH not in file_name:
        return
    file_name = os.path.relpath(os.path.splitext(file_name)[0], SPEC_BASE_PATH)
    # Pattern for a pure comment line.
    patt = re.compile("^\s*\/\/.+")
    # Pattern representing a line with a macro
    macro_patt = re.compile("^\s*\S+!(.*)$")
    # Pattern representing a line with comment containing a spec link
    link_patt = re.compile("^\s*///? https://.+$")
    # Pattern representing a line with comment
    comment_patt = re.compile("^\s*///?.+$")
    brace_count = 0
    in_impl = False
    pattern = "impl {}Methods for {} {{".format(file_name, file_name)
    for idx, line in enumerate(lines):
        if "// check-tidy: no specs after this line" in line:
            break
        if not patt.match(line):
            if pattern.lower() in line.lower():
                in_impl = True
            # A method (or macro) directly inside the impl block must have a
            # spec-link comment somewhere in the comment run above it.
            if ("fn " in line or macro_patt.match(line)) and brace_count == 1:
                for up_idx in range(1, idx + 1):
                    up_line = lines[idx - up_idx]
                    if link_patt.match(up_line):
                        # Comment with spec link exists
                        break
                    if not comment_patt.match(up_line):
                        # No more comments exist above, yield warning
                        yield (idx + 1, "method declared in webidl is missing a comment with a specification link")
                        break
            if '{' in line and in_impl:
                brace_count += 1
            if '}' in line and in_impl:
                if brace_count == 1:
                    break
                brace_count -= 1
def check_config_file(config_file, print_text=True):
    """Validate servo-tidy.toml itself, yielding (path, line, message) for
    unknown tables/keys and for ignored files/directories that do not
    exist, then load its contents into the global config via parse_config().
    """
    # Check if config file exists
    if not os.path.exists(config_file):
        print("%s config file is required but was not found" % config_file)
        sys.exit(1)
    # Load configs from servo-tidy.toml
    with open(config_file) as content:
        conf_file = content.read()
        lines = conf_file.splitlines(True)
    if print_text:
        print '\rChecking the config file...'
    config_content = toml.loads(conf_file)
    exclude = config_content.get("ignore", {})
    # Check for invalid listed ignored directories
    exclude_dirs = exclude.get("directories", [])
    # Directories under these prefixes may legitimately not exist locally.
    skip_dirs = ["./target", "./tests"]
    invalid_dirs = [d for d in exclude_dirs if not os.path.isdir(d) and not any(s in d for s in skip_dirs)]
    # Check for invalid listed ignored files
    invalid_files = [f for f in exclude.get("files", []) if not os.path.exists(f)]
    current_table = ""
    for idx, line in enumerate(lines):
        # Ignore comment lines
        if line.strip().startswith("#"):
            continue
        # Check for invalid tables
        if re.match("\[(.*?)\]", line.strip()):
            table_name = re.findall(r"\[(.*?)\]", line)[0].strip()
            if table_name not in ("configs", "blocked-packages", "ignore", "check_ext"):
                yield config_file, idx + 1, "invalid config table [%s]" % table_name
            current_table = table_name
            continue
        # Print invalid listed ignored directories
        if current_table == "ignore" and invalid_dirs:
            for d in invalid_dirs:
                if line.strip().strip('\'",') == d:
                    yield config_file, idx + 1, "ignored directory '%s' doesn't exist" % d
                    invalid_dirs.remove(d)
                    break
        # Print invalid listed ignored files
        if current_table == "ignore" and invalid_files:
            for f in invalid_files:
                if line.strip().strip('\'",') == f:
                    yield config_file, idx + 1, "ignored file '%s' doesn't exist" % f
                    invalid_files.remove(f)
                    break
        # Skip if there is no equal sign in line, assuming it's not a key
        if "=" not in line:
            continue
        key = line.split("=")[0].strip()
        # Check for invalid keys inside [configs] and [ignore] table
        if (current_table == "configs" and key not in config or
                current_table == "ignore" and key not in config["ignore"] or
                # Any key outside of tables
                current_table == ""):
            yield config_file, idx + 1, "invalid config key '%s'" % key
    # Parse config file
    parse_config(config_content)
def parse_config(config_file):
    """Merge the parsed servo-tidy.toml contents (*config_file* is the toml
    dict) into the module-global `config`.
    """
    exclude = config_file.get("ignore", {})
    # Ignored directories and files, normalized to OS-specific paths.
    config["ignore"]["directories"] += normilize_paths(exclude.get("directories", []))
    config["ignore"]["files"] += normilize_paths(exclude.get("files", []))
    # Cargo package names exempt from the duplicate-version check.
    config["ignore"]["packages"] = exclude.get("packages", [])
    # Directory -> expected file extensions, with OS-specific paths.
    for directory, extensions in config_file.get("check_ext", {}).items():
        config['check_ext'][normilize_paths(directory)] = extensions
    # Blocked packages and their per-package whitelists.
    config["blocked-packages"] = config_file.get("blocked-packages", {})
    # User overrides for known default configuration flags.
    user_configs = config_file.get("configs", [])
    for pref in user_configs:
        if pref in config:
            config[pref] = user_configs[pref]
def check_directory_files(directories, print_text=True):
    """Yield (filename, line, message) for files whose extension is not in
    the allowed set for their directory (*directories* maps a directory to
    its list of expected extensions).
    """
    if print_text:
        print '\rChecking directories for correct file extensions...'
    for directory, file_extensions in directories.items():
        files = sorted(os.listdir(directory))
        for filename in files:
            if not any(filename.endswith(ext) for ext in file_extensions):
                details = {
                    "name": os.path.basename(filename),
                    "ext": ", ".join(file_extensions),
                    "dir_name": directory
                }
                message = '''Unexpected extension found for {name}. \
We only expect files with {ext} extensions in {dir_name}'''.format(**details)
                yield (filename, 1, message)
def collect_errors_for_files(files_to_check, checking_functions, line_checking_functions, print_text=True):
(has_element, files_to_check) = is_iter_empty(files_to_check)
if not has_element:
raise StopIteration
if print_text:
print '\rChecking files for tidiness...'
for filename in files_to_check:
if not os.path.exists(filename):
continue
with open(filename, "r") as f:
contents = f.read()
if not contents.strip():
yield filename, 0, "file is empty"
continue
for check in checking_functions:
for error in check(filename, contents):
# the result will be: `(filename, line, message)`
yield (filename,) + error
lines = contents.splitlines(True)
for check in line_checking_functions:
for error in check(filename, lines):
yield (filename,) + error
def get_dep_toml_files(only_changed_files=False):
    """Yield paths of every vendored Cargo.toml found under .cargo, for the
    dependency licensing lint.
    """
    if not only_changed_files:
        print '\nRunning the dependency licensing lint...'
    for root, directories, filenames in os.walk(".cargo"):
        for filename in filenames:
            if filename == "Cargo.toml":
                yield os.path.join(root, filename)
def check_dep_license_errors(filenames, progress=True):
    """Yield (filename, 0, message) for every vendored dependency manifest
    that contains no recognized license line (from licenses_dep_toml).

    Fix: dropped the needless enumerate() whose index was never used.
    """
    filenames = progress_wrapper(filenames) if progress else filenames
    for filename in filenames:
        with open(filename, "r") as f:
            ok_licensed = False
            lines = f.readlines()
            for line in lines:
                for license_line in licenses_dep_toml:
                    ok_licensed |= (license_line in line)
        if not ok_licensed:
            yield (filename, 0, "dependency should contain a valid license.")
class LintRunner(object):
    """Loads and runs a user-provided lint script.

    A lint script is a Python file containing a class named ``Lint`` that
    inherits from this class and implements ``run()``, yielding
    (path, line, message) tuples.
    """
    # NOTE(review): exclude_dirs uses a mutable default ([]); it appears
    # only to be read — confirm before changing.
    def __init__(self, lint_path=None, only_changed_files=True, exclude_dirs=[], progress=True):
        self.only_changed_files = only_changed_files
        self.exclude_dirs = exclude_dirs
        self.progress = progress
        self.path = lint_path
    def check(self):
        # Validate and execute self.path, yielding error tuples for both
        # problems with the script itself and errors the script reports.
        if not os.path.exists(self.path):
            yield (self.path, 0, "file does not exist")
            return
        if not self.path.endswith('.py'):
            yield (self.path, 0, "lint should be a python script")
            return
        dir_name, filename = os.path.split(self.path)
        # The script's directory is put on sys.path so it can import its
        # own neighbors; removed again below.  `imp` is Python-2-era.
        sys.path.append(dir_name)
        module = imp.load_source(filename[:-3], self.path)
        if hasattr(module, 'Lint'):
            if issubclass(module.Lint, LintRunner):
                lint = module.Lint(self.path, self.only_changed_files, self.exclude_dirs, self.progress)
                for error in lint.run():
                    if not hasattr(error, '__iter__'):
                        yield (self.path, 1, "errors should be a tuple of (path, line, reason)")
                        return
                    yield error
            else:
                yield (self.path, 1, "class 'Lint' should inherit from 'LintRunner'")
        else:
            yield (self.path, 1, "script should contain a class named 'Lint'")
        sys.path.remove(dir_name)
    def get_files(self, path, **kwargs):
        # Convenience for Lint subclasses: a FileList honoring this
        # runner's settings, overridable per call via **kwargs.
        args = ['only_changed_files', 'exclude_dirs', 'progress']
        kwargs = {k: kwargs.get(k, getattr(self, k)) for k in args}
        return FileList(path, **kwargs)
    def run(self):
        # Default implementation: subclasses must override.
        yield (self.path, 0, "class 'Lint' should implement 'run' method")
def run_lint_scripts(only_changed_files=False, progress=True):
    """Run every user lint script listed in config['lint-scripts'],
    yielding each (path, line, message) error they produce.
    """
    runner = LintRunner(only_changed_files=only_changed_files, progress=progress)
    for script_path in config['lint-scripts']:
        runner.path = script_path
        for error in runner.check():
            yield error
def check_commits(path='.'):
    """Check all commit messages since the last merge; yield one error if
    any of them contains the standalone word "wip".

    Fix: `raise StopIteration` after a yield becomes RuntimeError on
    Python 3.7+ (PEP 479); `return` is the correct, equivalent way to stop
    the generator and works identically on Python 2.
    """
    args = ['git', 'log', '-n1', '--merges', '--format=%H']
    last_merge = subprocess.check_output(args, cwd=path).strip()
    args = ['git', 'log', '{}..HEAD'.format(last_merge), '--format=%s']
    commits = subprocess.check_output(args, cwd=path).lower().splitlines()
    for commit in commits:
        # .split() to only match entire words
        if 'wip' in commit.split():
            yield ('.', 0, 'no commits should contain WIP')
            return
def scan(only_changed_files=False, progress=True):
    """Entry point: run every tidy check and print the errors.

    Returns 1 when any error was reported, 0 otherwise (suitable as a
    process exit code).
    """
    # check config file for errors
    config_errors = check_config_file(CONFIG_FILE_PATH)
    # check directories contain expected files
    directory_errors = check_directory_files(config['check_ext'])
    # standard checks
    files_to_check = filter_files('.', only_changed_files, progress)
    checking_functions = (check_flake8, check_lock, check_webidl_spec, check_json, check_yaml)
    line_checking_functions = (check_license, check_by_line, check_toml, check_shell,
                               check_rust, check_spec, check_modeline)
    file_errors = collect_errors_for_files(files_to_check, checking_functions, line_checking_functions)
    # check dependency licenses
    dep_license_errors = check_dep_license_errors(get_dep_toml_files(only_changed_files), progress)
    # other lint checks
    lint_errors = run_lint_scripts(only_changed_files, progress)
    # check commits for WIP
    commit_errors = check_commits()
    # chain all the iterators
    errors = itertools.chain(config_errors, directory_errors, file_errors, dep_license_errors, lint_errors,
                             commit_errors)
    # `error` doubles as the "anything reported?" flag after the loop.
    error = None
    for error in errors:
        colorama.init()
        print "\r\033[94m{}\033[0m:\033[93m{}\033[0m: \033[91m{}\033[0m".format(*error)
    print
    if error is None:
        colorama.init()
        print "\033[92mtidy reported no errors.\033[0m"
    return int(error is not None)
|
hkchenhongyi/django | refs/heads/master | django/contrib/messages/api.py | 512 | from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest
# Public API of django.contrib.messages exposed from this module.
__all__ = (
    'add_message', 'get_messages',
    'get_level', 'set_level',
    'debug', 'info', 'success', 'warning', 'error',
    'MessageFailure',
)
class MessageFailure(Exception):
    """Raised by add_message() when the messages middleware is not
    installed and fail_silently is False."""
    pass
def add_message(request, level, message, extra_tags='', fail_silently=False):
    """
    Attempts to add a message to the request using the 'messages' app.

    Raises MessageFailure when no message storage is attached to the
    request, unless fail_silently is True.
    """
    if not isinstance(request, HttpRequest):
        raise TypeError("add_message() argument must be an HttpRequest object, "
                        "not '%s'." % request.__class__.__name__)
    if not hasattr(request, '_messages'):
        if fail_silently:
            return None
        raise MessageFailure('You cannot add messages without installing '
                             'django.contrib.messages.middleware.MessageMiddleware')
    return request._messages.add(level, message, extra_tags)
def get_messages(request):
    """
    Returns the message storage on the request if it exists, otherwise
    returns an empty list.
    """
    return getattr(request, '_messages', [])
def get_level(request):
    """
    Return the minimum level of messages to be recorded.

    The default level is the ``MESSAGE_LEVEL`` setting. If this is not
    found, the ``INFO`` level is used.
    """
    if hasattr(request, '_messages'):
        return request._messages.level
    # No storage on the request yet: consult a default storage instance.
    return default_storage(request).level
def set_level(request, level):
    """
    Set the minimum level of messages to be recorded; return True when
    the level was stored successfully, False otherwise.

    If set to ``None``, the default level will be used (see the
    ``get_level`` method).
    """
    has_storage = hasattr(request, '_messages')
    if has_storage:
        request._messages.level = level
    return has_storage
def debug(request, message, extra_tags='', fail_silently=False):
    """
    Add a message with the ``DEBUG`` level.
    """
    level = constants.DEBUG
    add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
def info(request, message, extra_tags='', fail_silently=False):
    """
    Add a message with the ``INFO`` level.
    """
    level = constants.INFO
    add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
def success(request, message, extra_tags='', fail_silently=False):
    """
    Add a message with the ``SUCCESS`` level.
    """
    level = constants.SUCCESS
    add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
def warning(request, message, extra_tags='', fail_silently=False):
    """
    Add a message with the ``WARNING`` level.
    """
    level = constants.WARNING
    add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
def error(request, message, extra_tags='', fail_silently=False):
    """
    Add a message with the ``ERROR`` level.
    """
    level = constants.ERROR
    add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
|
ykalchevskiy/cookiecutter-yad | refs/heads/master | {{ cookiecutter.repo_name }}/{{ cookiecutter.project_name }}/{{ cookiecutter.project_name }}/settings/base.py | 1 | """
Django settings for {{ cookiecutter.project_name }} project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
Security: https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
Database: https://docs.djangoproject.com/en/dev/ref/settings/#databases
Internationalization: https://docs.djangoproject.com/en/dev/topics/i18n/
Static: https://docs.djangoproject.com/en/dev/howto/static-files/
"""
import os
from django.conf import global_settings # noqa
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse_lazy # noqa
def get_env_variable(var_name):
    """Return the value of the environment variable *var_name*.

    Raises ImproperlyConfigured (rather than a bare KeyError) so a
    missing setting produces a clear startup error.
    """
    value = os.environ.get(var_name)
    if value is None:
        raise ImproperlyConfigured(
            "Set the {var_name} environment variable".format(var_name=var_name)
        )
    return value
# Paths: BASE_DIR is the Django project package; REPO_DIR is the
# repository checkout containing it.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
REPO_DIR = os.path.dirname(BASE_DIR)
# The secret key is never stored in the repo; it must come from the
# environment (see get_env_variable above).
SECRET_KEY = get_env_variable('SECRET_KEY')
# NOTE(review): DEBUG defaults to True in this base module; production
# settings modules are expected to override it.
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
DJANGO_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)
LOCAL_APPS = (
    'apps.{{ cookiecutter.app_name }}',
)
# Third-party apps go here so INSTALLED_APPS stays grouped by origin.
THIRD_PARTY = ()
INSTALLED_APPS = DJANGO_APPS + LOCAL_APPS + THIRD_PARTY
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = '{{ cookiecutter.project_name }}.urls'
WSGI_APPLICATION = '{{ cookiecutter.project_name }}.wsgi.application'
# Database (development default: local SQLite file)
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'dev.db'),
    }
}
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_ROOT = os.path.join(REPO_DIR, 'assets')
STATIC_URL = '/static/'
# Third party
qianwenming/mapnik | refs/heads/master | scons/scons-local-2.3.1/SCons/Tool/f95.py | 11 | """engine.SCons.Tool.f95
Tool-specific initialization for the generic Posix f95 Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/f95.py 2014/03/02 14:18:15 garyo"
import SCons.Defaults
import SCons.Tool
import SCons.Util
import fortran
from SCons.Tool.FortranCommon import add_all_to_env, add_f95_to_env
# Candidate compiler executables probed by env.Detect(), in preference order.
compilers = ['f95']
def generate(env):
    """Add Builders and construction variables for the generic Posix f95
    Fortran compiler to the given SCons Environment."""
    add_all_to_env(env)
    add_f95_to_env(env)

    # Prefer a compiler actually found on the system; fall back to 'f95'.
    fortran_compiler = env.Detect(compilers) or 'f95'
    for variable in ('F95', 'SHF95', 'FORTRAN', 'SHFORTRAN'):
        env[variable] = fortran_compiler
def exists(env):
    # SCons tool-existence hook: truthy (the compiler name) when one of the
    # known f95 compilers is found on the PATH.
    return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
h2oai/h2o | refs/heads/master | py/testdir_single_jvm/test_exec2_cmp_many_cols.py | 9 | import unittest, random, sys, time
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e
print "Many cols, compare two data frames using exec =="
def write_syn_dataset(csvPathname, rowCount, colCount, SEED):
    """Write a rowCount x colCount CSV of random 0/1 values to csvPathname.

    A single generator seeded with SEED makes the file reproducible.
    """
    rng = random.Random(SEED)
    with open(csvPathname, "w+") as out:
        for _ in range(rowCount):
            cells = [str(rng.randint(0, 1)) for _ in range(colCount)]
            out.write(",".join(cells) + "\n")
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init(1,java_heap_GB=14)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_exec2_many_cols(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
tryList = [
(10, 10, 'cA', 200, 200),
(10, 1000, 'cB', 200, 200),
(10, 1000, 'cB', 200, 200),
# we timeout/fail on 500k? stop at 200k
# (10, 500000, 'cC', 200, 200),
# (10, 1000000, 'cD', 200, 360),
# (10, 1100000, 'cE', 60, 100),
# (10, 1200000, 'cF', 60, 120),
]
# h2b.browseTheCloud()
for (rowCount, colCount, hex_key, timeoutSecs, timeoutSecs2) in tryList:
SEEDPERFILE = random.randint(0, sys.maxint)
csvFilename = 'syn_' + str(SEEDPERFILE) + "_" + str(rowCount) + 'x' + str(colCount) + '.csv'
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
print "\nCreating random", csvPathname
write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE)
# import it N times and compare the N hex keys
REPEAT = 5
for i in range(REPEAT):
hex_key_i = hex_key + "_"+ str(i)
start = time.time()
parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=hex_key_i,
timeoutSecs=timeoutSecs, doSummary=False)
print "Parse:", parseResult['destination_key'], "took", time.time() - start, "seconds"
# We should be able to see the parse result?
start = time.time()
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], timeoutSecs=timeoutSecs2)
print "Inspect:", parseResult['destination_key'], "took", time.time() - start, "seconds"
h2o_cmd.infoFromInspect(inspect, csvPathname)
print "\n" + csvPathname, \
" numRows:", "{:,}".format(inspect['numRows']), \
" numCols:", "{:,}".format(inspect['numCols'])
# should match # of cols in header or ??
self.assertEqual(inspect['numCols'], colCount,
"parse created result with the wrong number of cols %s %s" % (inspect['numCols'], colCount))
self.assertEqual(inspect['numRows'], rowCount,
"parse created result with the wrong number of rows (header shouldn't count) %s %s" % \
(inspect['numRows'], rowCount))
# compare each to 0
for i in range(1,REPEAT):
hex_key_i = hex_key + "_" + str(i)
hex_key_0 = hex_key + "_0"
print "\nComparing %s to %s" % (hex_key_i, hex_key_0)
if 1==0:
execExpr = "%s[1,]+%s[1,]" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "%s[,1]+%s[,1]" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "%s+%s" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "%s!=%s" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "%s==%s" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "sum(%s==%s)" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "s=sum(%s==%s)" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "s=c(1); s=c(sum(%s==%s))" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "n=c(1); n=c(nrow(%s)*ncol(%s))" % (hex_key_0, hex_key_i)
resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
execExpr = "r=c(1); r=s==n"
resultExec, result, h2e.exec_expr(execExpr=execExpr, timeoutSecs=30)
print "result:", result
# Allow running this test file directly, outside the suite runner.
if __name__ == '__main__':
    h2o.unit_main()
|
Ayub-Khan/edx-platform | refs/heads/master | common/test/acceptance/tests/discussion/test_cohorts.py | 137 | """
Tests related to the cohorting feature.
"""
from uuid import uuid4
from .helpers import BaseDiscussionMixin, BaseDiscussionTestCase
from .helpers import CohortTestMixin
from ..helpers import UniqueCourseTest
from ...pages.lms.auto_auth import AutoAuthPage
from ...fixtures.course import (CourseFixture, XBlockFixtureDesc)
from ...pages.lms.discussion import (DiscussionTabSingleThreadPage, InlineDiscussionThreadPage, InlineDiscussionPage)
from ...pages.lms.courseware import CoursewarePage
from nose.plugins.attrib import attr
class NonCohortedDiscussionTestMixin(BaseDiscussionMixin):
    """
    Mixin for tests of discussion in non-cohorted courses.
    """
    def setup_cohorts(self):
        """
        No cohorts are desired for this mixin.
        """
        # Intentionally a no-op: the course stays un-cohorted, so posts
        # remain visible to everyone.
        pass

    def test_non_cohort_visibility_label(self):
        # setup_thread() is provided by the concrete test class this mixes into.
        self.setup_thread(1)
        self.assertEquals(self.thread_page.get_group_visibility_label(), "This post is visible to everyone.")
class CohortedDiscussionTestMixin(BaseDiscussionMixin, CohortTestMixin):
    """
    Mixin for tests of discussion in cohorted courses.
    """
    def setup_cohorts(self):
        """
        Sets up the course to use cohorting with a single defined cohort.
        """
        self.setup_cohort_config(self.course_fixture)
        self.cohort_1_name = "Cohort 1"
        self.cohort_1_id = self.add_manual_cohort(self.course_fixture, self.cohort_1_name)

    def test_cohort_visibility_label(self):
        # Must be moderator to view content in a cohort other than your own
        AutoAuthPage(self.browser, course_id=self.course_id, roles="Moderator").visit()
        # Post into the cohort, then check the group-visibility label.
        self.thread_id = self.setup_thread(1, group_id=self.cohort_1_id)
        self.assertEquals(
            self.thread_page.get_group_visibility_label(),
            "This post is visible only to {}.".format(self.cohort_1_name)
        )

        # Disable cohorts and verify that the post now shows as visible to everyone.
        self.disable_cohorting(self.course_fixture)
        self.refresh_thread_page(self.thread_id)
        self.assertEquals(self.thread_page.get_group_visibility_label(), "This post is visible to everyone.")
class DiscussionTabSingleThreadTest(BaseDiscussionTestCase):
    """
    Tests for the discussion page displaying a single thread.

    Concrete behavior (cohorted vs. non-cohorted) comes from the mixins in
    the subclasses below; this class only supplies the page setup.
    """
    def setUp(self):
        super(DiscussionTabSingleThreadTest, self).setUp()
        # setup_cohorts() is provided by the mixin used in each subclass.
        self.setup_cohorts()
        AutoAuthPage(self.browser, course_id=self.course_id).visit()

    def setup_thread_page(self, thread_id):
        self.thread_page = DiscussionTabSingleThreadPage(self.browser, self.course_id, self.discussion_id, thread_id)  # pylint: disable=attribute-defined-outside-init
        self.thread_page.visit()

    # pylint: disable=unused-argument
    def refresh_thread_page(self, thread_id):
        # A full browser refresh re-renders the already-visited thread page.
        self.browser.refresh()
        self.thread_page.wait_for_page()
@attr('shard_5')
class CohortedDiscussionTabSingleThreadTest(DiscussionTabSingleThreadTest, CohortedDiscussionTestMixin):
    """
    Runs the discussion-tab single-thread tests against a cohorted course.

    Every test method is inherited from CohortedDiscussionTestMixin; this
    class only binds that mixin to the page setup in its base class.
    """
@attr('shard_5')
class NonCohortedDiscussionTabSingleThreadTest(DiscussionTabSingleThreadTest, NonCohortedDiscussionTestMixin):
    """
    Runs the discussion-tab single-thread tests against a non-cohorted course.

    Every test method is inherited from NonCohortedDiscussionTestMixin; this
    class only binds that mixin to the page setup in its base class.
    """
class InlineDiscussionTest(UniqueCourseTest):
    """
    Tests for inline discussions
    """
    def setUp(self):
        super(InlineDiscussionTest, self).setUp()
        self.discussion_id = "test_discussion_{}".format(uuid4().hex)
        # Build a course containing a single discussion XBlock wired to
        # self.discussion_id so the inline discussion can be located later.
        self.course_fixture = CourseFixture(**self.course_info).add_children(
            XBlockFixtureDesc("chapter", "Test Section").add_children(
                XBlockFixtureDesc("sequential", "Test Subsection").add_children(
                    XBlockFixtureDesc("vertical", "Test Unit").add_children(
                        XBlockFixtureDesc(
                            "discussion",
                            "Test Discussion",
                            metadata={"discussion_id": self.discussion_id}
                        )
                    )
                )
            )
        ).install()
        # setup_cohorts() is provided by the mixin used in each subclass.
        self.setup_cohorts()
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id).visit().get_user_id()

    def setup_thread_page(self, thread_id):
        CoursewarePage(self.browser, self.course_id).visit()
        self.show_thread(thread_id)

    def show_thread(self, thread_id):
        # Expand the inline discussion and verify exactly one thread is shown.
        discussion_page = InlineDiscussionPage(self.browser, self.discussion_id)
        discussion_page.expand_discussion()
        self.assertEqual(discussion_page.get_num_displayed_threads(), 1)
        self.thread_page = InlineDiscussionThreadPage(self.browser, thread_id)  # pylint: disable=attribute-defined-outside-init
        self.thread_page.expand()

    def refresh_thread_page(self, thread_id):
        self.browser.refresh()
        self.show_thread(thread_id)
@attr('shard_5')
class CohortedInlineDiscussionTest(InlineDiscussionTest, CohortedDiscussionTestMixin):
    """
    Runs the inline-discussion tests against a cohorted course.

    Every test method is inherited from CohortedDiscussionTestMixin; this
    class only binds that mixin to the inline-discussion page setup.
    """
@attr('shard_5')
class NonCohortedInlineDiscussionTest(InlineDiscussionTest, NonCohortedDiscussionTestMixin):
    """
    Runs the inline-discussion tests against a non-cohorted course.

    Every test method is inherited from NonCohortedDiscussionTestMixin; this
    class only binds that mixin to the inline-discussion page setup.
    """
|
ryfeus/lambda-packs | refs/heads/master | Tensorflow_Pandas_Numpy/source3.6/setuptools/command/bdist_wininst.py | 991 | import distutils.command.bdist_wininst as orig
class bdist_wininst(orig.bdist_wininst):
    def reinitialize_command(self, command, reinit_subcommands=0):
        """
        Supplement reinitialize_command to work around
        http://bugs.python.org/issue20819
        """
        cmd = self.distribution.reinitialize_command(
            command, reinit_subcommands)
        if command in ('install', 'install_lib'):
            # issue 20819 workaround: clear install_lib so it is recomputed
            # instead of reused from a previous invocation of the command.
            cmd.install_lib = None
        return cmd

    def run(self):
        # _is_running lets other setuptools code detect that bdist_wininst
        # is mid-run; the finally block guarantees it is always reset.
        self._is_running = True
        try:
            orig.bdist_wininst.run(self)
        finally:
            self._is_running = False
|
sniemi/SamPy | refs/heads/master | sandbox/src1/stats.py | 1 | # Copyright (c) 1999-2007 Gary Strangman; All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Comments and/or additions are welcome (send e-mail to:
# strang@nmr.mgh.harvard.edu).
#
"""
stats.py module
(Requires pstat.py module.)
#################################################
####### Written by: Gary Strangman ###########
####### Last modified: Dec 18, 2007 ###########
#################################################
A collection of basic statistical functions for python. The function
names appear below.
IMPORTANT: There are really *3* sets of functions. The first set has an 'l'
prefix, which can be used with list or tuple arguments. The second set has
an 'a' prefix, which can accept NumPy array arguments. These latter
functions are defined only when NumPy is available on the system. The third
type has NO prefix (i.e., has the name that appears below). Functions of
this set are members of a "Dispatch" class, c/o David Ascher. This class
allows different functions to be called depending on the type of the passed
arguments. Thus, stats.mean is a member of the Dispatch class and
stats.mean(range(20)) will call stats.lmean(range(20)) while
stats.mean(Numeric.arange(20)) will call stats.amean(Numeric.arange(20)).
This is a handy way to keep consistent function names when different
argument types require different functions to be called. Having
implemented the Dispatch class, however, means that to get info on
a given function, you must use the REAL function name ... that is
"print stats.lmean.__doc__" or "print stats.amean.__doc__" work fine,
while "print stats.mean.__doc__" will print the doc for the Dispatch
class. NUMPY FUNCTIONS ('a' prefix) generally have more argument options
but should otherwise be consistent with the corresponding list functions.
Disclaimers: The function list is obviously incomplete and, worse, the
functions are not optimized. All functions have been tested (some more
so than others), but they are far from bulletproof. Thus, as with any
free software, no warranty or guarantee is expressed or implied. :-) A
few extra functions that don't appear in the list below can be found by
interested treasure-hunters. These functions don't necessarily have
both list and array versions but were deemed useful
CENTRAL TENDENCY: geometricmean
harmonicmean
mean
median
medianscore
mode
MOMENTS: moment
variation
skew
kurtosis
skewtest (for Numpy arrays only)
kurtosistest (for Numpy arrays only)
normaltest (for Numpy arrays only)
ALTERED VERSIONS: tmean (for Numpy arrays only)
tvar (for Numpy arrays only)
tmin (for Numpy arrays only)
tmax (for Numpy arrays only)
tstdev (for Numpy arrays only)
tsem (for Numpy arrays only)
describe
FREQUENCY STATS: itemfreq
scoreatpercentile
percentileofscore
histogram
cumfreq
relfreq
VARIABILITY: obrientransform
samplevar
samplestdev
signaltonoise (for Numpy arrays only)
var
stdev
sterr
sem
z
zs
zmap (for Numpy arrays only)
TRIMMING FCNS: threshold (for Numpy arrays only)
trimboth
trim1
round (round all vals to 'n' decimals; Numpy only)
CORRELATION FCNS: covariance (for Numpy arrays only)
correlation (for Numpy arrays only)
paired
pearsonr
spearmanr
pointbiserialr
kendalltau
linregress
INFERENTIAL STATS: ttest_1samp
ttest_ind
ttest_rel
chisquare
ks_2samp
mannwhitneyu
ranksums
wilcoxont
kruskalwallish
friedmanchisquare
PROBABILITY CALCS: chisqprob
erfcc
zprob
ksprob
fprob
betacf
gammln
betai
ANOVA FUNCTIONS: F_oneway
F_value
SUPPORT FUNCTIONS: writecc
incr
sign (for Numpy arrays only)
sum
cumsum
ss
summult
sumdiffsquared
square_of_sums
shellsort
rankdata
outputpairedstats
findwithin
"""
## CHANGE LOG:
## ===========
## 07-11.26 ... conversion for numpy started
## 07-05-16 ... added Lin's Concordance Correlation Coefficient (alincc) and acov
## 05-08-21 ... added "Dice's coefficient"
## 04-10-26 ... added ap2t(), an ugly fcn for converting p-vals to T-vals
## 04-04-03 ... added amasslinregress() function to do regression on N-D arrays
## 03-01-03 ... CHANGED VERSION TO 0.6
## fixed atsem() to properly handle limits=None case
## improved histogram and median functions (estbinwidth) and
## fixed atvar() function (wrong answers for neg numbers?!?)
## 02-11-19 ... fixed attest_ind and attest_rel for div-by-zero Overflows
## 02-05-10 ... fixed lchisqprob indentation (failed when df=even)
# match Python License, fixed doc string & imports
## 00-04-13 ... pulled all "global" statements, except from aanova()
## added/fixed lots of documentation, removed io.py dependency
## changed to version 0.5
## 99-11-13 ... added asign() function
## 99-11-01 ... changed version to 0.4 ... enough incremental changes now
## 99-10-25 ... added acovariance and acorrelation functions
## 99-10-10 ... fixed askew/akurtosis to avoid divide-by-zero errors
## added aglm function (crude, but will be improved)
## 99-10-04 ... upgraded acumsum, ass, asummult, asamplevar, avar, etc. to
## all handle lists of 'dimension's and keepdims
## REMOVED ar0, ar2, ar3, ar4 and replaced them with around
## reinserted fixes for abetai to avoid math overflows
## 99-09-05 ... rewrote achisqprob/aerfcc/aksprob/afprob/abetacf/abetai to
## handle multi-dimensional arrays (whew!)
## 99-08-30 ... fixed l/amoment, l/askew, l/akurtosis per D'Agostino (1990)
## added anormaltest per same reference
## re-wrote azprob to calc arrays of probs all at once
## 99-08-22 ... edited attest_ind printing section so arrays could be rounded
## 99-08-19 ... fixed amean and aharmonicmean for non-error(!) overflow on
## short/byte arrays (mean of #s btw 100-300 = -150??)
## 99-08-09 ... fixed asum so that the None case works for Byte arrays
## 99-08-08 ... fixed 7/3 'improvement' to handle t-calcs on N-D arrays
## 99-07-03 ... improved attest_ind, attest_rel (zero-division errortrap)
## 99-06-24 ... fixed bug(?) in attest_ind (n1=a.shape[0])
## 04/11/99 ... added asignaltonoise, athreshold functions, changed all
## max/min in array section to N.maximum/N.minimum,
## fixed square_of_sums to prevent integer overflow
## 04/10/99 ... !!! Changed function name ... sumsquared ==> square_of_sums
## 03/18/99 ... Added ar0, ar2, ar3 and ar4 rounding functions
## 02/28/99 ... Fixed aobrientransform to return an array rather than a list
## 01/15/99 ... Essentially ceased updating list-versions of functions (!!!)
## 01/13/99 ... CHANGED TO VERSION 0.3
## fixed bug in a/lmannwhitneyu p-value calculation
## 12/31/98 ... fixed variable-name bug in ldescribe
## 12/19/98 ... fixed bug in findwithin (fcns needed pstat. prefix)
## 12/16/98 ... changed amedianscore to return float (not array) for 1 score
## 12/14/98 ... added atmin and atmax functions
## removed umath from import line (not needed)
## l/ageometricmean modified to reduce chance of overflows (take
## nth root first, then multiply)
## 12/07/98 ... added __version__variable (now 0.2)
## removed all 'stats.' from anova() fcn
## 12/06/98 ... changed those functions (except shellsort) that altered
## arguments in-place ... cumsum, ranksort, ...
## updated (and fixed some) doc-strings
## 12/01/98 ... added anova() function (requires NumPy)
## incorporated Dispatch class
## 11/12/98 ... added functionality to amean, aharmonicmean, ageometricmean
## added 'asum' function (added functionality to N.add.reduce)
## fixed both moment and amoment (two errors)
## changed name of skewness and askewness to skew and askew
## fixed (a)histogram (which sometimes counted points <lowerlimit)
import pstat # required 3rd party module
import math, string, copy # required python modules
from types import *
__version__ = 0.6
############# DISPATCH CODE ##############
class Dispatch:
    """
The Dispatch class, care of David Ascher, allows different functions to
be called depending on the argument types.  This way, there can be one
function name regardless of the argument type.  To access function doc
in stats.py module, prefix the function with an 'l' or 'a' for list or
array arguments, respectively.  That is, print stats.lmean.__doc__ or
print stats.amean.__doc__ or whatever.
"""
    def __init__(self, *tuples):
        # Maps an argument type -> the function handling that type.
        self._dispatch = {}
        for func, types in tuples:
            for t in types:
                # A type may be registered only once.
                if t in self._dispatch.keys():
                    raise ValueError, "can't have two dispatches on "+str(t)
                self._dispatch[t] = func
        # Cached list of registered types, used by __call__'s membership test.
        self._types = self._dispatch.keys()

    def __call__(self, arg1, *args, **kw):
        # Dispatch on the type of the FIRST argument only.
        if type(arg1) not in self._types:
            raise TypeError, "don't know how to dispatch %s arguments" % type(arg1)
        # apply() is the Python 2 spelling of func(*((arg1,)+args), **kw).
        return apply(self._dispatch[type(arg1)], (arg1,) + args, kw)
##########################################################################
######################## LIST-BASED FUNCTIONS ########################
##########################################################################
### Define these regardless
####################################
####### CENTRAL TENDENCY #########
####################################
def lgeometricmean (inlist):
    """
Calculates the geometric mean of the values in the passed list.
That is:  n-th root of (x1 * x2 * ... * xn).  Assumes a '1D' list.

Usage:   lgeometricmean(inlist)
"""
    # Take the n-th root of each item first and multiply the roots, rather
    # than multiplying everything and rooting once, to limit overflow risk.
    root = 1.0 / len(inlist)
    product = 1.0
    for item in inlist:
        product = product * pow(item, root)
    return product
def lharmonicmean (inlist):
    """
Calculates the harmonic mean of the values in the passed list.
That is:  n / (1/x1 + 1/x2 + ... + 1/xn).  Assumes a '1D' list.

Usage:   lharmonicmean(inlist)
"""
    # FIX: the 'def' line and opening docstring quotes were missing here,
    # leaving the module syntactically broken; restored.  Local renamed from
    # 'sum' to avoid shadowing the builtin (and the module-level dispatcher).
    recip_total = 0
    for item in inlist:
        recip_total = recip_total + 1.0/item
    return len(inlist) / recip_total
def lmean (inlist):
    """
Returns the arithmetic mean of the values in the passed list.
Assumes a '1D' list, but will function on the 1st dim of an array(!).

Usage:   lmean(inlist)
"""
    total = 0
    for value in inlist:
        total += value
    return total / float(len(inlist))
def lmedian (inlist,numbins=1000):
    """
Returns the computed median value of a list of numbers, given the
number of bins to use for the histogram (more bins brings the computed value
closer to the median score, default number of bins = 1000).  See G.W.
Heiman's Basic Stats (1st Edition), or CRC Probability & Statistics.

Usage:   lmedian (inlist, numbins=1000)
"""
    # Histogram over the full data range, then interpolate within the bin
    # that contains the 50th percentile.
    (hist, smallest, binsize, extras) = histogram(inlist,numbins,[min(inlist),max(inlist)]) # make histog
    cumhist = cumsum(hist)              # make cumulative histogram
    for i in range(len(cumhist)):        # get 1st(!) index holding 50%ile score
        if cumhist[i]>=len(inlist)/2.0:
            cfbin = i
            break
    LRL = smallest + binsize*cfbin        # get lower read limit of that bin
    # count of scores strictly below the 50%ile bin
    cfbelow = cumhist[cfbin-1]
    freq = float(hist[cfbin])                # frequency IN the 50%ile bin
    # linear interpolation within the bin (grouped-data median formula)
    median = LRL + ((len(inlist)/2.0 - cfbelow)/float(freq))*binsize # median formula
    return median
def lmedianscore (inlist):
    """
Returns the 'middle' score of the passed list.  If there is an even
number of scores, the mean of the 2 middle scores is returned.

Usage:   lmedianscore(inlist)
"""
    # Sort a copy so the caller's list is left untouched.
    ranked = copy.deepcopy(inlist)
    ranked.sort()
    # FIX: use explicit floor division.  Under Python 2 '//' is identical to
    # the old '/' for ints; under Python 3 plain '/' would yield a float and
    # break the indexing below.
    midpoint = len(ranked) // 2
    if len(ranked) % 2 == 0:
        # even count: average the two middle scores
        return float(ranked[midpoint] + ranked[midpoint - 1]) / 2
    # odd count: the single middle score
    return ranked[midpoint]
def lmode(inlist):
    """
Returns a list of the modal (most common) score(s) in the passed
list.  If there is more than one such score, all are returned.  The
bin-count for the mode(s) is also returned.

Usage:   lmode(inlist)
Returns: bin-count for mode(s), a list of modal value(s)
"""
    scores = pstat.unique(inlist)
    scores.sort()
    # Count occurrences of each distinct score, in ascending score order.
    counts = [inlist.count(item) for item in scores]
    best = max(counts)
    # All scores tied at the maximum count, still in ascending order.
    modes = [score for score, c in zip(scores, counts) if c == best]
    return best, modes
####################################
############ MOMENTS #############
####################################
def lmoment(inlist,moment=1):
    """
Calculates the nth moment about the mean for a sample (defaults to
the 1st moment).  Used to calculate coefficients of skewness and kurtosis.

Usage:   lmoment(inlist,moment=1)
Returns: appropriate moment (r) from ... 1/n * SUM((inlist(i)-mean)**r)
"""
    # The first moment about the mean is zero by definition.
    if moment == 1:
        return 0.0
    mn = mean(inlist)          # module-level dispatcher
    total = 0
    for score in inlist:
        total = total + (score - mn) ** moment
    return total / float(len(inlist))
def lvariation(inlist):
    """
Returns the coefficient of variation, as defined in CRC Standard
Probability and Statistics, p.6: 100 * (sample std deviation / mean).

Usage:   lvariation(inlist)
"""
    # Relies on the module-level samplestdev() and mean() dispatchers.
    return 100.0*samplestdev(inlist)/float(mean(inlist))
def lskew(inlist):
    """
Returns the skewness of a distribution, as defined in Numerical
Recipes (alternate defn in CRC Standard Probability and Statistics, p.6.)

Usage:   lskew(inlist)
"""
    # 3rd central moment over the 1.5 power of the 2nd (biased variance):
    # m3 / m2**1.5, via the module-level moment() dispatcher.
    return moment(inlist,3)/pow(moment(inlist,2),1.5)
def lkurtosis(inlist):
    """
Returns the kurtosis of a distribution, as defined in Numerical
Recipes (alternate defn in CRC Probability and Statistics, p.6.)

Usage:   lkurtosis(inlist)
"""
    # 4th central moment over the squared 2nd: m4 / m2**2, via the
    # module-level moment() dispatcher.  (Not excess kurtosis: no -3.)
    return moment(inlist,4)/pow(moment(inlist,2),2.0)
def ldescribe(inlist):
    """
Returns some descriptive statistics of the passed list (assumed to be 1D).

Usage:   ldescribe(inlist)
Returns: n, (min,max), mean, standard deviation, skew, kurtosis
"""
    # Each statistic is delegated to the module-level dispatcher functions.
    count = len(inlist)
    extremes = (min(inlist), max(inlist))
    return (count, extremes, mean(inlist), stdev(inlist),
            skew(inlist), kurtosis(inlist))
####################################
####### FREQUENCY STATS ##########
####################################
def litemfreq(inlist):
    """
Returns a list of pairs.  Each pair consists of one of the scores in inlist
and it's frequency count.  Assumes a 1D list is passed.

Usage:   litemfreq(inlist)
Returns: a 2D frequency table (col [0:n-1]=scores, col n=frequencies)
"""
    scores = pstat.unique(inlist)
    scores.sort()
    # Frequency of each distinct score, aligned with the sorted score list.
    counts = [inlist.count(item) for item in scores]
    return pstat.abut(scores, counts)
def lscoreatpercentile (inlist, percent):
    """
Returns the score at a given percentile relative to the distribution
given by inlist.  Accepts either a fraction (0-1) or a percentage (>1).

Usage:   lscoreatpercentile(inlist,percent)
"""
    # Percentages are converted to fractions, with a console warning.
    if percent > 1:
        print "\nDividing percent>1 by 100 in lscoreatpercentile().\n"
        percent = percent / 100.0
    targetcf = percent*len(inlist)          # target cumulative frequency
    h, lrl, binsize, extras = histogram(inlist)
    cumhist = cumsum(copy.deepcopy(h))
    # Find the first bin whose cumulative count reaches the target.
    for i in range(len(cumhist)):
        if cumhist[i] >= targetcf:
            break
    # Linearly interpolate within that bin (grouped-data percentile).
    score = binsize * ((targetcf - cumhist[i-1]) / float(h[i])) + (lrl+binsize*i)
    return score
def lpercentileofscore (inlist, score,histbins=10,defaultlimits=None):
    """
Returns the percentile value of a score relative to the distribution
given by inlist.  Formula depends on the values used to histogram the data(!).

Usage:   lpercentileofscore(inlist,score,histbins=10,defaultlimits=None)
"""
    h, lrl, binsize, extras = histogram(inlist,histbins,defaultlimits)
    cumhist = cumsum(copy.deepcopy(h))
    i = int((score - lrl)/float(binsize))   # index of the bin holding 'score'
    # Count below the bin plus the interpolated fraction of the bin itself,
    # expressed as a percentage of all scores.
    pct = (cumhist[i-1]+((score-(lrl+binsize*i))/float(binsize))*h[i])/float(len(inlist)) * 100
    return pct
def lhistogram (inlist,numbins=10,defaultreallimits=None,printextras=0):
    """
    Return (i) a list of histogram bin counts, (ii) the smallest value
    of the histogram binning, (iii) the bin width (the last 2 are not
    necessarily integers), and (iv) the number of points falling outside
    the bin range. Default number of bins is 10. If no sequence object
    is given for defaultreallimits, the routine picks (usually non-pretty)
    bins spanning all the numbers in inlist. A scalar or a 1-element
    sequence supplies only the lower limit; the upper limit is then
    derived from max(inlist).

    Usage:   lhistogram (inlist, numbins=10, defaultreallimits=None, printextras=0)
    Returns: list of bin values, lowerreallimit, binsize, extrapoints
    """
    if defaultreallimits is not None:
        if not isinstance(defaultreallimits, (list, tuple)) or len(defaultreallimits) == 1:
            # Only the lower limit was given; the upper one is computed.
            if isinstance(defaultreallimits, (list, tuple)):
                # Bug fix: extract the value; previously the sequence object
                # itself was used as the lower limit.
                lowerreallimit = defaultreallimits[0]
            else:
                lowerreallimit = defaultreallimits
            upperreallimit = 1.000001 * max(inlist)
        else:  # assume both limits given
            lowerreallimit = defaultreallimits[0]
            upperreallimit = defaultreallimits[1]
        binsize = (upperreallimit-lowerreallimit)/float(numbins)
    else:  # no limits given for histogram, both must be calc'd
        estbinwidth = (max(inlist)-min(inlist))/float(numbins) + 1e-6  # 1=>cover all
        binsize = ((max(inlist)-min(inlist)+estbinwidth))/float(numbins)
        lowerreallimit = min(inlist) - binsize/2  # lower real limit, 1st bin
    bins = [0]*(numbins)
    extrapoints = 0
    for num in inlist:
        if (num-lowerreallimit) < 0:
            extrapoints = extrapoints + 1
        else:
            bintoincrement = int((num-lowerreallimit)/float(binsize))
            if 0 <= bintoincrement < numbins:
                bins[bintoincrement] = bins[bintoincrement] + 1
            else:
                # Above the upper limit; previously hidden by a bare except.
                extrapoints = extrapoints + 1
    if (extrapoints > 0 and printextras == 1):
        # Single-argument print form behaves identically on Python 2 and 3.
        print('\nPoints outside given histogram range = ' + str(extrapoints))
    return (bins, lowerreallimit, binsize, extrapoints)
def lcumfreq(inlist,numbins=10,defaultreallimits=None):
    """
    Return a cumulative frequency histogram, built on the histogram
    function.

    Usage:   lcumfreq(inlist,numbins=10,defaultreallimits=None)
    Returns: list of cumfreq bin values, lowerreallimit, binsize, extrapoints
    """
    counts, lowerlimit, binsize, extrapoints = histogram(inlist, numbins, defaultreallimits)
    cumcounts = cumsum(copy.deepcopy(counts))
    return cumcounts, lowerlimit, binsize, extrapoints
def lrelfreq(inlist,numbins=10,defaultreallimits=None):
    """
    Return a relative frequency histogram, built on the histogram
    function: each bin count is divided by the total N.

    Usage:   lrelfreq(inlist,numbins=10,defaultreallimits=None)
    Returns: list of relative-freq bin values, lowerreallimit, binsize, extrapoints
    """
    counts, lowerlimit, binsize, extrapoints = histogram(inlist, numbins, defaultreallimits)
    total = float(len(inlist))
    for idx in range(len(counts)):
        counts[idx] = counts[idx] / total
    return counts, lowerlimit, binsize, extrapoints
####################################
##### VARIABILITY FUNCTIONS ######
####################################
def lobrientransform(*args):
    """
    Compute O'Brien's transform on the input data (any number of groups),
    used to test for homogeneity of variance prior to running one-way
    stats. From Maxwell and Delaney, p.112.

    Usage:   lobrientransform(*args)
    Returns: transformed data for use in an ANOVA
    Raises:  ValueError if a transformed group's mean deviates from the
             original group variance by more than a tiny tolerance
    """
    TINY = 1e-10
    k = len(args)
    n = [0.0]*k    # group sizes, kept as floats for the formula below
    v = [0.0]*k    # group variances
    m = [0.0]*k    # group means
    nargs = []     # deep copies of the groups, transformed in place
    for i in range(k):
        nargs.append(copy.deepcopy(args[i]))
        n[i] = float(len(nargs[i]))
        v[i] = var(nargs[i])
        m[i] = mean(nargs[i])
    for j in range(k):
        # range() requires an int; n[j] is deliberately a float, so iterate
        # over the actual group length (the original passed the float and
        # fails with a TypeError on modern Pythons).
        for i in range(len(nargs[j])):
            t1 = (n[j]-1.5)*n[j]*(nargs[j][i]-m[j])**2
            t2 = 0.5*v[j]*(n[j]-1.0)
            t3 = (n[j]-1.0)*(n[j]-2.0)
            nargs[j][i] = (t1-t2) / float(t3)
    # Sanity check: each transformed group's mean should equal the original
    # group's variance.
    check = 1
    for j in range(k):
        if v[j] - mean(nargs[j]) > TINY:
            check = 0
    if check != 1:
        raise ValueError('Problem in obrientransform.')
    else:
        return nargs
def lsamplevar (inlist):
    """
    Return the variance of the values in inlist using N in the
    denominator (i.e., it DESCRIBES the sample variance only).

    Usage:   lsamplevar(inlist)
    """
    mn = mean(inlist)
    deviations = [score - mn for score in inlist]
    return ss(deviations) / float(len(inlist))
def lsamplestdev (inlist):
    """
    Return the standard deviation of inlist using N in the denominator
    (i.e., it DESCRIBES the sample stdev only).

    Usage:   lsamplestdev(inlist)
    """
    variance = samplevar(inlist)
    return math.sqrt(variance)
def lcov (x,y, keepdims=0):
    """
    Return the sample covariance of paired lists x and y, using N-1 in
    the denominator (the unbiased estimator).

    The keepdims argument exists only for call-compatibility with the
    array version of this function; it is ignored here. (The original
    docstring describing dimension handling was copied from the array
    version and did not apply.)

    Usage:   lcov(x,y,keepdims=0)
    Returns: covariance of x and y (a float)
    """
    n = len(x)
    xmn = mean(x)
    ymn = mean(y)
    # Sum of products of paired deviations from the means. Named
    # 'sumprod' rather than 'ss' so the module-level ss() function is
    # not shadowed.
    sumprod = 0.0
    for i in range(len(x)):
        sumprod = sumprod + (x[i] - xmn) * (y[i] - ymn)
    return sumprod / float(n - 1)
def lvar (inlist):
    """
    Return the variance of the values in inlist using N-1 in the
    denominator (i.e., for estimating the population variance).

    Usage:   lvar(inlist)
    """
    mn = mean(inlist)
    deviations = [score - mn for score in inlist]
    return ss(deviations) / float(len(inlist) - 1)
def lstdev (inlist):
    """
    Return the standard deviation of inlist using N-1 in the denominator
    (i.e., to estimate the population stdev).

    Usage:   lstdev(inlist)
    """
    variance = var(inlist)
    return math.sqrt(variance)
def lsterr(inlist):
    """
    Return the standard error of the values in inlist: the N-1 stdev
    divided by sqrt(N).

    Usage:   lsterr(inlist)
    """
    n = len(inlist)
    return stdev(inlist) / float(math.sqrt(n))
def lsem (inlist):
    """
    Return the estimated standard error of the mean (sx-bar) of the
    values in inlist:  sem = stdev / sqrt(n).

    Usage:   lsem(inlist)
    """
    n = len(inlist)
    return stdev(inlist) / math.sqrt(n)
def lz (inlist, score):
    """
    Return the z-score of 'score' relative to the distribution in
    inlist. Sample-based; not appropriate for population calculations.

    Usage:   lz(inlist, score)
    """
    return (score - mean(inlist)) / samplestdev(inlist)
def lzs (inlist):
    """
    Return a list of z-scores, one for each score in inlist.

    Usage:   lzs(inlist)
    """
    return [z(inlist, item) for item in inlist]
####################################
####### TRIMMING FUNCTIONS #######
####################################
def ltrimboth (l,proportiontocut):
    """
    Slice off the given proportion of items from BOTH ends of the passed
    list (e.g. proportiontocut=0.1 drops the 'leftmost' 10% AND the
    'rightmost' 10% of scores). Assumes the list is sorted by magnitude.
    Slices off LESS if the proportion gives a non-integer slice index
    (i.e., conservatively slices off proportiontocut).

    Usage:   ltrimboth (l,proportiontocut)
    Returns: trimmed version of list l
    """
    cut = int(proportiontocut * len(l))
    return l[cut:len(l) - cut]
def ltrim1 (l,proportiontocut,tail='right'):
    """
    Slice off the given proportion of items from ONE end of the passed
    list (e.g. proportiontocut=0.1 drops the 'leftmost' or 'rightmost'
    10% of scores). Slices off LESS if the proportion gives a
    non-integer slice index (i.e., conservatively slices off
    proportiontocut).

    Usage:   ltrim1 (l,proportiontocut,tail='right')  or set tail='left'
    Returns: trimmed version of list l
    Raises:  ValueError for an unrecognized tail (previously this
             produced a confusing UnboundLocalError)
    """
    cut = int(proportiontocut * len(l))
    if tail == 'right':
        return l[:len(l) - cut]
    elif tail == 'left':
        return l[cut:]
    raise ValueError("tail must be 'right' or 'left', got %r" % (tail,))
####################################
##### CORRELATION FUNCTIONS ######
####################################
def lpaired(x,y):
    """
    Interactively determine the type of data and run the appropriate
    statistic for paired group data. Prompts on stdin for the sample
    type (independent/related/correlation) and, for correlations, the
    data type; prints the chosen statistic and probability.

    Usage:   lpaired(x,y)
    Returns: None (statistic name, value, and probability are printed)
    """
    samples = ''
    while samples not in ['i','r','I','R','c','C']:
        print '\nIndependent or related samples, or correlation (i,r,c): ',
        samples = raw_input()

    if samples in ['i','I','r','R']:
        print '\nComparing variances ...',
        # USE O'BRIEN'S TEST FOR HOMOGENEITY OF VARIANCE, Maxwell & delaney, p.112
        r = obrientransform(x,y)
        f,p = F_oneway(pstat.colex(r,0),pstat.colex(r,1))
        if p<0.05:
            vartype='unequal, p='+str(round(p,4))
        else:
            vartype='equal'
        print vartype
        if samples in ['i','I']:
            if vartype[0]=='e':
                # Equal variances: standard independent-samples t-test.
                t,p = ttest_ind(x,y,0)
                print '\nIndependent samples t-test: ', round(t,4),round(p,4)
            else:
                # Unequal variances: fall back to non-parametric tests.
                if len(x)>20 or len(y)>20:
                    z,p = ranksums(x,y)
                    print '\nRank Sums test (NONparametric, n>20): ', round(z,4),round(p,4)
                else:
                    u,p = mannwhitneyu(x,y)
                    print '\nMann-Whitney U-test (NONparametric, ns<20): ', round(u,4),round(p,4)
        else:  # RELATED SAMPLES
            if vartype[0]=='e':
                t,p = ttest_rel(x,y,0)
                print '\nRelated samples t-test: ', round(t,4),round(p,4)
            else:
                t,p = ranksums(x,y)
                print '\nWilcoxon T-test (NONparametric): ', round(t,4),round(p,4)
    else:  # CORRELATION ANALYSIS
        corrtype = ''
        while corrtype not in ['c','C','r','R','d','D']:
            print '\nIs the data Continuous, Ranked, or Dichotomous (c,r,d): ',
            corrtype = raw_input()
        if corrtype in ['c','C']:
            m,b,r,p,see = linregress(x,y)
            print '\nLinear regression for continuous variables ...'
            lol = [['Slope','Intercept','r','Prob','SEestimate'],[round(m,4),round(b,4),round(r,4),round(p,4),round(see,4)]]
            pstat.printcc(lol)
        elif corrtype in ['r','R']:
            r,p = spearmanr(x,y)
            print '\nCorrelation for ranked variables ...'
            print "Spearman's r: ",round(r,4),round(p,4)
        else:  # DICHOTOMOUS
            r,p = pointbiserialr(x,y)
            print '\nAssuming x contains a dichotomous variable ...'
            print 'Point Biserial r: ',round(r,4),round(p,4)
    print '\n\n'
    return None
def lpearsonr(x,y):
    """
    Calculate the Pearson correlation coefficient and the associated
    two-tailed probability value. Taken from Heiman's Basic Statistics
    for the Behav. Sci (2nd), p.195.

    Usage:   lpearsonr(x,y)   where x and y are equal-length lists
    Returns: Pearson's r value, two-tailed p-value
    Raises:  ValueError if x and y differ in length
    """
    TINY = 1.0e-30   # guards the t computation when r is exactly +/-1
    if len(x) <> len(y):
        raise ValueError, 'Input values not paired in pearsonr. Aborting.'
    n = len(x)
    x = map(float,x)
    y = map(float,y)
    xmean = mean(x)
    ymean = mean(y)
    # Computational (sum-of-products) formula for r.
    r_num = n*(summult(x,y)) - sum(x)*sum(y)
    r_den = math.sqrt((n*ss(x) - square_of_sums(x))*(n*ss(y)-square_of_sums(y)))
    r = (r_num / r_den)  # denominator already a float
    # Two-tailed p-value from the t distribution with n-2 df.
    df = n-2
    t = r*math.sqrt(df/((1.0-r+TINY)*(1.0+r+TINY)))
    prob = betai(0.5*df,0.5,df/float(df+t*t))
    return r, prob
def llincc(x,y):
    """
    Calculate Lin's concordance correlation coefficient.

    Usage:   llincc(x,y)    where x, y are equal-length sequences
    Returns: Lin's CC
    """
    # lcov/lvar use an N-1 denominator; rescale to the population (N) forms.
    covar = lcov(x,y)*(len(x)-1)/float(len(x))
    xvar = lvar(x)*(len(x)-1)/float(len(x))
    yvar = lvar(y)*(len(y)-1)/float(len(y))
    # Use the list-function mean() rather than the array version amean():
    # this is the list implementation and its inputs need not be arrays.
    lincc = (2 * covar) / ((xvar+yvar) + ((mean(x)-mean(y))**2))
    return lincc
def lspearmanr(x,y):
    """
    Calculate the Spearman rank-order correlation coefficient. Taken
    from Heiman's Basic Statistics for the Behav. Sci (1st), p.192.

    Usage:   lspearmanr(x,y)   where x and y are equal-length lists
    Returns: Spearman's r, two-tailed p-value
    Raises:  ValueError if x and y differ in length
    """
    # NOTE(review): TINY is defined but never used here; the t computation
    # below divides by (rs+1)(1-rs) and so fails when rs is exactly +/-1.
    TINY = 1e-30
    if len(x) <> len(y):
        raise ValueError, 'Input values not paired in spearmanr. Aborting.'
    n = len(x)
    rankx = rankdata(x)
    ranky = rankdata(y)
    dsq = sumdiffsquared(rankx,ranky)
    # Classic Spearman formula from the sum of squared rank differences.
    rs = 1 - 6*dsq / float(n*(n**2-1))
    t = rs * math.sqrt((n-2) / ((rs+1.0)*(1.0-rs)))
    df = n-2
    probrs = betai(0.5*df,0.5,df/(df+t*t))  # t already a float
    # probability values for rs are from part 2 of the spearman function in
    # Numerical Recipies, p.510. They are close to tables, but not exact. (?)
    return rs, probrs
def lpointbiserialr(x,y):
    """
    Calculate the point-biserial correlation coefficient and the
    associated two-tailed probability. x must hold a dichotomous
    variable (exactly 2 distinct values). Taken from Heiman's Basic
    Statistics for the Behav. Sci (1st), p.194.

    Usage:   lpointbiserialr(x,y)   where x,y are equal-length lists
    Returns: point-biserial r, two-tailed p-value
    Raises:  ValueError if lengths differ or x has != 2 categories
    """
    TINY = 1e-30   # guards the t computation when r is exactly +/-1
    if len(x) <> len(y):
        raise ValueError, 'INPUT VALUES NOT PAIRED IN pointbiserialr. ABORTING.'
    data = pstat.abut(x,y)   # pair up the two lists
    categories = pstat.unique(x)
    if len(categories) <> 2:
        raise ValueError, "Exactly 2 categories required for pointbiserialr()."
    else:   # there are 2 categories, continue
        codemap = pstat.abut(categories,range(2))
        recoded = pstat.recode(data,codemap,0)  # NOTE(review): result unused
        # Split the paired data by category and compare the group means.
        x = pstat.linexand(data,0,categories[0])
        y = pstat.linexand(data,0,categories[1])
        xmean = mean(pstat.colex(x,1))
        ymean = mean(pstat.colex(y,1))
        n = len(data)
        adjust = math.sqrt((len(x)/float(n))*(len(y)/float(n)))
        rpb = (ymean - xmean)/samplestdev(pstat.colex(data,1))*adjust
        df = n-2
        t = rpb*math.sqrt(df/((1.0-rpb+TINY)*(1.0+rpb+TINY)))
        prob = betai(0.5*df,0.5,df/(df+t*t))  # t already a float
        return rpb, prob
def lkendalltau(x,y):
    """
    Calculate Kendall's tau (correlation of ordinal data) with a
    normal-approximation two-tailed p-value. Adapted from function
    kendl1 in Numerical Recipies.

    Usage:   lkendalltau(x,y)   x, y equal-length sequences
    Returns: Kendall's tau, two-tailed p-value
    """
    n1 = 0    # pairs contributing to the x ordering
    n2 = 0    # pairs contributing to the y ordering
    iss = 0   # concordant minus discordant pairs
    for j in range(len(x)-1):
        # Compare each pair exactly once: k runs over j+1..n-1 (as in the
        # NR kendl1 routine). The original started k at j, so every
        # element was compared with itself and counted as a spurious tie.
        for k in range(j+1,len(y)):
            a1 = x[j] - x[k]
            a2 = y[j] - y[k]
            aa = a1 * a2
            if (aa):  # neither list has a tie on this pair
                n1 = n1 + 1
                n2 = n2 + 1
                if aa > 0:
                    iss = iss + 1
                else:
                    iss = iss -1
            else:     # tie: count toward whichever side still varies
                if (a1):
                    n1 = n1 + 1
                else:
                    n2 = n2 + 1
    tau = iss / math.sqrt(n1*n2)
    svar = (4.0*len(x)+10.0) / (9.0*len(x)*(len(x)-1))
    z = tau / math.sqrt(svar)
    prob = erfcc(abs(z)/1.4142136)
    return tau, prob
def llinregress(x,y):
    """
    Calculate a least-squares regression line on x,y pairs.

    Usage:   llinregress(x,y)   x,y are equal-length lists of x-y coordinates
    Returns: slope, intercept, r, two-tailed prob, sterr-of-estimate
    Raises:  ValueError if x and y differ in length
    """
    TINY = 1.0e-20   # guards log/t computations when r is exactly +/-1
    if len(x) <> len(y):
        raise ValueError, 'Input values not paired in linregress. Aborting.'
    n = len(x)
    x = map(float,x)
    y = map(float,y)
    xmean = mean(x)
    ymean = mean(y)
    # Computational (sum-of-products) formula for r.
    r_num = float(n*(summult(x,y)) - sum(x)*sum(y))
    r_den = math.sqrt((n*ss(x) - square_of_sums(x))*(n*ss(y)-square_of_sums(y)))
    r = r_num / r_den
    z = 0.5*math.log((1.0+r+TINY)/(1.0-r+TINY))  # NOTE(review): Fisher z, unused
    df = n-2
    t = r*math.sqrt(df/((1.0-r+TINY)*(1.0+r+TINY)))
    prob = betai(0.5*df,0.5,df/(df+t*t))
    slope = r_num / float(n*ss(x) - square_of_sums(x))
    intercept = ymean - slope*xmean
    sterrest = math.sqrt(1-r*r)*samplestdev(y)   # standard error of the estimate
    return slope, intercept, r, prob, sterrest
####################################
##### INFERENTIAL STATISTICS #####
####################################
def lttest_1samp(a,popmean,printit=0,name='Sample',writemode='a'):
    """
    Calculate the t statistic for ONE group of scores a against a known
    population mean. If printit=1, results are printed to the screen.
    If printit='filename', the results are output to 'filename' using
    the given writemode (default=append).

    Usage:   lttest_1samp(a,popmean,Name='Sample',printit=0,writemode='a')
    Returns: t-value, two-tailed prob
    """
    x = mean(a)
    v = var(a)
    n = len(a)
    df = n-1
    svar = ((n-1)*v)/float(df)   # with df=n-1 this equals v; kept as written
    t = (x-popmean)/math.sqrt(svar*(1.0/n))
    prob = betai(0.5*df,0.5,float(df)/(df+t*t))
    if printit <> 0:
        statname = 'Single-sample T-test.'
        outputpairedstats(printit,writemode,
                          'Population','--',popmean,0,0,0,
                          name,n,x,v,min(a),max(a),
                          statname,t,prob)
    return t,prob
def lttest_ind (a, b, printit=0, name1='Samp1', name2='Samp2', writemode='a'):
    """
    Calculate the pooled-variance t statistic for TWO INDEPENDENT
    samples a and b. From Numerical Recipies, p.483. If printit=1,
    results are printed to the screen. If printit='filename', the
    results are output to 'filename' using the given writemode
    (default=append).

    Usage:   lttest_ind(a,b,printit=0,name1='Samp1',name2='Samp2',writemode='a')
    Returns: t-value, two-tailed prob
    """
    x1 = mean(a)
    x2 = mean(b)
    v1 = stdev(a)**2
    v2 = stdev(b)**2
    n1 = len(a)
    n2 = len(b)
    df = n1+n2-2
    svar = ((n1-1)*v1+(n2-1)*v2)/float(df)   # pooled variance estimate
    t = (x1-x2)/math.sqrt(svar*(1.0/n1 + 1.0/n2))
    prob = betai(0.5*df,0.5,df/(df+t*t))
    if printit <> 0:
        statname = 'Independent samples T-test.'
        outputpairedstats(printit,writemode,
                          name1,n1,x1,v1,min(a),max(a),
                          name2,n2,x2,v2,min(b),max(b),
                          statname,t,prob)
    return t,prob
def lttest_rel (a,b,printit=0,name1='Sample1',name2='Sample2',writemode='a'):
    """
    Calculate the t statistic for TWO RELATED samples a and b (paired
    t-test). From Numerical Recipies, p.483. If printit=1, results are
    printed to the screen. If printit='filename', the results are
    output to 'filename' using the given writemode (default=append).

    Usage:   lttest_rel(a,b,printit=0,name1='Sample1',name2='Sample2',writemode='a')
    Returns: t-value, two-tailed prob
    Raises:  ValueError if a and b differ in length
    """
    if len(a)<>len(b):
        raise ValueError, 'Unequal length lists in ttest_rel.'
    x1 = mean(a)
    x2 = mean(b)
    v1 = var(a)
    v2 = var(b)
    n = len(a)
    # Covariance of the paired scores, needed for the correlated-samples
    # standard error below.
    cov = 0
    for i in range(len(a)):
        cov = cov + (a[i]-x1) * (b[i]-x2)
    df = n-1
    cov = cov / float(df)
    sd = math.sqrt((v1+v2 - 2.0*cov)/float(n))
    t = (x1-x2)/sd
    prob = betai(0.5*df,0.5,df/(df+t*t))
    if printit <> 0:
        statname = 'Related samples T-test.'
        outputpairedstats(printit,writemode,
                          name1,n,x1,v1,min(a),max(a),
                          name2,n,x2,v2,min(b),max(b),
                          statname,t,prob)
    return t, prob
def lchisquare(f_obs,f_exp=None):
    """
    Calculate a one-way chi square for a list of observed frequencies.
    If no expected frequencies are given, the total N is assumed to be
    equally distributed across all groups.

    Usage:   lchisquare(f_obs, f_exp=None)   f_obs = list of observed cell freq.
    Returns: chisquare-statistic, associated p-value
    """
    k = len(f_obs)   # number of groups
    if f_exp is None:   # identity test, not '== None'
        f_exp = [sum(f_obs)/float(k)] * len(f_obs)   # create k bins with = freq.
    chisq = 0
    for i in range(len(f_obs)):
        chisq = chisq + (f_obs[i]-f_exp[i])**2 / float(f_exp[i])
    return chisq, chisqprob(chisq, k-1)
def lks_2samp (data1,data2):
    """
    Compute the two-sample Kolmogorov-Smirnov statistic. From
    Numerical Recipies in C, page 493.

    NOTE: sorts data1 and data2 IN PLACE as a side effect.

    Usage:   lks_2samp(data1,data2)   data1&2 are lists of values for 2 conditions
    Returns: KS D-value, associated p-value
    """
    j1 = 0
    j2 = 0
    fn1 = 0.0
    fn2 = 0.0
    n1 = len(data1)
    n2 = len(data2)
    en1 = n1
    en2 = n2
    d = 0.0
    data1.sort()
    data2.sort()
    # Walk both sorted samples in step, tracking the maximum signed gap
    # between their empirical CDFs.
    while j1 < n1 and j2 < n2:
        d1=data1[j1]
        d2=data2[j2]
        if d1 <= d2:
            fn1 = (j1)/float(en1)
            j1 = j1 + 1
        if d2 <= d1:
            fn2 = (j2)/float(en2)
            j2 = j2 + 1
        dt = (fn2-fn1)
        if math.fabs(dt) > math.fabs(d):
            d = dt
    try:
        en = math.sqrt(en1*en2/float(en1+en2))
        prob = ksprob((en+0.12+0.11/en)*abs(d))
    except:
        # Any numeric failure (e.g. an empty sample) falls back to p=1.0.
        prob = 1.0
    return d, prob
def lmannwhitneyu(x,y):
    """
    Calculate a Mann-Whitney U statistic on the provided scores. Use
    only when the n in each condition is < 20 and you have 2 independent
    samples of ranks. NOTE: Mann-Whitney U is significant if the
    u-obtained is LESS THAN or equal to the critical value of U found in
    the tables. Equivalent to Kruskal-Wallis H with just 2 groups.

    Usage:   lmannwhitneyu(x,y)
    Returns: u-statistic, one-tailed p-value (i.e., p(z(U)))
    Raises:  ValueError if all values are tied
    """
    n1 = len(x)
    n2 = len(y)
    ranked = rankdata(x+y)   # rank the pooled scores
    rankx = ranked[0:n1]  # get the x-ranks
    ranky = ranked[n1:]  # the rest are y-ranks
    u1 = n1*n2 + (n1*(n1+1))/2.0 - sum(rankx)  # calc U for x
    u2 = n1*n2 - u1  # remainder is U for y
    bigu = max(u1,u2)
    smallu = min(u1,u2)
    T = math.sqrt(tiecorrect(ranked))  # correction factor for tied scores
    if T == 0:
        raise ValueError, 'All numbers are identical in lmannwhitneyu'
    sd = math.sqrt(T*n1*n2*(n1+n2+1)/12.0)
    z = abs((bigu-n1*n2/2.0) / sd)  # normal approximation for prob calc
    return smallu, 1.0 - zprob(z)
def ltiecorrect(rankvals):
    """
    Correct for ties in Mann Whitney U and Kruskal Wallis H tests. See
    Siegel, S. (1956) Nonparametric Statistics for the Behavioral
    Sciences. New York: McGraw-Hill. Code adapted from |Stat rankind.c.

    Note: fewer than 2 values makes n**3-n zero and raises
    ZeroDivisionError.

    Usage:   ltiecorrect(rankvals)
    Returns: T correction factor for U or H
    """
    # 'ranks' rather than 'sorted': don't shadow the builtin sorted().
    ranks, posn = shellsort(rankvals)
    n = len(ranks)
    T = 0.0
    i = 0
    while (i < n-1):
        if ranks[i] == ranks[i+1]:
            # Count the length of this run of tied values.
            nties = 1
            while (i < n-1) and (ranks[i] == ranks[i+1]):
                nties = nties + 1
                i = i + 1
            T = T + nties**3 - nties
        i = i + 1
    T = T / float(n**3 - n)
    return 1.0 - T
def lranksums(x,y):
    """
    Calculate the rank-sums statistic for two independent samples (use
    when the n in each condition is > 20), with a two-tailed p-value
    from the normal approximation.

    Usage:   lranksums(x,y)
    Returns: z-statistic, two-tailed p-value
    """
    n1 = len(x)
    n2 = len(y)
    # Rank the pooled scores, then take the x-group's rank sum.
    pooled_ranks = rankdata(x + y)
    observed = sum(pooled_ranks[:n1])
    expected = n1 * (n1 + n2 + 1) / 2.0
    z = (observed - expected) / math.sqrt(n1 * n2 * (n1 + n2 + 1) / 12.0)
    prob = 2 * (1.0 - zprob(abs(z)))
    return z, prob
def lwilcoxont(x,y):
    """
    Calculate the Wilcoxon signed-rank T-test for two related samples
    (a non-parametric alternative to the paired t-test).

    Usage:   lwilcoxont(x,y)
    Returns: Wilcoxon T statistic, two-tailed probability estimate
    Raises:  ValueError if x and y differ in length
    """
    if len(x) <> len(y):
        raise ValueError, 'Unequal N in wilcoxont. Aborting.'
    d=[]
    # Keep only the non-zero pairwise differences.
    for i in range(len(x)):
        diff = x[i] - y[i]
        if diff <> 0:
            d.append(diff)
    count = len(d)
    absd = map(abs,d)
    absranked = rankdata(absd)
    r_plus = 0.0
    r_minus = 0.0
    # Sum the ranks of negative and positive differences separately.
    for i in range(len(absd)):
        if d[i] < 0:
            r_minus = r_minus + absranked[i]
        else:
            r_plus = r_plus + absranked[i]
    wt = min(r_plus, r_minus)   # Wilcoxon T = smaller rank sum
    # Normal approximation for the two-tailed p-value.
    mn = count * (count+1) * 0.25
    se =  math.sqrt(count*(count+1)*(2.0*count+1.0)/24.0)
    z = math.fabs(wt-mn) / se
    prob = 2*(1.0 -zprob(abs(z)))
    return wt, prob
def lkruskalwallish(*args):
    """
    Calculate the Kruskal-Wallis H-test (a non-parametric ANOVA) for 3
    or more independent samples, requiring at least 5 subjects in each
    group.

    Usage:   lkruskalwallish(*args)
    Returns: H-statistic (corrected for ties), associated p-value
    Raises:  ValueError if all values are tied
    """
    args = list(args)
    n = [0]*len(args)
    all = []   # NOTE: shadows the builtin all()
    n = map(len,args)
    # Pool every score and rank the pooled data.
    for i in range(len(args)):
        all = all + args[i]
    ranked = rankdata(all)
    T = tiecorrect(ranked)
    # Split the pooled ranks back into their original groups.
    for i in range(len(args)):
        args[i] = ranked[0:n[i]]
        del ranked[0:n[i]]
    rsums = []
    # Squared rank sum per group, divided by the group size.
    for i in range(len(args)):
        rsums.append(sum(args[i])**2)
        rsums[i] = rsums[i] / float(n[i])
    ssbn = sum(rsums)
    totaln = sum(n)
    h = 12.0 / (totaln*(totaln+1)) * ssbn - 3*(totaln+1)
    df = len(args) - 1
    if T == 0:
        raise ValueError, 'All numbers are identical in lkruskalwallish'
    h = h / float(T)   # apply the tie correction
    return h, chisqprob(h,df)
def lfriedmanchisquare(*args):
    """
    Calculate the Friedman Chi-square (a non-parametric, one-way
    within-subjects ANOVA) for 3 or more repeated measures. Only 3
    levels requires a minimum of 10 subjects in the study. Four levels
    requires 5 subjects per level(??).

    Usage:   lfriedmanchisquare(*args)
    Returns: chi-square statistic, associated p-value
    Raises:  ValueError for fewer than 3 levels
    """
    k = len(args)
    if k < 3:
        raise ValueError, 'Less than 3 levels. Friedman test not appropriate.'
    n = len(args[0])
    data = apply(pstat.abut,tuple(args))
    # Rank the k scores within each subject (row).
    for i in range(len(data)):
        data[i] = rankdata(data[i])
    ssbn = 0
    # NOTE(review): this sums the RAW scores in args, not the within-row
    # ranks computed into 'data' above -- verify against the Friedman
    # formula (sum of squared rank totals per condition).
    for i in range(k):
        ssbn = ssbn + sum(args[i])**2
    chisq = 12.0 / (k*n*(k+1)) * ssbn - 3*n*(k+1)
    return chisq, chisqprob(chisq,k-1)
####################################
#### PROBABILITY CALCULATIONS ####
####################################
def lchisqprob(chisq,df):
    """
    Return the (1-tailed) probability value associated with the provided
    chi-square value and df. Adapted from chisq.c in Gary Perlman's
    |Stat.

    Usage:   lchisqprob(chisq,df)
    """
    BIG = 20.0
    def ex(x):
        # exp() that underflows to 0.0 instead of raising for very
        # negative arguments.
        BIG = 20.0
        if x < -BIG:
            return 0.0
        else:
            return math.exp(x)
    if chisq <=0 or df < 1:
        return 1.0
    a = 0.5 * chisq
    if df%2 == 0:
        even = 1
    else:
        even = 0
    if df > 1:
        y = ex(-a)
    # Base term: exp(-a) for even df, normal-tail term for odd df.
    if even:
        s = y
    else:
        s = 2.0 * zprob(-math.sqrt(chisq))
    if (df > 2):
        chisq = 0.5 * (df - 1.0)
        if even:
            z = 1.0
        else:
            z = 0.5
        if a > BIG:
            # Large chi-square: accumulate the series in log space to
            # avoid overflow/underflow.
            if even:
                e = 0.0
            else:
                e = math.log(math.sqrt(math.pi))
            c = math.log(a)
            while (z <= chisq):
                e = math.log(z) + e
                s = s + ex(c*z-a-e)
                z = z + 1.0
            return s
        else:
            # Moderate chi-square: accumulate the series directly.
            if even:
                e = 1.0
            else:
                e = 1.0 / math.sqrt(math.pi) / math.sqrt(a)
            c = 0.0
            while (z <= chisq):
                e = e * (a/float(z))
                c = c + e
                z = z + 1.0
            return (c*y+s)
    else:
        return s
def lerfcc(x):
    """
    Return the complementary error function erfc(x), with fractional
    error everywhere less than 1.2e-7. Chebyshev-fit approximation
    adapted from Numerical Recipies.

    Usage:   lerfcc(x)
    """
    z = abs(x)
    t = 1.0 / (1.0 + 0.5 * z)
    # Horner evaluation of the published polynomial in t.
    poly = 0.17087277
    for c in (-0.82215223, 1.48851587, -1.13520398, 0.27886807,
              -0.18628806, 0.09678418, 0.37409196, 1.00002368):
        poly = poly * t + c
    approx = t * math.exp(-z * z - 1.26551223 + t * poly)
    if x >= 0:
        return approx
    return 2.0 - approx
def lzprob(z):
    """
    Return the area under the standard normal curve to the left of z.
    Thus,
        for z<0,  zprob(z)       = 1-tail probability
        for z>0,  1.0-zprob(z)   = 1-tail probability
        for any z, 2.0*(1.0-zprob(abs(z))) = 2-tail probability
    Adapted from z.c in Gary Perlman's |Stat.

    Usage:   lzprob(z)
    """
    Z_MAX = 6.0  # maximum meaningful z-value
    if z == 0.0:
        x = 0.0
    else:
        y = 0.5 * math.fabs(z)
        if y >= (Z_MAX*0.5):
            # Beyond six sigma the tail area is treated as exactly 0/1.
            x = 1.0
        elif (y < 1.0):
            w = y*y
            x = ((((((((0.000124818987 * w
                        -0.001075204047) * w +0.005198775019) * w
                      -0.019198292004) * w +0.059054035642) * w
                    -0.151968751364) * w +0.319152932694) * w
                  -0.531923007300) * w +0.797884560593) * y * 2.0
        else:
            y = y - 2.0
            x = (((((((((((((-0.000045255659 * y
                             +0.000152529290) * y -0.000019538132) * y
                           -0.000676904986) * y +0.001390604284) * y
                         -0.000794620820) * y -0.002034254874) * y
                       +0.006549791214) * y -0.010557625006) * y
                     +0.011630447319) * y -0.009279453341) * y
                   +0.005353579108) * y -0.002141268741) * y
                 +0.000535310849) * y +0.999936657524
    # x holds 2*P(0 < Z < |z|); convert to the left-tail area.
    if z > 0.0:
        return (x + 1.0) * 0.5
    return (1.0 - x) * 0.5
def lksprob(alam):
    """
    Compute the Kolmogorov-Smirnov significance level Q_KS(alam) via its
    alternating series. Adapted from Numerical Recipies.

    Usage:   lksprob(alam)
    Returns: the series sum, or 1.0 if it fails to converge in 200 terms
    """
    sign = 2.0      # alternates +2/-2 for successive terms
    total = 0.0
    prevterm = 0.0  # magnitude of the previous term, for convergence test
    exponent = -2.0 * alam * alam
    for j in range(1, 201):
        term = sign * math.exp(exponent * j * j)
        total = total + term
        if math.fabs(term) <= (0.001 * prevterm) or math.fabs(term) < (1.0e-8 * total):
            return total
        sign = -sign
        prevterm = math.fabs(term)
    return 1.0  # Get here only if fails to converge; was 0.0!!
def lfprob (dfnum, dfden, F):
    """
    Return the (1-tailed) significance level (p-value) of an F statistic
    given the degrees of freedom for the numerator (dfR-dfF) and the
    degrees of freedom for the denominator (dfF).

    Usage:   lfprob(dfnum, dfden, F)   where usually dfnum=dfbn, dfden=dfwn
    """
    # p = I_x(dfden/2, dfnum/2) with x = dfden / (dfden + dfnum*F).
    x = dfden / float(dfden + dfnum * F)
    return betai(0.5 * dfden, 0.5 * dfnum, x)
def lbetacf(a,b,x):
    """
    Evaluate the continued-fraction form of the incomplete Beta
    function (supporting routine for betai). Adapted from Numerical
    Recipies in C.

    Usage:   lbetacf(a,b,x)
    Returns: the continued-fraction value, or None (after printing a
             diagnostic) if it fails to converge within ITMAX iterations
    """
    ITMAX = 200     # maximum number of iterations
    EPS = 3.0e-7    # relative accuracy target
    bm = az = am = 1.0
    qab = a+b
    qap = a+1.0
    qam = a-1.0
    bz = 1.0-qab*x/qap
    for i in range(ITMAX+1):
        em = float(i+1)
        tem = em + em
        # Even step of the recurrence.
        d = em*(b-em)*x/((qam+tem)*(a+tem))
        ap = az + d*am
        bp = bz+d*bm
        # Odd step of the recurrence.
        d = -(a+em)*(qab+em)*x/((qap+tem)*(a+tem))
        app = ap+d*az
        bpp = bp+d*bz
        aold = az
        # Renormalize to prevent overflow.
        am = ap/bpp
        bm = bp/bpp
        az = app/bpp
        bz = 1.0
        if (abs(az-aold)<(EPS*abs(az))):
            return az
    # Single-argument print form behaves identically on Python 2 and 3
    # (the original Py2-only print statement was a syntax error on Py3).
    print('a or b too big, or ITMAX too small in Betacf.')
def lgammln(xx):
    """
    Return the natural log of the gamma function of xx:
        Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt.
    Lanczos approximation, adapted from Numerical Recipies in C.

    Usage:   lgammln(xx)
    """
    t = xx - 1.0
    log_prefactor = t + 5.5
    log_prefactor = log_prefactor - (t + 0.5) * math.log(log_prefactor)
    # Accumulate the Lanczos series term by term.
    series = 1.0
    for c in (76.18009173, -86.50532033, 24.01409822, -1.231739516,
              0.120858003e-2, -0.536382e-5):
        t = t + 1
        series = series + c / t
    return -log_prefactor + math.log(2.50662827465 * series)
def lbetai(a,b,x):
    """
    Return the incomplete beta function:

        I-sub-x(a,b) = 1/B(a,b)*(Integral(0,x) of t^(a-1)(1-t)^(b-1) dt)

    where a,b>0 and B(a,b) = G(a)*G(b)/(G(a+b)), G(a) being the gamma
    function of a. Implemented via the continued-fraction formulation
    (betacf). Adapted from Numerical Recipies in C.

    Usage:   lbetai(a,b,x)
    Raises:  ValueError if x is outside [0,1]
    """
    if x < 0.0 or x > 1.0:
        raise ValueError('Bad x in lbetai')
    # Common prefactor x^a * (1-x)^b / B(a,b), computed in log space.
    if x == 0.0 or x == 1.0:
        front = 0.0
    else:
        front = math.exp(gammln(a+b) - gammln(a) - gammln(b)
                         + a*math.log(x) + b*math.log(1.0-x))
    # Use whichever continued fraction converges fastest.
    if x < (a+1.0)/(a+b+2.0):
        return front*betacf(a,b,x)/float(a)
    return 1.0 - front*betacf(b,a,1.0-x)/float(b)
####################################
####### ANOVA CALCULATIONS #######
####################################
def lF_oneway(*lists):
    """
    Perform a 1-way ANOVA, returning an F-value and probability given
    any number of groups. From Heiman, pp.394-7.

    Usage:   lF_oneway(*lists)   where *lists is any number of lists,
                                 one per treatment group
    Returns: F value, one-tailed p-value
    """
    a = len(lists)   # ANOVA on 'a' groups, each in its own list
    # (Removed dead code: tmp/means/vars/ns were computed via map() but
    # never used anywhere below.)
    # Pool all scores into one flat array for the total sum of squares.
    alldata = []
    for grp in lists:   # 'grp', not 'list': don't shadow the builtin
        alldata = alldata + grp
    alldata = N.array(alldata)
    bign = len(alldata)
    sstot = ass(alldata)-(asquare_of_sums(alldata)/float(bign))
    # Between-groups sum of squares.
    ssbn = 0
    for grp in lists:
        ssbn = ssbn + asquare_of_sums(N.array(grp))/float(len(grp))
    ssbn = ssbn - (asquare_of_sums(alldata)/float(bign))
    sswn = sstot-ssbn   # within-groups sum of squares
    dfbn = a-1
    dfwn = bign - a
    msb = ssbn/float(dfbn)
    msw = sswn/float(dfwn)
    f = msb/msw
    prob = fprob(dfbn,dfwn,f)
    return f, prob
def lF_value (ER,EF,dfnum,dfden):
    """
    Return an F-statistic for a nested model comparison, given:
        ER    = error associated with the null hypothesis (Restricted model)
        EF    = error associated with the alternate hypothesis (Full model)
        dfnum = dfR - dfF, degrees of freedom for the numerator
        dfden = dfF, degrees of freedom of the denominator / Full model

    Usage:   lF_value(ER,EF,dfnum,dfden)
    """
    numerator = (ER - EF) / float(dfnum)
    denominator = EF / float(dfden)
    return numerator / denominator
####################################
######## SUPPORT FUNCTIONS #######
####################################
def writecc (listoflists,file,writetype='w',extra=2):
    """
    Write a list of lists to a file in columns, each column sized to its
    widest item plus 'extra' characters of padding. Rows equal to a
    newline marker give a blank line; rows equal to 'dashes' give a
    divider line. File-overwrite is the default.

    Usage:   writecc (listoflists,file,writetype='w',extra=2)
    Returns: None
    """
    if type(listoflists[0]) not in [ListType,TupleType]:
        listoflists = [listoflists]   # promote a single row to a 2D table
    outfile = open(file,writetype)
    rowstokill = []
    list2print = copy.deepcopy(listoflists)
    # Column widths are computed with the special marker rows removed.
    for i in range(len(listoflists)):
        if listoflists[i] == ['\n'] or listoflists[i]=='\n' or listoflists[i]=='dashes':
            rowstokill = rowstokill + [i]
    rowstokill.reverse()   # delete from the end so indices stay valid
    for row in rowstokill:
        del list2print[row]
    maxsize = [0]*len(list2print[0])
    for col in range(len(list2print[0])):
        items = pstat.colex(list2print,col)
        items = map(pstat.makestr,items)
        maxsize[col] = max(map(len,items)) + extra
    for row in listoflists:
        if row == ['\n'] or row == '\n':
            outfile.write('\n')
        elif row == ['dashes'] or row == 'dashes':
            # Divider line: dashes sized to each column's width.
            dashes = [0]*len(maxsize)
            for j in range(len(maxsize)):
                dashes[j] = '-'*(maxsize[j]-2)
            outfile.write(pstat.lineincustcols(dashes,maxsize))
        else:
            outfile.write(pstat.lineincustcols(row,maxsize))
        outfile.write('\n')
    outfile.close()
    return None
def lincr(l,cap): # to increment a list up to a max-list of 'cap'
    """
    Treat list l as a multi-digit counter with per-position maxima given
    by cap, and advance it by one step, carrying as needed (e.g.
    [0,0,0] counts up toward cap=[2,4,3]).

    Usage:   lincr(l,cap)   l=list to increment, cap=max values per position
    Returns: l advanced by one (modified in place), OR -1 on overflow
    """
    l[0] = l[0] + 1
    for pos in range(len(l)):
        if l[pos] <= cap[pos]:
            continue
        if pos == len(l) - 1:
            return -1   # carried past the last position: overflow
        # Carry into the next position.
        l[pos] = 0
        l[pos + 1] = l[pos + 1] + 1
    return l
def lsum (inlist):
    """
    Return the sum of the items in inlist.

    Usage:   lsum(inlist)
    """
    total = 0
    for value in inlist:
        total = total + value
    return total
def lcumsum (inlist):
    """
    Return a list of the cumulative (running) sums of the items in
    inlist; the input list is not modified.

    Usage:   lcumsum(inlist)
    """
    sums = copy.deepcopy(inlist)
    idx = 1
    while idx < len(sums):
        sums[idx] = sums[idx] + sums[idx - 1]
        idx = idx + 1
    return sums
def lss(inlist):
    """
    Return the sum of squares of the items in inlist.

    Usage:   lss(inlist)
    """
    total = 0
    for value in inlist:
        total = total + value * value
    return total
def lsummult (list1,list2):
    """
    Multiply elements of list1 and list2 pairwise and return the sum of
    the products. The two lists must be equal length.

    Usage:   lsummult(list1,list2)
    Raises:  ValueError if the lists differ in length
    """
    if len(list1) != len(list2):
        raise ValueError("Lists not equal length in summult.")
    s = 0
    # zip() pairs the elements directly; the pstat.abut helper is not
    # needed for simple pairing.
    for item1, item2 in zip(list1, list2):
        s = s + item1 * item2
    return s
def lsumdiffsquared(x,y):
    """
    Take the pairwise differences of the values in x and y, square
    them, and return the sum of the squares.

    Usage:   lsumdiffsquared(x,y)
    Returns: sum[(x[i]-y[i])**2]
    """
    total = 0
    for i in range(len(x)):
        diff = x[i] - y[i]
        total = total + diff * diff
    return total
def lsquare_of_sums(inlist):
    """
    Add up the values in the passed list, square that sum, and return
    the result as a float.

    Usage:   lsquare_of_sums(inlist)
    Returns: sum(inlist[i])**2
    """
    # 'sum' here resolves to the module-level Dispatch wrapper (lsum for lists)
    total = sum(inlist)
    return float(total) * total
def lshellsort(inlist):
    """
    Shellsort algorithm.  Sorts a 1D-list.

    Usage:   lshellsort(inlist)
    Returns: sorted-inlist, sorting-index-vector (for original list)
    """
    n = len(inlist)
    svec = copy.deepcopy(inlist)
    ivec = list(range(n))        # list(): range objects are not item-assignable (Py3)
    gap = n // 2                 # // : explicit integer division (identical in Py2)
    while gap > 0:
        for i in range(gap, n):
            for j in range(i - gap, -1, -gap):
                while j >= 0 and svec[j] > svec[j + gap]:
                    # swap out-of-order pair, mirroring the move in the index vector
                    svec[j], svec[j + gap] = svec[j + gap], svec[j]
                    ivec[j], ivec[j + gap] = ivec[j + gap], ivec[j]
        gap = gap // 2
    # svec is now sorted inlist, and ivec has the order svec[i] = vec[ivec[i]]
    return svec, ivec
def lrankdata(inlist):
    """
    Ranks the data in inlist, dealing with ties appropriately (all tied
    values receive the mean of the ranks they span).  Assumes a 1D inlist.
    Adapted from Gary Perlman's |Stat ranksort.

    Usage:   lrankdata(inlist)
    Returns: a list of length equal to inlist, containing rank scores
    """
    n = len(inlist)
    svec, ivec = shellsort(inlist)   # module-level Dispatch wrapper
    sumranks = 0
    dupcount = 0
    newlist = [0] * n
    for i in range(n):
        sumranks = sumranks + i
        dupcount = dupcount + 1
        if i == n - 1 or svec[i] != svec[i + 1]:   # end of a run of tied values
            averank = sumranks / float(dupcount) + 1
            # write the shared average rank back to the original positions
            for j in range(i - dupcount + 1, i + 1):
                newlist[ivec[j]] = averank
            sumranks = 0
            dupcount = 0
    return newlist
def outputpairedstats(fname,writemode,name1,n1,m1,se1,min1,max1,name2,n2,m2,se2,min2,max2,statname,stat,prob):
    """
    Prints or write to a file stats for two groups, using the name, n,
    mean, sterr, min and max for each group, as well as the statistic name,
    its value, and the associated p-value.

    Usage:   outputpairedstats(fname,writemode,
                               name1,n1,mean1,stderr1,min1,max1,
                               name2,n2,mean2,stderr2,min2,max2,
                               statname,stat,prob)
    Returns: None
    """
    suffix = ''                       # for *s after the p-value
    try:
        x = prob.shape                # array-valued p: pull out the scalar
        prob = prob[0]
    except:
        pass
    # conventional significance stars on the p-value
    if prob < 0.001:  suffix = ' ***'
    elif prob < 0.01:  suffix = ' **'
    elif prob < 0.05:  suffix = ' *'
    title = [['Name','N','Mean','SD','Min','Max']]
    # NOTE(review): se1/se2 are square-rooted before display, so callers
    # appear to pass variances despite the 'stderr' naming -- confirm at call sites
    lofl = title+[[name1,n1,round(m1,3),round(math.sqrt(se1),3),min1,max1],
                  [name2,n2,round(m2,3),round(math.sqrt(se2),3),min2,max2]]
    if type(fname)<>StringType or len(fname)==0:   # no usable filename: print to stdout
        print
        print statname
        print
        pstat.printcc(lofl)
        print
        try:
            if stat.shape == ():      # unwrap 0-d array stat/prob before rounding
                stat = stat[0]
            if prob.shape == ():
                prob = prob[0]
        except:
            pass
        print 'Test statistic = ',round(stat,3),' p = ',round(prob,3),suffix
        print
    else:                             # write the same report to the named file
        file = open(fname,writemode)
        file.write('\n'+statname+'\n\n')
        file.close()
        writecc(lofl,fname,'a')
        file = open(fname,'a')
        try:
            if stat.shape == ():
                stat = stat[0]
            if prob.shape == ():
                prob = prob[0]
        except:
            pass
        file.write(pstat.list2string(['\nTest statistic = ',round(stat,4),' p = ',round(prob,4),suffix,'\n\n']))
        file.close()
    return None
def lfindwithin(data):
    """
    Returns an integer whose bits encode which factors are within-subject
    (bit=1) versus between-subject (bit=0).  Input is an entire |Stat-format
    2D list: column 0 = subject identifier, last column = measured value
    (both skipped), and one column per factor in between (those columns hold
    level designations).  See also stats.anova.__doc__.

    Usage:   lfindwithin(data)     data in |Stat format
    """
    numfact = len(data[0]) - 1
    withinvec = 0
    for col in range(1, numfact):
        # pick an arbitrary level of this factor and collect its subjects
        level0 = pstat.unique(pstat.colex(data, col))[0]
        levelrows = pstat.linexand(data, col, level0)
        levelsubjs = pstat.unique(pstat.colex(levelrows, 0))
        allsubjs = pstat.unique(pstat.colex(data, 0))
        # every subject appears at a single level ==> within-subject factor
        if len(levelsubjs) == len(allsubjs):
            withinvec = withinvec + (1 << col)
    return withinvec
#########################################################
#########################################################
####### DISPATCH LISTS AND TUPLES TO ABOVE FCNS #########
#########################################################
#########################################################
## CENTRAL TENDENCY:
## Bind the list/tuple ('l'-prefixed) implementations above to the public,
## unprefixed names.  Each Dispatch object routes a call to the matching
## implementation based on the type of its first argument; presumably the
## array ('a') versions defined below are registered onto these same
## dispatchers elsewhere in the file -- confirm against the array section.
## CENTRAL TENDENCY:
geometricmean = Dispatch ( (lgeometricmean, (ListType, TupleType)), )
harmonicmean = Dispatch ( (lharmonicmean, (ListType, TupleType)), )
mean = Dispatch ( (lmean, (ListType, TupleType)), )
median = Dispatch ( (lmedian, (ListType, TupleType)), )
medianscore = Dispatch ( (lmedianscore, (ListType, TupleType)), )
mode = Dispatch ( (lmode, (ListType, TupleType)), )
## MOMENTS:
moment = Dispatch ( (lmoment, (ListType, TupleType)), )
variation = Dispatch ( (lvariation, (ListType, TupleType)), )
skew = Dispatch ( (lskew, (ListType, TupleType)), )
kurtosis = Dispatch ( (lkurtosis, (ListType, TupleType)), )
describe = Dispatch ( (ldescribe, (ListType, TupleType)), )
## FREQUENCY STATISTICS:
itemfreq = Dispatch ( (litemfreq, (ListType, TupleType)), )
scoreatpercentile = Dispatch ( (lscoreatpercentile, (ListType, TupleType)), )
percentileofscore = Dispatch ( (lpercentileofscore, (ListType, TupleType)), )
histogram = Dispatch ( (lhistogram, (ListType, TupleType)), )
cumfreq = Dispatch ( (lcumfreq, (ListType, TupleType)), )
relfreq = Dispatch ( (lrelfreq, (ListType, TupleType)), )
## VARIABILITY:
obrientransform = Dispatch ( (lobrientransform, (ListType, TupleType)), )
samplevar = Dispatch ( (lsamplevar, (ListType, TupleType)), )
samplestdev = Dispatch ( (lsamplestdev, (ListType, TupleType)), )
var = Dispatch ( (lvar, (ListType, TupleType)), )
stdev = Dispatch ( (lstdev, (ListType, TupleType)), )
sterr = Dispatch ( (lsterr, (ListType, TupleType)), )
sem = Dispatch ( (lsem, (ListType, TupleType)), )
z = Dispatch ( (lz, (ListType, TupleType)), )
zs = Dispatch ( (lzs, (ListType, TupleType)), )
## TRIMMING FCNS:
trimboth = Dispatch ( (ltrimboth, (ListType, TupleType)), )
trim1 = Dispatch ( (ltrim1, (ListType, TupleType)), )
## CORRELATION FCNS:
paired = Dispatch ( (lpaired, (ListType, TupleType)), )
pearsonr = Dispatch ( (lpearsonr, (ListType, TupleType)), )
spearmanr = Dispatch ( (lspearmanr, (ListType, TupleType)), )
pointbiserialr = Dispatch ( (lpointbiserialr, (ListType, TupleType)), )
kendalltau = Dispatch ( (lkendalltau, (ListType, TupleType)), )
linregress = Dispatch ( (llinregress, (ListType, TupleType)), )
## INFERENTIAL STATS:
ttest_1samp = Dispatch ( (lttest_1samp, (ListType, TupleType)), )
ttest_ind = Dispatch ( (lttest_ind, (ListType, TupleType)), )
ttest_rel = Dispatch ( (lttest_rel, (ListType, TupleType)), )
chisquare = Dispatch ( (lchisquare, (ListType, TupleType)), )
ks_2samp = Dispatch ( (lks_2samp, (ListType, TupleType)), )
mannwhitneyu = Dispatch ( (lmannwhitneyu, (ListType, TupleType)), )
ranksums = Dispatch ( (lranksums, (ListType, TupleType)), )
tiecorrect = Dispatch ( (ltiecorrect, (ListType, TupleType)), )
wilcoxont = Dispatch ( (lwilcoxont, (ListType, TupleType)), )
kruskalwallish = Dispatch ( (lkruskalwallish, (ListType, TupleType)), )
friedmanchisquare = Dispatch ( (lfriedmanchisquare, (ListType, TupleType)), )
## PROBABILITY CALCS:  (these accept plain numbers, not sequences)
chisqprob = Dispatch ( (lchisqprob, (IntType, FloatType)), )
zprob = Dispatch ( (lzprob, (IntType, FloatType)), )
ksprob = Dispatch ( (lksprob, (IntType, FloatType)), )
fprob = Dispatch ( (lfprob, (IntType, FloatType)), )
betacf = Dispatch ( (lbetacf, (IntType, FloatType)), )
betai = Dispatch ( (lbetai, (IntType, FloatType)), )
erfcc = Dispatch ( (lerfcc, (IntType, FloatType)), )
gammln = Dispatch ( (lgammln, (IntType, FloatType)), )
## ANOVA FUNCTIONS:
F_oneway = Dispatch ( (lF_oneway, (ListType, TupleType)), )
F_value = Dispatch ( (lF_value, (ListType, TupleType)), )
## SUPPORT FUNCTIONS:
incr = Dispatch ( (lincr, (ListType, TupleType)), )
sum = Dispatch ( (lsum, (ListType, TupleType)), )
cumsum = Dispatch ( (lcumsum, (ListType, TupleType)), )
ss = Dispatch ( (lss, (ListType, TupleType)), )
summult = Dispatch ( (lsummult, (ListType, TupleType)), )
square_of_sums = Dispatch ( (lsquare_of_sums, (ListType, TupleType)), )
sumdiffsquared = Dispatch ( (lsumdiffsquared, (ListType, TupleType)), )
shellsort = Dispatch ( (lshellsort, (ListType, TupleType)), )
rankdata = Dispatch ( (lrankdata, (ListType, TupleType)), )
findwithin = Dispatch ( (lfindwithin, (ListType, TupleType)), )
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
#============= THE ARRAY-VERSION OF THE STATS FUNCTIONS ===============
try: # DEFINE THESE *ONLY* IF NUMERIC IS AVAILABLE
import numpy as N
import numpy.linalg as LA
#####################################
######## ACENTRAL TENDENCY ########
#####################################
def ageometricmean (inarray,dimension=None,keepdims=0):
"""
Calculates the geometric mean of the values in the passed array.
That is: n-th root of (x1 * x2 * ... * xn). Defaults to ALL values in
the passed array. Use dimension=None to flatten array first. REMEMBER: if
dimension=0, it collapses over dimension 0 ('rows' in a 2D array) only, and
if dimension is a sequence, it collapses over all specified dimensions. If
keepdims is set to 1, the resulting array will have as many dimensions as
inarray, with only 1 'level' per dim that was collapsed over.
Usage: ageometricmean(inarray,dimension=None,keepdims=0)
Returns: geometric mean computed over dim(s) listed in dimension
"""
inarray = N.array(inarray,N.float_)
if dimension == None:
inarray = N.ravel(inarray)
size = len(inarray)
mult = N.power(inarray,1.0/size)
mult = N.multiply.reduce(mult)
elif type(dimension) in [IntType,FloatType]:
size = inarray.shape[dimension]
mult = N.power(inarray,1.0/size)
mult = N.multiply.reduce(mult,dimension)
if keepdims == 1:
shp = list(inarray.shape)
shp[dimension] = 1
sum = N.reshape(sum,shp)
else: # must be a SEQUENCE of dims to average over
dims = list(dimension)
dims.sort()
dims.reverse()
size = N.array(N.multiply.reduce(N.take(inarray.shape,dims)),N.float_)
mult = N.power(inarray,1.0/size)
for dim in dims:
mult = N.multiply.reduce(mult,dim)
if keepdims == 1:
shp = list(inarray.shape)
for dim in dims:
shp[dim] = 1
mult = N.reshape(mult,shp)
return mult
    def aharmonicmean (inarray,dimension=None,keepdims=0):
        """
        Calculates the harmonic mean of the values in the passed array.
        That is:  n / (1/x1 + 1/x2 + ... + 1/xn).  Defaults to ALL values in
        the passed array.  Use dimension=None to flatten array first.  REMEMBER: if
        dimension=0, it collapses over dimension 0 ('rows' in a 2D array) only, and
        if dimension is a sequence, it collapses over all specified dimensions.  If
        keepdims is set to 1, the resulting array will have as many dimensions as
        inarray, with only 1 'level' per dim that was collapsed over.

        Usage:   aharmonicmean(inarray,dimension=None,keepdims=0)
        Returns: harmonic mean computed over dim(s) in dimension
        """
        inarray = inarray.astype(N.float_)
        if dimension == None:                        # flatten and reduce everything
            inarray = N.ravel(inarray)
            size = len(inarray)
            s = N.add.reduce(1.0 / inarray)
        elif type(dimension) in [IntType,FloatType]: # single axis
            size = float(inarray.shape[dimension])
            s = N.add.reduce(1.0/inarray, dimension)
            if keepdims == 1:
                shp = list(inarray.shape)
                shp[dimension] = 1
                s = N.reshape(s,shp)
        else:              # must be a SEQUENCE of dims to average over
            dims = list(dimension)
            dims.sort()
            nondims = []                             # the axes NOT collapsed over
            for i in range(len(inarray.shape)):
                if i not in dims:
                    nondims.append(i)
            tinarray = N.transpose(inarray,nondims+dims)  # put keep-dims first
            idx = [0] *len(nondims)
            if idx == []:                            # collapsing over every axis
                size = len(N.ravel(inarray))
                s = asum(1.0 / inarray)
                if keepdims == 1:
                    s = N.reshape([s],N.ones(len(inarray.shape)))
            else:
                # odometer-walk every keep-dim index via the module-level
                # 'incr' dispatcher, summing reciprocals of each slice
                idx[0] = -1
                loopcap = N.array(tinarray.shape[0:len(nondims)]) -1
                s = N.zeros(loopcap+1,N.float_)
                while incr(idx,loopcap) <> -1:
                    s[idx] = asum(1.0/tinarray[idx])
                size = N.multiply.reduce(N.take(inarray.shape,dims))
            if keepdims == 1:
                shp = list(inarray.shape)
                for dim in dims:
                    shp[dim] = 1
                s = N.reshape(s,shp)
        # harmonic mean = count / sum-of-reciprocals
        return size / s
def amean (inarray,dimension=None,keepdims=0):
"""
Calculates the arithmatic mean of the values in the passed array.
That is: 1/n * (x1 + x2 + ... + xn). Defaults to ALL values in the
passed array. Use dimension=None to flatten array first. REMEMBER: if
dimension=0, it collapses over dimension 0 ('rows' in a 2D array) only, and
if dimension is a sequence, it collapses over all specified dimensions. If
keepdims is set to 1, the resulting array will have as many dimensions as
inarray, with only 1 'level' per dim that was collapsed over.
Usage: amean(inarray,dimension=None,keepdims=0)
Returns: arithematic mean calculated over dim(s) in dimension
"""
if inarray.dtype in [N.int_, N.short,N.ubyte]:
inarray = inarray.astype(N.float_)
if dimension == None:
inarray = N.ravel(inarray)
sum = N.add.reduce(inarray)
denom = float(len(inarray))
elif type(dimension) in [IntType,FloatType]:
sum = asum(inarray,dimension)
denom = float(inarray.shape[dimension])
if keepdims == 1:
shp = list(inarray.shape)
shp[dimension] = 1
sum = N.reshape(sum,shp)
else: # must be a TUPLE of dims to average over
dims = list(dimension)
dims.sort()
dims.reverse()
sum = inarray *1.0
for dim in dims:
sum = N.add.reduce(sum,dim)
denom = N.array(N.multiply.reduce(N.take(inarray.shape,dims)),N.float_)
if keepdims == 1:
shp = list(inarray.shape)
for dim in dims:
shp[dim] = 1
sum = N.reshape(sum,shp)
return sum/denom
    def amedian (inarray,numbins=1000):
        """
        Calculates the COMPUTED median value of an array of numbers, given the
        number of bins to use for the histogram (more bins approaches finding the
        precise median value of the array; default number of bins = 1000).  From
        G.W. Heiman's Basic Stats, or CRC Probability & Statistics.
        NOTE:  THIS ROUTINE ALWAYS uses the entire passed array (flattens it first).

        Usage:   amedian(inarray,numbins=1000)
        Returns: median calculated over ALL values in inarray
        """
        inarray = N.ravel(inarray)
        (hist, smallest, binsize, extras) = ahistogram(inarray,numbins,[min(inarray),max(inarray)])
        cumhist = N.cumsum(hist)            # make cumulative histogram
        otherbins = N.greater_equal(cumhist,len(inarray)/2.0)
        otherbins = list(otherbins)         # list of 0/1s, 1s start at median bin
        cfbin = otherbins.index(1)          # get 1st(!) index holding 50%ile score
        LRL = smallest + binsize*cfbin      # get lower read limit of that bin
        cfbelow = N.add.reduce(hist[0:cfbin])        # cum. freq. below bin
        freq = hist[cfbin]                  # frequency IN the 50%ile bin
        # standard grouped-data linear interpolation within the 50%ile bin
        median = LRL + ((len(inarray)/2.0-cfbelow)/float(freq))*binsize # MEDIAN
        return median
def amedianscore (inarray,dimension=None):
"""
Returns the 'middle' score of the passed array. If there is an even
number of scores, the mean of the 2 middle scores is returned. Can function
with 1D arrays, or on the FIRST dimension of 2D arrays (i.e., dimension can
be None, to pre-flatten the array, or else dimension must equal 0).
Usage: amedianscore(inarray,dimension=None)
Returns: 'middle' score of the array, or the mean of the 2 middle scores
"""
if dimension == None:
inarray = N.ravel(inarray)
dimension = 0
inarray = N.sort(inarray,dimension)
if inarray.shape[dimension] % 2 == 0: # if even number of elements
indx = inarray.shape[dimension]/2 # integer division correct
median = N.asarray(inarray[indx]+inarray[indx-1]) / 2.0
else:
indx = inarray.shape[dimension] / 2 # integer division correct
median = N.take(inarray,[indx],dimension)
if median.shape == (1,):
median = median[0]
return median
def amode(a, dimension=None):
"""
Returns an array of the modal (most common) score in the passed array.
If there is more than one such score, ONLY THE FIRST is returned.
The bin-count for the modal values is also returned. Operates on whole
array (dimension=None), or on a given dimension.
Usage: amode(a, dimension=None)
Returns: array of bin-counts for mode(s), array of corresponding modal values
"""
if dimension == None:
a = N.ravel(a)
dimension = 0
scores = pstat.aunique(N.ravel(a)) # get ALL unique values
testshape = list(a.shape)
testshape[dimension] = 1
oldmostfreq = N.zeros(testshape)
oldcounts = N.zeros(testshape)
for score in scores:
template = N.equal(a,score)
counts = asum(template,dimension,1)
mostfrequent = N.where(counts>oldcounts,score,oldmostfreq)
oldcounts = N.where(counts>oldcounts,counts,oldcounts)
oldmostfreq = mostfrequent
return oldcounts, mostfrequent
def atmean(a,limits=None,inclusive=(1,1)):
"""
Returns the arithmetic mean of all values in an array, ignoring values
strictly outside the sequence passed to 'limits'. Note: either limit
in the sequence, or the value of limits itself, can be set to None. The
inclusive list/tuple determines whether the lower and upper limiting bounds
(respectively) are open/exclusive (0) or closed/inclusive (1).
Usage: atmean(a,limits=None,inclusive=(1,1))
"""
if a.dtype in [N.int_, N.short,N.ubyte]:
a = a.astype(N.float_)
if limits == None:
return mean(a)
assert type(limits) in [ListType,TupleType,N.ndarray], "Wrong type for limits in atmean"
if inclusive[0]: lowerfcn = N.greater_equal
else: lowerfcn = N.greater
if inclusive[1]: upperfcn = N.less_equal
else: upperfcn = N.less
if limits[0] > N.maximum.reduce(N.ravel(a)) or limits[1] < N.minimum.reduce(N.ravel(a)):
raise ValueError, "No array values within given limits (atmean)."
elif limits[0]==None and limits[1]<>None:
mask = upperfcn(a,limits[1])
elif limits[0]<>None and limits[1]==None:
mask = lowerfcn(a,limits[0])
elif limits[0]<>None and limits[1]<>None:
mask = lowerfcn(a,limits[0])*upperfcn(a,limits[1])
s = float(N.add.reduce(N.ravel(a*mask)))
n = float(N.add.reduce(N.ravel(mask)))
return s/n
def atvar(a,limits=None,inclusive=(1,1)):
"""
Returns the sample variance of values in an array, (i.e., using N-1),
ignoring values strictly outside the sequence passed to 'limits'.
Note: either limit in the sequence, or the value of limits itself,
can be set to None. The inclusive list/tuple determines whether the lower
and upper limiting bounds (respectively) are open/exclusive (0) or
closed/inclusive (1). ASSUMES A FLAT ARRAY (OR ELSE PREFLATTENS).
Usage: atvar(a,limits=None,inclusive=(1,1))
"""
a = a.astype(N.float_)
if limits == None or limits == [None,None]:
return avar(a)
assert type(limits) in [ListType,TupleType,N.ndarray], "Wrong type for limits in atvar"
if inclusive[0]: lowerfcn = N.greater_equal
else: lowerfcn = N.greater
if inclusive[1]: upperfcn = N.less_equal
else: upperfcn = N.less
if limits[0] > N.maximum.reduce(N.ravel(a)) or limits[1] < N.minimum.reduce(N.ravel(a)):
raise ValueError, "No array values within given limits (atvar)."
elif limits[0]==None and limits[1]<>None:
mask = upperfcn(a,limits[1])
elif limits[0]<>None and limits[1]==None:
mask = lowerfcn(a,limits[0])
elif limits[0]<>None and limits[1]<>None:
mask = lowerfcn(a,limits[0])*upperfcn(a,limits[1])
a = N.compress(mask,a) # squish out excluded values
return avar(a)
def atmin(a,lowerlimit=None,dimension=None,inclusive=1):
"""
Returns the minimum value of a, along dimension, including only values less
than (or equal to, if inclusive=1) lowerlimit. If the limit is set to None,
all values in the array are used.
Usage: atmin(a,lowerlimit=None,dimension=None,inclusive=1)
"""
if inclusive: lowerfcn = N.greater
else: lowerfcn = N.greater_equal
if dimension == None:
a = N.ravel(a)
dimension = 0
if lowerlimit == None:
lowerlimit = N.minimum.reduce(N.ravel(a))-11
biggest = N.maximum.reduce(N.ravel(a))
ta = N.where(lowerfcn(a,lowerlimit),a,biggest)
return N.minimum.reduce(ta,dimension)
def atmax(a,upperlimit,dimension=None,inclusive=1):
"""
Returns the maximum value of a, along dimension, including only values greater
than (or equal to, if inclusive=1) upperlimit. If the limit is set to None,
a limit larger than the max value in the array is used.
Usage: atmax(a,upperlimit,dimension=None,inclusive=1)
"""
if inclusive: upperfcn = N.less
else: upperfcn = N.less_equal
if dimension == None:
a = N.ravel(a)
dimension = 0
if upperlimit == None:
upperlimit = N.maximum.reduce(N.ravel(a))+1
smallest = N.minimum.reduce(N.ravel(a))
ta = N.where(upperfcn(a,upperlimit),a,smallest)
return N.maximum.reduce(ta,dimension)
    def atstdev(a,limits=None,inclusive=(1,1)):
        """
        Returns the standard deviation of all values in an array, ignoring values
        strictly outside the sequence passed to 'limits'.  Note: either limit
        in the sequence, or the value of limits itself, can be set to None.  The
        inclusive list/tuple determines whether the lower and upper limiting bounds
        (respectively) are open/exclusive (0) or closed/inclusive (1).

        Usage:   atstdev(a,limits=None,inclusive=(1,1))
        """
        # thin wrapper: square root of the trimmed variance via the
        # module-level 'tvar' dispatcher
        return N.sqrt(tvar(a,limits,inclusive))
def atsem(a,limits=None,inclusive=(1,1)):
"""
Returns the standard error of the mean for the values in an array,
(i.e., using N for the denominator), ignoring values strictly outside
the sequence passed to 'limits'. Note: either limit in the sequence,
or the value of limits itself, can be set to None. The inclusive list/tuple
determines whether the lower and upper limiting bounds (respectively) are
open/exclusive (0) or closed/inclusive (1).
Usage: atsem(a,limits=None,inclusive=(1,1))
"""
sd = tstdev(a,limits,inclusive)
if limits == None or limits == [None,None]:
n = float(len(N.ravel(a)))
limits = [min(a)-1, max(a)+1]
assert type(limits) in [ListType,TupleType,N.ndarray], "Wrong type for limits in atsem"
if inclusive[0]: lowerfcn = N.greater_equal
else: lowerfcn = N.greater
if inclusive[1]: upperfcn = N.less_equal
else: upperfcn = N.less
if limits[0] > N.maximum.reduce(N.ravel(a)) or limits[1] < N.minimum.reduce(N.ravel(a)):
raise ValueError, "No array values within given limits (atsem)."
elif limits[0]==None and limits[1]<>None:
mask = upperfcn(a,limits[1])
elif limits[0]<>None and limits[1]==None:
mask = lowerfcn(a,limits[0])
elif limits[0]<>None and limits[1]<>None:
mask = lowerfcn(a,limits[0])*upperfcn(a,limits[1])
term1 = N.add.reduce(N.ravel(a*a*mask))
n = float(N.add.reduce(N.ravel(mask)))
return sd/math.sqrt(n)
#####################################
############ AMOMENTS #############
#####################################
def amoment(a,moment=1,dimension=None):
"""
Calculates the nth moment about the mean for a sample (defaults to the
1st moment). Generally used to calculate coefficients of skewness and
kurtosis. Dimension can equal None (ravel array first), an integer
(the dimension over which to operate), or a sequence (operate over
multiple dimensions).
Usage: amoment(a,moment=1,dimension=None)
Returns: appropriate moment along given dimension
"""
if dimension == None:
a = N.ravel(a)
dimension = 0
if moment == 1:
return 0.0
else:
mn = amean(a,dimension,1) # 1=keepdims
s = N.power((a-mn),moment)
return amean(s,dimension)
    def avariation(a,dimension=None):
        """
        Returns the coefficient of variation, as defined in CRC Standard
        Probability and Statistics, p.6. Dimension can equal None (ravel array
        first), an integer (the dimension over which to operate), or a
        sequence (operate over multiple dimensions).

        Usage:   avariation(a,dimension=None)
        Returns: 100 * (sample standard deviation / mean) along dimension
        """
        # expressed as a percentage of the mean; delegates dimension handling
        # to the sibling asamplestdev/amean helpers
        return 100.0*asamplestdev(a,dimension)/amean(a,dimension)
def askew(a,dimension=None):
"""
Returns the skewness of a distribution (normal ==> 0.0; >0 means extra
weight in left tail). Use askewtest() to see if it's close enough.
Dimension can equal None (ravel array first), an integer (the
dimension over which to operate), or a sequence (operate over multiple
dimensions).
Usage: askew(a, dimension=None)
Returns: skew of vals in a along dimension, returning ZERO where all vals equal
"""
denom = N.power(amoment(a,2,dimension),1.5)
zero = N.equal(denom,0)
if type(denom) == N.ndarray and asum(zero) <> 0:
print "Number of zeros in askew: ",asum(zero)
denom = denom + zero # prevent divide-by-zero
return N.where(zero, 0, amoment(a,3,dimension)/denom)
def akurtosis(a,dimension=None):
"""
Returns the kurtosis of a distribution (normal ==> 3.0; >3 means
heavier in the tails, and usually more peaked). Use akurtosistest()
to see if it's close enough. Dimension can equal None (ravel array
first), an integer (the dimension over which to operate), or a
sequence (operate over multiple dimensions).
Usage: akurtosis(a,dimension=None)
Returns: kurtosis of values in a along dimension, and ZERO where all vals equal
"""
denom = N.power(amoment(a,2,dimension),2)
zero = N.equal(denom,0)
if type(denom) == N.ndarray and asum(zero) <> 0:
print "Number of zeros in akurtosis: ",asum(zero)
denom = denom + zero # prevent divide-by-zero
return N.where(zero,0,amoment(a,4,dimension)/denom)
def adescribe(inarray,dimension=None):
"""
Returns several descriptive statistics of the passed array. Dimension
can equal None (ravel array first), an integer (the dimension over
which to operate), or a sequence (operate over multiple dimensions).
Usage: adescribe(inarray,dimension=None)
Returns: n, (min,max), mean, standard deviation, skew, kurtosis
"""
if dimension == None:
inarray = N.ravel(inarray)
dimension = 0
n = inarray.shape[dimension]
mm = (N.minimum.reduce(inarray),N.maximum.reduce(inarray))
m = amean(inarray,dimension)
sd = astdev(inarray,dimension)
skew = askew(inarray,dimension)
kurt = akurtosis(inarray,dimension)
return n, mm, m, sd, skew, kurt
#####################################
######## NORMALITY TESTS ##########
#####################################
    def askewtest(a,dimension=None):
        """
        Tests whether the skew is significantly different from a normal
        distribution.  Dimension can equal None (ravel array first), an
        integer (the dimension over which to operate), or a sequence (operate
        over multiple dimensions).

        Usage:   askewtest(a,dimension=None)
        Returns: z-score and 2-tail z-probability
        """
        if dimension == None:
            a = N.ravel(a)
            dimension = 0
        b2 = askew(a,dimension)                # sample skewness
        n = float(a.shape[dimension])
        # transform the skewness into an approximately standard-normal
        # deviate -- presumably D'Agostino's transformation; TODO confirm
        y = b2 * N.sqrt(((n+1)*(n+3)) / (6.0*(n-2)) )
        beta2 = ( 3.0*(n*n+27*n-70)*(n+1)*(n+3) ) / ( (n-2.0)*(n+5)*(n+7)*(n+9) )
        W2 = -1 + N.sqrt(2*(beta2-1))
        delta = 1/N.sqrt(N.log(N.sqrt(W2)))
        alpha = N.sqrt(2/(W2-1))
        y = N.where(y==0,1,y)                  # guard the log/asinh below against y==0
        Z = delta*N.log(y/alpha + N.sqrt((y/alpha)**2+1))
        return Z, (1.0-zprob(Z))*2             # two-tailed p from the z-score
    def akurtosistest(a,dimension=None):
        """
        Tests whether a dataset has normal kurtosis (i.e.,
        kurtosis=3(n-1)/(n+1)) Valid only for n>20.  Dimension can equal None
        (ravel array first), an integer (the dimension over which to operate),
        or a sequence (operate over multiple dimensions).

        Usage:   akurtosistest(a,dimension=None)
        Returns: z-score and 2-tail z-probability, returns 0 for bad pixels
        """
        if dimension == None:
            a = N.ravel(a)
            dimension = 0
        n = float(a.shape[dimension])
        if n<20:
            # the approximation below is stated valid only for larger samples
            print "akurtosistest only valid for n>=20 ... continuing anyway, n=",n
        b2 = akurtosis(a,dimension)            # sample kurtosis
        E = 3.0*(n-1) /(n+1)                   # expected kurtosis under normality
        varb2 = 24.0*n*(n-2)*(n-3) / ((n+1)*(n+1)*(n+3)*(n+5))
        x = (b2-E)/N.sqrt(varb2)               # standardized kurtosis
        sqrtbeta1 = 6.0*(n*n-5*n+2)/((n+7)*(n+9)) * N.sqrt((6.0*(n+3)*(n+5))/
                                                           (n*(n-2)*(n-3)))
        A = 6.0 + 8.0/sqrtbeta1 *(2.0/sqrtbeta1 + N.sqrt(1+4.0/(sqrtbeta1**2)))
        term1 = 1 -2/(9.0*A)
        denom = 1 +x*N.sqrt(2/(A-4.0))
        denom = N.where(N.less(denom,0), 99, denom)   # 99 flags 'bad pixels'
        term2 = N.where(N.equal(denom,0), term1, N.power((1-2.0/A)/denom,1/3.0))
        Z = ( term1 - term2 ) / N.sqrt(2/(9.0*A))     # presumably Anscombe-Glynn z -- TODO confirm
        Z = N.where(N.equal(denom,99), 0, Z)          # zero out flagged positions
        return Z, (1.0-zprob(Z))*2
def anormaltest(a,dimension=None):
"""
Tests whether skew and/OR kurtosis of dataset differs from normal
curve. Can operate over multiple dimensions. Dimension can equal
None (ravel array first), an integer (the dimension over which to
operate), or a sequence (operate over multiple dimensions).
Usage: anormaltest(a,dimension=None)
Returns: z-score and 2-tail probability
"""
if dimension == None:
a = N.ravel(a)
dimension = 0
s,p = askewtest(a,dimension)
k,p = akurtosistest(a,dimension)
k2 = N.power(s,2) + N.power(k,2)
return k2, achisqprob(k2,2)
#####################################
###### AFREQUENCY FUNCTIONS #######
#####################################
def aitemfreq(a):
"""
Returns a 2D array of item frequencies. Column 1 contains item values,
column 2 contains their respective counts. Assumes a 1D array is passed.
@@@sorting OK?
Usage: aitemfreq(a)
Returns: a 2D frequency table (col [0:n-1]=scores, col n=frequencies)
"""
scores = pstat.aunique(a)
scores = N.sort(scores)
freq = N.zeros(len(scores))
for i in range(len(scores)):
freq[i] = N.add.reduce(N.equal(a,scores[i]))
return N.array(pstat.aabut(scores, freq))
def ascoreatpercentile (inarray, percent):
"""
Usage: ascoreatpercentile(inarray,percent) 0<percent<100
Returns: score at given percentile, relative to inarray distribution
"""
percent = percent / 100.0
targetcf = percent*len(inarray)
h, lrl, binsize, extras = histogram(inarray)
cumhist = cumsum(h*1)
for i in range(len(cumhist)):
if cumhist[i] >= targetcf:
break
score = binsize * ((targetcf - cumhist[i-1]) / float(h[i])) + (lrl+binsize*i)
return score
def apercentileofscore (inarray,score,histbins=10,defaultlimits=None):
"""
Note: result of this function depends on the values used to histogram
the data(!).
Usage: apercentileofscore(inarray,score,histbins=10,defaultlimits=None)
Returns: percentile-position of score (0-100) relative to inarray
"""
h, lrl, binsize, extras = histogram(inarray,histbins,defaultlimits)
cumhist = cumsum(h*1)
i = int((score - lrl)/float(binsize))
pct = (cumhist[i-1]+((score-(lrl+binsize*i))/float(binsize))*h[i])/float(len(inarray)) * 100
return pct
def ahistogram (inarray,numbins=10,defaultlimits=None,printextras=1):
"""
Returns (i) an array of histogram bin counts, (ii) the smallest value
of the histogram binning, and (iii) the bin width (the last 2 are not
necessarily integers). Default number of bins is 10. Defaultlimits
can be None (the routine picks bins spanning all the numbers in the
inarray) or a 2-sequence (lowerlimit, upperlimit). Returns all of the
following: array of bin values, lowerreallimit, binsize, extrapoints.
Usage: ahistogram(inarray,numbins=10,defaultlimits=None,printextras=1)
Returns: (array of bin counts, bin-minimum, min-width, #-points-outside-range)
"""
inarray = N.ravel(inarray) # flatten any >1D arrays
if (defaultlimits <> None):
lowerreallimit = defaultlimits[0]
upperreallimit = defaultlimits[1]
binsize = (upperreallimit-lowerreallimit) / float(numbins)
else:
Min = N.minimum.reduce(inarray)
Max = N.maximum.reduce(inarray)
estbinwidth = float(Max - Min)/float(numbins) + 1e-6
binsize = (Max-Min+estbinwidth)/float(numbins)
lowerreallimit = Min - binsize/2.0 #lower real limit,1st bin
bins = N.zeros(numbins)
extrapoints = 0
for num in inarray:
try:
if (num-lowerreallimit) < 0:
extrapoints = extrapoints + 1
else:
bintoincrement = int((num-lowerreallimit) / float(binsize))
bins[bintoincrement] = bins[bintoincrement] + 1
except: # point outside lower/upper limits
extrapoints = extrapoints + 1
if (extrapoints > 0 and printextras == 1):
print '\nPoints outside given histogram range =',extrapoints
return (bins, lowerreallimit, binsize, extrapoints)
def acumfreq(a,numbins=10,defaultreallimits=None):
"""
Returns a cumulative frequency histogram, using the histogram function.
Defaultreallimits can be None (use all data), or a 2-sequence containing
lower and upper limits on values to include.
Usage: acumfreq(a,numbins=10,defaultreallimits=None)
Returns: array of cumfreq bin values, lowerreallimit, binsize, extrapoints
"""
h,l,b,e = histogram(a,numbins,defaultreallimits)
cumhist = cumsum(h*1)
return cumhist,l,b,e
def arelfreq(a,numbins=10,defaultreallimits=None):
    """
    Relative frequency histogram, built on top of the histogram function.
    Defaultreallimits can be None (use all data) or a (lower, upper)
    2-sequence limiting which values are binned.

    Usage:   arelfreq(a,numbins=10,defaultreallimits=None)
    Returns: array of relfreq bin values, lowerreallimit, binsize, extrapoints
    """
    counts, lowerlimit, width, extra = histogram(a, numbins, defaultreallimits)
    relfreqs = N.array(counts / float(a.shape[0]))
    return relfreqs, lowerlimit, width, extra
#####################################
###### AVARIABILITY FUNCTIONS #####
#####################################
def aobrientransform(*args):
    """
    Computes a transform on input data (any number of columns).  Used to
    test for homogeneity of variance prior to running one-way stats.  Each
    array in *args is one level of a factor.  If an F_oneway() run on the
    transformed data is found significant, variances are unequal.  From
    Maxwell and Delaney, p.112.

    Usage:   aobrientransform(*args)  *args = 1D arrays, one per level of factor
    Returns: transformed data for use in an ANOVA
    Raises:  ValueError if the transform fails its internal convergence check
    """
    TINY = 1e-10
    k = len(args)
    n = N.zeros(k,N.float_)          # per-level counts
    v = N.zeros(k,N.float_)          # per-level variances
    m = N.zeros(k,N.float_)          # per-level means
    nargs = []
    for i in range(k):
        nargs.append(args[i].astype(N.float_))
        n[i] = float(len(nargs[i]))
        v[i] = var(nargs[i])
        m[i] = mean(nargs[i])
    for j in range(k):
        # n[j] is a float; cast explicitly -- range() on a float is
        # deprecated in Python 2 and an error in Python 3
        for i in range(int(n[j])):
            t1 = (n[j]-1.5)*n[j]*(nargs[j][i]-m[j])**2
            t2 = 0.5*v[j]*(n[j]-1.0)
            t3 = (n[j]-1.0)*(n[j]-2.0)
            nargs[j][i] = (t1-t2) / float(t3)
    # sanity check: transformed means should match the original variances
    check = 1
    for j in range(k):
        if v[j] - mean(nargs[j]) > TINY:
            check = 0
    if check != 1:       # '!=' instead of the deprecated '<>'
        raise ValueError('Lack of convergence in obrientransform.')
    else:
        return N.array(nargs)
def asamplevar (inarray,dimension=None,keepdims=0):
    """
    Returns the sample variance of the values in the passed array (the
    biased estimate, dividing by N).  Dimension can equal None (ravel
    array first), an integer (the dimension over which to operate), or a
    sequence (operate over multiple dimensions).  Set keepdims=1 to return
    an array with the same number of dimensions as inarray.
    NOTE(review): the original docstring said "standard deviation"; the
    code computes variance -- asamplestdev() takes the square root.

    Usage:   asamplevar(inarray,dimension=None,keepdims=0)
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    if dimension == 1:
        # special case so the per-row means broadcast back across columns
        # NOTE(review): N.NewAxis is the old Numeric spelling (numpy uses
        # N.newaxis) -- confirm which array package N is bound to
        mn = amean(inarray,dimension)[:,N.NewAxis]
    else:
        mn = amean(inarray,dimension,keepdims=1)
    deviations = inarray - mn
    if type(dimension) == ListType:
        # multiple dimensions: n = product of collapsed axis lengths
        n = 1
        for d in dimension:
            n = n*inarray.shape[d]
    else:
        n = inarray.shape[dimension]
    # sum of squared deviations over N
    svar = ass(deviations,dimension,keepdims) / float(n)
    return svar
def asamplestdev (inarray, dimension=None, keepdims=0):
    """
    Sample standard deviation: square root of the N-denominator sample
    variance.  Dimension may be None (ravel first), an int, or a sequence
    of ints; keepdims=1 preserves the input's dimensionality.

    Usage:   asamplestdev(inarray,dimension=None,keepdims=0)
    """
    variance = asamplevar(inarray, dimension, keepdims)
    return N.sqrt(variance)
def asignaltonoise(instack,dimension=0):
    """
    Signal-to-noise ratio along a dimension: mean/stdev, with 0 wherever
    the standard deviation is 0.  Dimension may be None (ravel first),
    an int, or a sequence of ints.

    Usage:   asignaltonoise(instack,dimension=0)
    Returns: array of (mean/stdev) values along dimension, 0 where stdev==0
    """
    means = mean(instack,dimension)
    sds = stdev(instack,dimension)
    return N.where(sds==0, 0, means/sds)
def acov (x,y, dimension=None,keepdims=0):
    """
    Returns the estimated covariance of the values in the passed arrays
    (i.e., with an N-1 denominator).  Dimension can equal None (ravel
    arrays first), an integer (the dimension over which to operate), or a
    sequence (operate over multiple dimensions).  Set keepdims=1 to return
    an array with the same number of dimensions as the inputs.
    NOTE(review): the final N.sum() does not pass `dimension` through, so
    for a non-None dimension it sums over the default axis regardless --
    confirm intended behavior before relying on dimension != None here.

    Usage:   acov(x,y,dimension=None,keepdims=0)
    """
    if dimension == None:
        x = N.ravel(x)
        y = N.ravel(y)
        dimension = 0
    xmn = amean(x,dimension,1)  # keepdims=1 so the mean broadcasts
    xdeviations = x - xmn
    ymn = amean(y,dimension,1)  # keepdims=1 so the mean broadcasts
    ydeviations = y - ymn
    if type(dimension) == ListType:
        # multiple dimensions: n = product of collapsed axis lengths
        n = 1
        for d in dimension:
            n = n*x.shape[d]
    else:
        n = x.shape[dimension]
    covar = N.sum(xdeviations*ydeviations)/float(n-1)
    return covar
def avar (inarray, dimension=None,keepdims=0):
    """
    Estimated population variance (N-1 denominator) of the values in the
    passed array.  Dimension may be None (ravel first), an int, or a
    sequence of ints; keepdims=1 preserves the input's dimensionality.

    Usage:   avar(inarray,dimension=None,keepdims=0)
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    devs = inarray - amean(inarray,dimension,1)
    if type(dimension) == ListType:
        count = 1
        for axis in dimension:
            count = count*inarray.shape[axis]
    else:
        count = inarray.shape[dimension]
    return ass(devs,dimension,keepdims)/float(count-1)
def astdev (inarray, dimension=None, keepdims=0):
    """
    Estimated population standard deviation: square root of the N-1
    variance.  Dimension may be None (ravel first), an int, or a sequence
    of ints; keepdims=1 preserves the input's dimensionality.

    Usage:   astdev(inarray,dimension=None,keepdims=0)
    """
    variance = avar(inarray, dimension, keepdims)
    return N.sqrt(variance)
def asterr (inarray, dimension=None, keepdims=0):
    """
    Estimated population standard error: the N-1 standard deviation
    divided by sqrt(n) along the given dimension.  Dimension may be None
    (ravel first), an int, or a sequence of ints; keepdims=1 preserves
    the input's dimensionality.

    Usage:   asterr(inarray,dimension=None,keepdims=0)
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    denom = float(N.sqrt(inarray.shape[dimension]))
    return astdev(inarray,dimension,keepdims) / denom
def asem (inarray, dimension=None, keepdims=0):
    """
    Standard error of the mean: the sample (N-denominator) standard
    deviation divided by sqrt(n-1).  Dimension may be None (ravel first),
    an int, or a sequence of ints; keepdims=1 preserves the input's
    dimensionality.

    Usage:   asem(inarray,dimension=None, keepdims=0)
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    if type(dimension) == ListType:
        count = 1
        for axis in dimension:
            count = count*inarray.shape[axis]
    else:
        count = inarray.shape[dimension]
    return asamplestdev(inarray,dimension,keepdims) / N.sqrt(count-1)
def az (a, score):
    """
    Z-score of `score` relative to the distribution in array a.  Not
    appropriate for population calculations, nor for arrays > 1D.

    Usage:   az(a, score)
    """
    return (score - amean(a)) / asamplestdev(a)
def azs (a):
    """
    Returns a 1D array of z-scores, one for each score in the passed array,
    computed relative to the passed array.

    Usage:   azs(a)
    """
    zscores = []
    for item in a:
        # use the array-aware az() (was the list-based z()) for
        # consistency with the rest of the a-prefixed functions
        zscores.append(az(a,item))
    return N.array(zscores)
def azmap (scores, compare, dimension=0):
    """
    Z-scores for `scores` (e.g., [x,y]) relative to the distribution in
    `compare` (e.g., [time,x,y]), collapsing `compare` over dim 0.

    Usage:   azmap(scores, compare, dimension=0)
    """
    baseline_means = amean(compare,dimension)
    baseline_sds = asamplestdev(compare,0)
    return (scores - baseline_means) / baseline_sds
#####################################
####### ATRIMMING FUNCTIONS #######
#####################################
## deleted around() as it's in numpy now
def athreshold(a,threshmin=None,threshmax=None,newval=0):
    """
    Like Numeric.clip() except that values <threshmin or >threshmax are
    replaced by newval instead of by threshmin/threshmax (respectively).

    Usage:   athreshold(a,threshmin=None,threshmax=None,newval=0)
    Returns: a, with values <threshmin or >threshmax replaced with newval
    """
    mask = N.zeros(a.shape)
    # identity tests instead of the deprecated '<>' (also avoids an
    # elementwise comparison if an array threshold were passed)
    if threshmin is not None:
        mask = mask + N.where(a<threshmin,1,0)
    if threshmax is not None:
        mask = mask + N.where(a>threshmax,1,0)
    mask = N.clip(mask,0,1)      # collapse double-flagged cells to 1
    return N.where(mask,newval,a)
def atrimboth (a,proportiontocut):
    """
    Slices off the passed proportion of items from BOTH ends of the passed
    array (so proportiontocut=0.1 drops the 'leftmost' 10% AND 'rightmost'
    10% of scores).  Pre-sort the array if you want "proper" trimming.
    Trims LESS when the proportion yields a non-integer slice index
    (i.e., conservatively slices off proportiontocut).

    Usage:   atrimboth (a,proportiontocut)
    Returns: trimmed version of array a
    """
    ncut = int(proportiontocut * len(a))
    return a[ncut:len(a) - ncut]
def atrim1 (a,proportiontocut,tail='right'):
    """
    Slices off the passed proportion of items from ONE end of the passed
    array (i.e., if proportiontocut=0.1, slices off 'leftmost' or 'rightmost'
    10% of scores).  Slices off LESS if proportion results in a non-integer
    slice index (i.e., conservatively slices off proportiontocut).

    Usage:   atrim1(a,proportiontocut,tail='right')  or set tail='left'
    Returns: trimmed version of array a
    Raises:  ValueError if tail is neither 'right' nor 'left'
    """
    tail = tail.lower()    # str method; the string-module function is deprecated
    if tail == 'right':
        lowercut = 0
        uppercut = len(a) - int(proportiontocut*len(a))
    elif tail == 'left':
        lowercut = int(proportiontocut*len(a))
        uppercut = len(a)
    else:
        # previously fell through to a NameError; fail with a clear message
        raise ValueError("tail must be 'right' or 'left', got %r" % tail)
    return a[lowercut:uppercut]
#####################################
##### ACORRELATION FUNCTIONS ######
#####################################
def acovariance(X):
    """
    Computes the covariance matrix of a matrix X.  Requires a 2D matrix input.

    Usage:   acovariance(X)
    Returns: covariance matrix of X (population form, N denominator)
    Raises:  TypeError if X is not 2D
    """
    if len(X.shape) != 2:      # '!=' instead of the deprecated '<>'
        raise TypeError("acovariance requires 2D matrices")
    n = X.shape[0]
    mX = amean(X,0)
    # E[X'X] - outer(mean, mean)
    return N.dot(N.transpose(X),X) / float(n) - N.multiply.outer(mX,mX)
def acorrelation(X):
    """
    Computes the correlation matrix of a matrix X.  Requires a 2D matrix input.

    Usage:   acorrelation(X)
    Returns: correlation matrix of X
    """
    covmat = acovariance(X)
    variances = N.diagonal(covmat)
    # normalize each covariance by the product of the two standard deviations
    return covmat / N.sqrt(N.multiply.outer(variances,variances))
def apaired(x,y):
    """
    Interactively determines the type of data in x and y, and then runs the
    appropriate statistic for paired group data.  Prompts on stdin
    (Python 2 raw_input) for the sample relationship and, for correlation,
    the data type; prints results rather than returning them.

    Usage:   apaired(x,y)     x,y = the two arrays of values to be compared
    Returns: None (appropriate statistic name, value, and probability are printed)
    """
    samples = ''
    while samples not in ['i','r','I','R','c','C']:
        print '\nIndependent or related samples, or correlation (i,r,c): ',
        samples = raw_input()

    if samples in ['i','I','r','R']:
        print '\nComparing variances ...',
# USE O'BRIEN'S TEST FOR HOMOGENEITY OF VARIANCE, Maxwell & delaney, p.112
        r = obrientransform(x,y)
        f,p = F_oneway(pstat.colex(r,0),pstat.colex(r,1))
        if p<0.05:
            vartype='unequal, p='+str(round(p,4))
        else:
            vartype='equal'
        print vartype
        if samples in ['i','I']:
            if vartype[0]=='e':
                # equal variances: parametric independent-samples t
                t,p = ttest_ind(x,y,None,0)
                print '\nIndependent samples t-test: ', round(t,4),round(p,4)
            else:
                # unequal variances: fall back to non-parametric tests
                if len(x)>20 or len(y)>20:
                    z,p = ranksums(x,y)
                    print '\nRank Sums test (NONparametric, n>20): ', round(z,4),round(p,4)
                else:
                    u,p = mannwhitneyu(x,y)
                    print '\nMann-Whitney U-test (NONparametric, ns<20): ', round(u,4),round(p,4)

        else:  # RELATED SAMPLES
            if vartype[0]=='e':
                t,p = ttest_rel(x,y,0)
                print '\nRelated samples t-test: ', round(t,4),round(p,4)
            else:
                t,p = ranksums(x,y)
                print '\nWilcoxon T-test (NONparametric): ', round(t,4),round(p,4)
    else:  # CORRELATION ANALYSIS
        corrtype = ''
        while corrtype not in ['c','C','r','R','d','D']:
            print '\nIs the data Continuous, Ranked, or Dichotomous (c,r,d): ',
            corrtype = raw_input()
        if corrtype in ['c','C']:
            m,b,r,p,see = linregress(x,y)
            print '\nLinear regression for continuous variables ...'
            lol = [['Slope','Intercept','r','Prob','SEestimate'],[round(m,4),round(b,4),round(r,4),round(p,4),round(see,4)]]
            pstat.printcc(lol)
        elif corrtype in ['r','R']:
            r,p = spearmanr(x,y)
            print '\nCorrelation for ranked variables ...'
            print "Spearman's r: ",round(r,4),round(p,4)
        else: # DICHOTOMOUS
            r,p = pointbiserialr(x,y)
            print '\nAssuming x contains a dichotomous variable ...'
            print 'Point Biserial r: ',round(r,4),round(p,4)
    print '\n\n'
    return None
def dices(x,y):
"""
Calculates Dice's coefficient ... (2*number of common terms)/(number of terms in x +
number of terms in y). Returns a value between 0 (orthogonal) and 1.
Usage: dices(x,y)
"""
import sets
x = sets.Set(x)
y = sets.Set(y)
common = len(x.intersection(y))
total = float(len(x) + len(y))
return 2*common/total
def icc(x,y=None,verbose=0):
    """
    Calculates intraclass correlation coefficients using simple, Type I
    sums of squares.  If only one variable is passed, it is assumed to be
    an Nx2 matrix.

    Usage:   icc(x,y=None,verbose=0)
    Returns: icc rho, prob  ####PROB IS A GUESS BASED ON PEARSON
    """
    TINY = 1.0e-20
    # explicit None test: 'if y:' raises for multi-element array arguments
    if y is not None:
        all = N.concatenate([x,y],0)
    else:
        all = x + 0              # copy, then split the two columns
        x = all[:,0]
        y = all[:,1]
    totalss = ass(all-mean(all))
    pairmeans = (x+y)/2.
    withinss = ass(x-pairmeans) + ass(y-pairmeans)
    withindf = float(len(x))
    betwdf = float(len(x)-1)
    withinms = withinss / withindf
    betweenms = (totalss-withinss) / betwdf
    rho = (betweenms-withinms)/(withinms+betweenms)
    # t-transform of rho; TINY guards the rho = +/-1 endpoints
    t = rho*math.sqrt(betwdf/((1.0-rho+TINY)*(1.0+rho+TINY)))
    prob = abetai(0.5*betwdf,0.5,betwdf/(betwdf+t*t),verbose)
    return rho, prob
def alincc(x,y):
    """
    Calculates Lin's concordance correlation coefficient.

    Usage:   alincc(x,y)    where x, y are equal-length arrays
    Returns: Lin's CC
    """
    x = N.ravel(x)
    y = N.ravel(y)
    # rescale the N-1 estimators to their N-denominator versions
    covar = acov(x,y) * (len(x)-1) / float(len(x))
    xvar = avar(x) * (len(x)-1) / float(len(x))
    yvar = avar(y) * (len(y)-1) / float(len(y))
    meandiffsq = (amean(x) - amean(y))**2
    return (2 * covar) / ((xvar + yvar) + meandiffsq)
def apearsonr(x,y,verbose=1):
    """
    Calculates a Pearson correlation coefficient and returns p.  Taken
    from Heiman's Basic Statistics for the Behav. Sci (2nd), p.195.

    Usage:   apearsonr(x,y,verbose=1)   where x,y are equal length arrays
    Returns: Pearson's r, two-tailed p-value
    """
    TINY = 1.0e-20
    n = len(x)
    # (dead xmean/ymean locals removed -- they were never used)
    r_num = n*(N.add.reduce(x*y)) - N.add.reduce(x)*N.add.reduce(y)
    r_den = math.sqrt((n*ass(x) - asquare_of_sums(x))*(n*ass(y)-asquare_of_sums(y)))
    r = (r_num / r_den)
    df = n-2
    # t-transform of r; TINY guards the r = +/-1 endpoints
    t = r*math.sqrt(df/((1.0-r+TINY)*(1.0+r+TINY)))
    prob = abetai(0.5*df,0.5,df/(df+t*t),verbose)
    return r,prob
def aspearmanr(x,y):
    """
    Calculates a Spearman rank-order correlation coefficient.  Taken
    from Heiman's Basic Statistics for the Behav. Sci (1st), p.192.

    Usage:   aspearmanr(x,y)   where x,y are equal-length arrays
    Returns: Spearman's r, two-tailed p-value
    """
    TINY = 1e-30
    npts = len(x)
    xranks = rankdata(x)
    yranks = rankdata(y)
    sumdsq = N.add.reduce((xranks-yranks)**2)
    rs = 1 - 6*sumdsq / float(npts*(npts**2-1))
    t = rs * math.sqrt((npts-2) / ((rs+1.0)*(1.0-rs)))
    df = npts-2
    # probability values for rs follow part 2 of the spearman function in
    # Numerical Recipes, p.510 -- close to tables, but not exact (?)
    probrs = abetai(0.5*df,0.5,df/(df+t*t))
    return rs, probrs
def apointbiserialr(x,y):
    """
    Calculates a point-biserial correlation coefficient and the associated
    probability value.  Taken from Heiman's Basic Statistics for the Behav.
    Sci (1st), p.194.

    Usage:   apointbiserialr(x,y)    where x,y are equal length arrays
    Returns: Point-biserial r, two-tailed p-value
    Raises:  ValueError unless x contains exactly 2 categories
    """
    TINY = 1e-30
    categories = pstat.aunique(x)
    data = pstat.aabut(x,y)
    if len(categories) != 2:     # '!=' instead of the deprecated '<>'
        raise ValueError("Exactly 2 categories required (in x) for pointbiserialr().")
    else:   # there are 2 categories, continue
        codemap = pstat.aabut(categories,N.arange(2))
        recoded = pstat.arecode(data,codemap,0)
        # split rows by category and take the mean of the y column in each
        x = pstat.alinexand(data,0,categories[0])
        y = pstat.alinexand(data,0,categories[1])
        xmean = amean(pstat.acolex(x,1))
        ymean = amean(pstat.acolex(y,1))
        n = len(data)
        adjust = math.sqrt((len(x)/float(n))*(len(y)/float(n)))
        rpb = (ymean - xmean)/asamplestdev(pstat.acolex(data,1))*adjust
        df = n-2
        # t-transform of rpb; TINY guards the rpb = +/-1 endpoints
        t = rpb*math.sqrt(df/((1.0-rpb+TINY)*(1.0+rpb+TINY)))
        prob = abetai(0.5*df,0.5,df/(df+t*t))
        return rpb, prob
def akendalltau(x,y):
    """
    Calculates Kendall's tau ... correlation of ordinal data.  Adapted
    from function kendl1 in Numerical Recipies.  Needs good test-cases.@@@

    Usage:   akendalltau(x,y)
    Returns: Kendall's tau, two-tailed p-value
    """
    n1 = 0
    n2 = 0
    iss = 0
    for j in range(len(x)-1):
        # start at j+1 as in NR's kendl1: comparing a pair with itself
        # (k==j) yields zero differences and was mis-counted as a tie,
        # inflating n2 and biasing tau toward zero
        for k in range(j+1,len(y)):
            a1 = x[j] - x[k]
            a2 = y[j] - y[k]
            aa = a1 * a2
            if (aa):             # neither array has a tie for this pair
                n1 = n1 + 1
                n2 = n2 + 1
                if aa > 0:       # concordant pair
                    iss = iss + 1
                else:            # discordant pair
                    iss = iss -1
            else:
                if (a1):         # tie in y only
                    n1 = n1 + 1
                else:            # tie in x (or both)
                    n2 = n2 + 1
    tau = iss / math.sqrt(n1*n2)
    # normal approximation to tau's sampling variance
    svar = (4.0*len(x)+10.0) / (9.0*len(x)*(len(x)-1))
    z = tau / math.sqrt(svar)
    prob = erfcc(abs(z)/1.4142136)
    return tau, prob
def alinregress(*args):
    """
    Calculates a regression line on two arrays, x and y, corresponding to
    x,y pairs.  If a single 2D array is passed, alinregress finds the dim
    with 2 levels and splits the data into x,y pairs along that dim.

    Usage:   alinregress(*args)    args=2 equal-length arrays, or one 2D array
    Returns: slope, intercept, r, two-tailed prob, sterr-of-the-estimate, n
    """
    TINY = 1.0e-20
    if len(args) == 1:          # a single (possibly 2D) array was passed
        args = args[0]
        if len(args) == 2:      # 2xN layout: rows are x and y
            x = args[0]
            y = args[1]
        else:                   # Nx2 layout: columns are x and y
            x = args[:,0]
            y = args[:,1]
    else:
        x = args[0]
        y = args[1]
    npts = len(x)
    xbar = amean(x)
    ybar = amean(y)
    r_num = npts*(N.add.reduce(x*y)) - N.add.reduce(x)*N.add.reduce(y)
    r_den = math.sqrt((npts*ass(x) - asquare_of_sums(x)) *
                      (npts*ass(y) - asquare_of_sums(y)))
    r = r_num / r_den
    z = 0.5*math.log((1.0+r+TINY)/(1.0-r+TINY))   # Fisher z (computed but unused)
    df = npts-2
    t = r*math.sqrt(df/((1.0-r+TINY)*(1.0+r+TINY)))
    prob = abetai(0.5*df,0.5,df/(df+t*t))
    slope = r_num / (float(npts)*ass(x) - asquare_of_sums(x))
    intercept = ybar - slope*xbar
    sterrest = math.sqrt(1-r*r)*asamplestdev(y)
    return slope, intercept, r, prob, sterrest, npts
def amasslinregress(*args):
    """
    Calculates a regression line on one 1D array (x) and one N-D array (y).

    Usage:   amasslinregress(*args)   args=x and y arrays, or one combined array
    Returns: slope, intercept, r, two-tailed prob, sterr-of-the-estimate, n
    """
    TINY = 1.0e-20
    if len(args) == 1:          # a single combined array was passed
        args = args[0]
        if len(args) == 2:      # rows are x and y
            x = N.ravel(args[0])
            y = args[1]
        else:                   # columns are x and y
            x = N.ravel(args[:,0])
            y = args[:,1]
    else:
        x = args[0]
        y = args[1]
    x = x.astype(N.float_)
    y = y.astype(N.float_)
    n = len(x)
    xmean = amean(x)
    ymean = amean(y,0)
    # reshape x so it broadcasts along y's trailing dimensions
    shp = N.ones(len(y.shape))
    shp[0] = len(x)
    x.shape = shp
    # (leftover debug 'print x.shape, y.shape' removed)
    r_num = n*(N.add.reduce(x*y,0)) - N.add.reduce(x)*N.add.reduce(y,0)
    r_den = N.sqrt((n*ass(x) - asquare_of_sums(x))*(n*ass(y,0)-asquare_of_sums(y,0)))
    zerodivproblem = N.equal(r_den,0)
    r_den = N.where(zerodivproblem,1,r_den)  # avoid zero-division in 1st place
    r = r_num / r_den      # need to do this nicely for matrix division
    r = N.where(zerodivproblem,0.0,r)
    z = 0.5*N.log((1.0+r+TINY)/(1.0-r+TINY))
    df = n-2
    t = r*N.sqrt(df/((1.0-r+TINY)*(1.0+r+TINY)))
    prob = abetai(0.5*df,0.5,df/(df+t*t))
    ss = float(n)*ass(x)-asquare_of_sums(x)
    s_den = N.where(ss==0,1,ss)  # avoid zero-division in 1st place
    slope = r_num / s_den
    intercept = ymean - slope*xmean
    sterrest = N.sqrt(1-r*r)*asamplestdev(y,0)
    return slope, intercept, r, prob, sterrest, n
#####################################
##### AINFERENTIAL STATISTICS #####
#####################################
def attest_1samp(a,popmean,printit=0,name='Sample',writemode='a'):
    """
    Calculates the t-obtained for the independent samples T-test on ONE
    group of scores a, given a population mean.  If printit=1, results are
    printed to the screen.  If printit='filename', the results are output
    to 'filename' using the given writemode (default=append).

    Usage:   attest_1samp(a,popmean,Name='Sample',printit=0,writemode='a')
    Returns: t-value, two-tailed prob
    """
    if type(a) != N.ndarray:
        a = N.array(a)
    x = amean(a)
    v = avar(a)
    n = len(a)
    df = n-1
    svar = ((n-1)*v) / float(df)
    t = (x-popmean)/math.sqrt(svar*(1.0/n))
    prob = abetai(0.5*df,0.5,df/(df+t*t))

    if printit != 0:     # '!=' instead of the deprecated '<>'
        statname = 'Single-sample T-test.'
        outputpairedstats(printit,writemode,
                          'Population','--',popmean,0,0,0,
                          name,n,x,v,N.minimum.reduce(N.ravel(a)),
                          N.maximum.reduce(N.ravel(a)),
                          statname,t,prob)
    return t,prob
def attest_ind (a, b, dimension=None, printit=0, name1='Samp1', name2='Samp2',writemode='a'):
    """
    Calculates the t-obtained T-test on TWO INDEPENDENT samples of scores
    a, and b.  From Numerical Recipies, p.483.  If printit=1, results are
    printed to the screen.  If printit='filename', the results are output
    to 'filename' using the given writemode (default=append).  Dimension
    can equal None (ravel array first), or an integer (the dimension over
    which to operate on a and b).  NOTE: when printit is non-zero the
    function prints/writes the stats and returns None (historical behavior).

    Usage:   attest_ind (a,b,dimension=None,printit=0,
                         Name1='Samp1',Name2='Samp2',writemode='a')
    Returns: t-value, two-tailed p-value (or None when printit is non-zero)
    """
    if dimension == None:
        a = N.ravel(a)
        b = N.ravel(b)
        dimension = 0
    x1 = amean(a,dimension)
    x2 = amean(b,dimension)
    v1 = avar(a,dimension)
    v2 = avar(b,dimension)
    n1 = a.shape[dimension]
    n2 = b.shape[dimension]
    df = n1+n2-2
    svar = ((n1-1)*v1+(n2-1)*v2) / float(df)    # pooled variance
    zerodivproblem = N.equal(svar,0)
    svar = N.where(zerodivproblem,1,svar)       # avoid zero-division in 1st place
    t = (x1-x2)/N.sqrt(svar*(1.0/n1 + 1.0/n2))  # N-D COMPUTATION HERE!!!!!!
    t = N.where(zerodivproblem,1.0,t)           # replace NaN/wrong t-values with 1.0
    probs = abetai(0.5*df,0.5,float(df)/(df+t*t))

    if type(t) == N.ndarray:
        probs = N.reshape(probs,t.shape)
    if probs.shape == (1,):
        probs = probs[0]

    if printit != 0:     # '!=' instead of the deprecated '<>'
        if type(t) == N.ndarray:
            t = t[0]
        if type(probs) == N.ndarray:
            probs = probs[0]
        statname = 'Independent samples T-test.'
        outputpairedstats(printit,writemode,
                          name1,n1,x1,v1,N.minimum.reduce(N.ravel(a)),
                          N.maximum.reduce(N.ravel(a)),
                          name2,n2,x2,v2,N.minimum.reduce(N.ravel(b)),
                          N.maximum.reduce(N.ravel(b)),
                          statname,t,probs)
        return
    return t, probs
def ap2t(pval,df):
"""
Tries to compute a t-value from a p-value (or pval array) and associated df.
SLOW for large numbers of elements(!) as it re-computes p-values 20 times
(smaller step-sizes) at which point it decides it's done. Keeps the signs
of the input array. Returns 1000 (or -1000) if t>100.
Usage: ap2t(pval,df)
Returns: an array of t-values with the shape of pval
"""
pval = N.array(pval)
signs = N.sign(pval)
pval = abs(pval)
t = N.ones(pval.shape,N.float_)*50
step = N.ones(pval.shape,N.float_)*25
print "Initial ap2t() prob calc"
prob = abetai(0.5*df,0.5,float(df)/(df+t*t))
print 'ap2t() iter: ',
for i in range(10):
print i,' ',
t = N.where(pval<prob,t+step,t-step)
prob = abetai(0.5*df,0.5,float(df)/(df+t*t))
step = step/2
print
# since this is an ugly hack, we get ugly boundaries
t = N.where(t>99.9,1000,t) # hit upper-boundary
t = t+signs
return t #, prob, pval
def attest_rel (a,b,dimension=None,printit=0,name1='Samp1',name2='Samp2',writemode='a'):
    """
    Calculates the t-obtained T-test on TWO RELATED samples of scores, a
    and b.  From Numerical Recipies, p.483.  If printit=1, results are
    printed to the screen.  If printit='filename', the results are output
    to 'filename' using the given writemode (default=append).  Dimension
    can equal None (ravel array first), or an integer (the dimension over
    which to operate on a and b).  NOTE: when printit is non-zero the
    function prints/writes the stats and returns None (historical behavior).

    Usage:   attest_rel(a,b,dimension=None,printit=0,
                        name1='Samp1',name2='Samp2',writemode='a')
    Returns: t-value, two-tailed p-value (or None when printit is non-zero)
    Raises:  ValueError if a and b differ in length
    """
    if dimension == None:
        a = N.ravel(a)
        b = N.ravel(b)
        dimension = 0
    if len(a) != len(b):     # '!=' instead of the deprecated '<>'
        raise ValueError('Unequal length arrays.')
    x1 = amean(a,dimension)
    x2 = amean(b,dimension)
    v1 = avar(a,dimension)
    v2 = avar(b,dimension)
    n = a.shape[dimension]
    df = float(n-1)
    d = (a-b).astype('d')

    denom = N.sqrt((n*N.add.reduce(d*d,dimension) - N.add.reduce(d,dimension)**2) /df)
    zerodivproblem = N.equal(denom,0)
    denom = N.where(zerodivproblem,1,denom)   # avoid zero-division in 1st place
    t = N.add.reduce(d,dimension) / denom     # N-D COMPUTATION HERE!!!!!!
    t = N.where(zerodivproblem,1.0,t)         # replace NaN/wrong t-values with 1.0
    probs = abetai(0.5*df,0.5,float(df)/(df+t*t))
    if type(t) == N.ndarray:
        probs = N.reshape(probs,t.shape)
    if probs.shape == (1,):
        probs = probs[0]

    if printit != 0:     # '!=' instead of the deprecated '<>'
        statname = 'Related samples T-test.'
        outputpairedstats(printit,writemode,
                          name1,n,x1,v1,N.minimum.reduce(N.ravel(a)),
                          N.maximum.reduce(N.ravel(a)),
                          name2,n,x2,v2,N.minimum.reduce(N.ravel(b)),
                          N.maximum.reduce(N.ravel(b)),
                          statname,t,probs)
        return
    return t, probs
def achisquare(f_obs,f_exp=None):
    """
    Calculates a one-way chi square for array of observed frequencies and
    returns the result.  If no expected frequencies are given, the total N
    is assumed to be equally distributed across all groups.
    @@@NOT RIGHT??

    Usage:   achisquare(f_obs, f_exp=None)   f_obs = array of observed cell freq.
    Returns: chisquare-statistic, associated p-value
    """
    k = len(f_obs)
    # identity test: '== None' is an elementwise (ambiguous) comparison
    # when an expected-frequency array is passed
    if f_exp is None:
        f_exp = N.array([sum(f_obs)/float(k)] * len(f_obs),N.float_)
    f_exp = f_exp.astype(N.float_)
    chisq = N.add.reduce((f_obs-f_exp)**2 / f_exp)
    return chisq, achisqprob(chisq, k-1)
def aks_2samp (data1,data2):
    """
    Computes the Kolmogorov-Smirnof statistic on 2 samples.  Modified from
    Numerical Recipies in C, page 493.  Returns KS D-value, prob.  Not
    ufunc-like.

    Usage:   aks_2samp(data1,data2)  where data1 and data2 are 1D arrays
    Returns: KS D-value, p-value
    """
    # two-pointer merge over both sorted samples, tracking the maximum
    # gap between the empirical CDFs
    j1 = 0    # N.zeros(data1.shape[1:]) TRIED TO MAKE THIS UFUNC-LIKE
    j2 = 0    # N.zeros(data2.shape[1:])
    fn1 = 0.0 # N.zeros(data1.shape[1:],N.float_)
    fn2 = 0.0 # N.zeros(data2.shape[1:],N.float_)
    n1 = data1.shape[0]
    n2 = data2.shape[0]
    en1 = n1*1
    en2 = n2*1
    d = N.zeros(data1.shape[1:],N.float_)   # running max CDF difference
    data1 = N.sort(data1,0)
    data2 = N.sort(data2,0)
    while j1 < n1 and j2 < n2:
        d1=data1[j1]
        d2=data2[j2]
        if d1 <= d2:
            # advance sample-1 pointer and its empirical CDF
            fn1 = (j1)/float(en1)
            j1 = j1 + 1
        if d2 <= d1:
            # advance sample-2 pointer (both advance on equal values)
            fn2 = (j2)/float(en2)
            j2 = j2 + 1
        dt = (fn2-fn1)
        if abs(dt) > abs(d):
            d = dt
#    try:
    # asymptotic significance per NR's kstwo effective-N formula
    en = math.sqrt(en1*en2/float(en1+en2))
    prob = aksprob((en+0.12+0.11/en)*N.fabs(d))
#    except:
#        prob = 1.0
    return d, prob
def amannwhitneyu(x,y):
    """
    Calculates a Mann-Whitney U statistic on the provided scores and
    returns the result.  Use only when the n in each condition is < 20 and
    you have 2 independent samples of ranks.  REMEMBER: Mann-Whitney U is
    significant if the u-obtained is LESS THAN or equal to the critical
    value of U.

    Usage:   amannwhitneyu(x,y)  where x,y are arrays of values for 2 conditions
    Returns: u-statistic, one-tailed p-value (i.e., p(z(U)))
    Raises:  ValueError when every value is tied (tie correction is 0)
    """
    n1 = len(x)
    n2 = len(y)
    ranked = rankdata(N.concatenate((x,y)))
    rankx = ranked[0:n1]       # get the x-ranks
    ranky = ranked[n1:]        # the rest are y-ranks
    u1 = n1*n2 + (n1*(n1+1))/2.0 - sum(rankx)  # calc U for x
    u2 = n1*n2 - u1                            # remainder is U for y
    bigu = max(u1,u2)
    smallu = min(u1,u2)
    T = math.sqrt(tiecorrect(ranked))  # correction factor for tied scores
    if T == 0:
        raise ValueError('All numbers are identical in amannwhitneyu')
    sd = math.sqrt(T*n1*n2*(n1+n2+1)/12.0)
    z = abs((bigu-n1*n2/2.0) / sd)     # normal approximation for prob calc
    return smallu, 1.0 - azprob(z)
def atiecorrect(rankvals):
    """
    Tie-corrector for ties in Mann Whitney U and Kruskal Wallis H tests.
    See Siegel, S. (1956) Nonparametric Statistics for the Behavioral
    Sciences.  New York: McGraw-Hill.  Code adapted from |Stat rankind.c
    code.

    Usage:   atiecorrect(rankvals)
    Returns: T correction factor for U or H
    """
    ranks, posn = ashellsort(N.array(rankvals))
    total = len(ranks)
    T = 0.0
    idx = 0
    while idx < total-1:
        if ranks[idx] == ranks[idx+1]:
            # walk to the end of this run of tied ranks
            runlen = 1
            while idx < total-1 and ranks[idx] == ranks[idx+1]:
                runlen = runlen + 1
                idx = idx + 1
            T = T + runlen**3 - runlen
        idx = idx + 1
    T = T / float(total**3 - total)
    return 1.0 - T
def aranksums(x,y):
    """
    Calculates the rank sums statistic on the provided scores and returns
    the result.

    Usage:   aranksums(x,y)  where x,y are arrays of values for 2 conditions
    Returns: z-statistic, two-tailed p-value
    """
    nx = len(x)
    ny = len(y)
    allranks = arankdata(N.concatenate((x,y)))
    xranksum = sum(allranks[:nx])
    # compare the observed x rank-sum against its null expectation
    expected = nx*(nx+ny+1) / 2.0
    z = (xranksum - expected) / math.sqrt(nx*ny*(nx+ny+1)/12.0)
    prob = 2*(1.0 - azprob(abs(z)))
    return z, prob
def awilcoxont(x,y):
    """
    Calculates the Wilcoxon T-test for related samples and returns the
    result.  A non-parametric T-test.

    Usage:   awilcoxont(x,y)  where x,y are equal-length arrays for 2 conditions
    Returns: t-statistic, two-tailed p-value
    Raises:  ValueError if x and y differ in length
    """
    if len(x) != len(y):     # '!=' instead of the deprecated '<>'
        raise ValueError('Unequal N in awilcoxont.  Aborting.')
    d = x-y
    d = N.compress(N.not_equal(d,0),d)  # Keep all non-zero differences
    count = len(d)
    absd = abs(d)
    absranked = arankdata(absd)
    r_plus = 0.0
    r_minus = 0.0
    for i in range(len(absd)):
        if d[i] < 0:
            r_minus = r_minus + absranked[i]
        else:
            r_plus = r_plus + absranked[i]
    wt = min(r_plus, r_minus)
    mn = count * (count+1) * 0.25
    se = math.sqrt(count*(count+1)*(2.0*count+1.0)/24.0)
    z = math.fabs(wt-mn) / se    # (duplicate assignment of z removed)
    # azprob (array version) for consistency with aranksums; was zprob
    prob = 2*(1.0 - azprob(abs(z)))
    return wt, prob
def akruskalwallish(*args):
    """
    The Kruskal-Wallis H-test is a non-parametric ANOVA for 3 or more
    groups, requiring at least 5 subjects in each group.  This function
    calculates the Kruskal-Wallis H and associated p-value for 3 or more
    independent samples.

    Usage:   akruskalwallish(*args)  args are separate arrays for 3+ conditions
    Returns: H-statistic (corrected for ties), associated p-value
    Raises:  ValueError for fewer than 3 groups, or all-identical values
    """
    # was: assert len(args) == 3 -- that contradicted the documented
    # "3 or more" and would vanish under python -O; validate explicitly
    if len(args) < 3:
        raise ValueError("Need at least 3 groups in stats.akruskalwallish()")
    args = list(args)
    n = map(len,args)
    all = []
    for i in range(len(args)):
        all = all + args[i].tolist()
    ranked = rankdata(all)
    T = tiecorrect(ranked)
    # carve the pooled ranks back into the original groups
    for i in range(len(args)):
        args[i] = ranked[0:n[i]]
        del ranked[0:n[i]]
    rsums = []
    for i in range(len(args)):
        rsums.append(sum(args[i])**2)
        rsums[i] = rsums[i] / float(n[i])
    ssbn = sum(rsums)
    totaln = sum(n)
    h = 12.0 / (totaln*(totaln+1)) * ssbn - 3*(totaln+1)
    df = len(args) - 1
    if T == 0:
        raise ValueError('All numbers are identical in akruskalwallish')
    h = h / float(T)     # tie correction
    return h, chisqprob(h,df)
def afriedmanchisquare(*args):
    """
    Friedman Chi-Square is a non-parametric, one-way within-subjects ANOVA.
    This function calculates the Friedman Chi-square test for repeated
    measures and returns the result, along with the associated probability
    value.  It assumes 3 or more repeated measures.  Only 3 levels requires
    a minimum of 10 subjects in the study.  Four levels requires 5 subjects
    per level(??).

    Usage:   afriedmanchisquare(*args)  args are separate arrays for 2+ conditions
    Returns: chi-square statistic, associated p-value
    Raises:  ValueError for fewer than 3 conditions
    """
    k = len(args)
    if k < 3:
        raise ValueError('\nLess than 3 levels.  Friedman test not appropriate.\n')
    n = len(args[0])
    data = pstat.aabut(*args)    # '*' unpacking; apply() is deprecated
    data = data.astype(N.float_)
    for i in range(len(data)):
        data[i] = arankdata(data[i])   # rank within each subject (row)
    # sum of squared per-condition rank sums.  Was asum(asum(args,1)**2),
    # which summed the RAW scores and ignored the ranking step entirely.
    ssbn = asum(asum(data,0)**2)
    chisq = 12.0 / (k*n*(k+1)) * ssbn - 3*n*(k+1)
    return chisq, achisqprob(chisq,k-1)
#####################################
#### APROBABILITY CALCULATIONS ####
#####################################
def achisqprob(chisq,df):
    """
    Returns the (1-tail) probability value associated with the provided
    chi-square value and df.  Heavily modified from chisq.c in Gary
    Perlman's |Stat.  Can handle multiple dimensions.

    Usage:   achisqprob(chisq,df)  chisq=chisquare stat., df=degrees of freedom
    """
    BIG = 200.0
    def ex(x):
        # exp() with the argument floored at -BIG to avoid underflow
        BIG = 200.0
        exponents = N.where(N.less(x,-BIG),-BIG,x)
        return N.exp(exponents)

    if type(chisq) == N.ndarray:
        arrayflag = 1
    else:
        arrayflag = 0
        chisq = N.array([chisq])
    if df < 1:
        # NOTE(review): N.float here vs N.float_ elsewhere -- N.float is
        # removed in modern numpy; confirm which numpy this targets
        return N.ones(chisq.shape,N.float)
    probs = N.zeros(chisq.shape,N.float_)
    probs = N.where(N.less_equal(chisq,0),1.0,probs)  # set prob=1 for chisq<0
    a = 0.5 * chisq
    if df > 1:
        y = ex(-a)
    if df%2 == 0:
        even = 1
        s = y*1
        s2 = s*1
    else:
        even = 0
        s = 2.0 * azprob(-N.sqrt(chisq))
        s2 = s*1
    if (df > 2):
        # series evaluation; elements "freeze" into a_big_frozen /
        # a_notbig_frozen once their term index passes chisq
        chisq = 0.5 * (df - 1.0)
        if even:
            z = N.ones(probs.shape,N.float_)
        else:
            z = 0.5 *N.ones(probs.shape,N.float_)
        if even:
            e = N.zeros(probs.shape,N.float_)
        else:
            e = N.log(N.sqrt(N.pi)) *N.ones(probs.shape,N.float_)
        c = N.log(a)
        mask = N.zeros(probs.shape)
        a_big = N.greater(a,BIG)
        a_big_frozen = -1 *N.ones(probs.shape,N.float_)
        totalelements = N.multiply.reduce(N.array(probs.shape))
        # first pass: log-domain accumulation for the a>BIG elements
        while asum(mask)<>totalelements:
            e = N.log(z) + e
            s = s + ex(c*z-a-e)
            z = z + 1.0
#            print z, e, s
            newmask = N.greater(z,chisq)
            a_big_frozen = N.where(newmask*N.equal(mask,0)*a_big, s, a_big_frozen)
            mask = N.clip(newmask+mask,0,1)
        if even:
            z = N.ones(probs.shape,N.float_)
            e = N.ones(probs.shape,N.float_)
        else:
            z = 0.5 *N.ones(probs.shape,N.float_)
            e = 1.0 / N.sqrt(N.pi) / N.sqrt(a) * N.ones(probs.shape,N.float_)
        c = 0.0
        mask = N.zeros(probs.shape)
        a_notbig_frozen = -1 *N.ones(probs.shape,N.float_)
        # second pass: direct accumulation for the a<=BIG elements
        while asum(mask)<>totalelements:
            e = e * (a/z.astype(N.float_))
            c = c + e
            z = z + 1.0
#            print '#2', z, e, c, s, c*y+s2
            newmask = N.greater(z,chisq)
            a_notbig_frozen = N.where(newmask*N.equal(mask,0)*(1-a_big),
                                      c*y+s2, a_notbig_frozen)
            mask = N.clip(newmask+mask,0,1)
        # merge: keep the pre-set 1.0s, then select by which series applied
        probs = N.where(N.equal(probs,1),1,
                        N.where(N.greater(a,BIG),a_big_frozen,a_notbig_frozen))
        return probs
    else:
        return s
def aerfcc(x):
    """
    Complementary error function erfc(x), with fractional error below
    1.2e-7 everywhere (rational Chebyshev fit from Numerical Recipes).
    Works element-wise on arrays of any shape.

    Usage:   aerfcc(x)
    """
    absx = abs(x)
    u = 1.0 / (1.0 + 0.5*absx)
    # Horner evaluation of the fitted polynomial in u (innermost
    # coefficient first), then scaled by u*exp(-x^2 - 1.26551223 ...).
    poly = 0.17087277
    for c in (-0.82215223, 1.48851587, -1.13520398, 0.27886807,
              -0.18628806, 0.09678418, 0.37409196, 1.00002368):
        poly = poly*u + c
    approx = u * N.exp(-absx*absx - 1.26551223 + u*poly)
    # The fit covers x>=0; use the reflection erfc(-x) = 2 - erfc(x).
    return N.where(N.greater_equal(x, 0), approx, 2.0-approx)
def azprob(z):
    """
    Area under the standard normal curve 'to the left of' the given z
    value, element-wise over an array of z scores.  Thus,
        for z<0, zprob(z) = 1-tail probability
        for z>0, 1.0-zprob(z) = 1-tail probability
        for any z, 2.0*(1.0-zprob(abs(z))) = 2-tail probability
    Polynomial fits adapted from z.c in Gary Perlman's |Stat.

    Usage:   azprob(z)    where z is a z-value
    """
    # Both fits work on y = |z|/2: _central_fit covers |z| < 2 (argument
    # y*y), _tail_fit covers |z| >= 2 (argument y-2).
    def _tail_fit(y):
        acc = -0.000045255659
        for c in (0.000152529290, -0.000019538132, -0.000676904986,
                  0.001390604284, -0.000794620820, -0.002034254874,
                  0.006549791214, -0.010557625006, 0.011630447319,
                  -0.009279453341, 0.005353579108, -0.002141268741,
                  0.000535310849, 0.999936657524):
            acc = acc*y + c
        return acc

    def _central_fit(w):
        acc = 0.000124818987
        for c in (-0.001075204047, 0.005198775019, -0.019198292004,
                  0.059054035642, -0.151968751364, 0.319152932694,
                  -0.531923007300, 0.797884560593):
            acc = acc*w + c
        return acc * N.sqrt(w) * 2.0

    Z_MAX = 6.0    # maximum meaningful z-value; beyond it the area is 1
    halfabs = 0.5 * N.fabs(z)
    core = N.where(N.less(halfabs, 1.0),
                   _central_fit(halfabs*halfabs),
                   _tail_fit(halfabs-2.0))
    core = N.where(N.greater(halfabs, Z_MAX*0.5), 1.0, core)
    # core is the two-sided central area for |z|; fold in the sign of z.
    return N.where(N.greater(z, 0), (core+1)*0.5, (1-core)*0.5)
def aksprob(alam):
    """
    Returns the probability value for a K-S statistic computed via ks_2samp.
    Adapted from Numerical Recipies.  Can handle multiple dimensions.

    Usage:   aksprob(alam)
    """
    # Evaluates the alternating series
    #   Q(lam) = 2 * sum_{j>=1} (-1)^(j-1) * exp(-2 j^2 lam^2)
    # element-wise: `mask` marks elements whose series has converged and
    # `frozen` holds their final values (-1 = not yet converged).
    if type(alam) == N.ndarray:
        frozen = -1 *N.ones(alam.shape,N.float64)
        alam = alam.astype(N.float64)
        arrayflag = 1
    else:
        frozen = N.array(-1.)
        alam = N.array(alam,N.float64)
        # NOTE(review): arrayflag is set to 1 in this branch too, so the
        # scalar `frozen[0]` return below is unreachable; scalar input
        # comes back as a 0-d array (a 0-d array cannot be indexed [0]).
        arrayflag = 1
    mask = N.zeros(alam.shape)               # 1 = element has converged
    fac = 2.0 *N.ones(alam.shape,N.float_)   # alternating factor, +/-2
    sum = N.zeros(alam.shape,N.float_)       # running series sum
    termbf = N.zeros(alam.shape,N.float_)    # |previous term|
    a2 = N.array(-2.0*alam*alam,N.float64)
    totalelements = N.multiply.reduce(N.array(mask.shape))
    for j in range(1,201):
        if asum(mask) == totalelements:      # all elements converged
            break
        exponents = (a2*j*j)
        # exp() underflows near -746; freeze those elements at 0.
        overflowmask = N.less(exponents,-746)
        frozen = N.where(overflowmask,0,frozen)
        mask = mask+overflowmask
        term = fac*N.exp(exponents)
        sum = sum + term
        # Converged when the new term is negligible relative to the
        # previous term or to the accumulated sum (NR stopping criteria).
        newmask = N.where(N.less_equal(abs(term),(0.001*termbf)) +
                          N.less(abs(term),1.0e-8*sum), 1, 0)
        frozen = N.where(newmask*N.equal(mask,0), sum, frozen)
        mask = N.clip(mask+newmask,0,1)
        fac = -fac                           # alternate the series sign
        termbf = abs(term)
    if arrayflag:
        return N.where(N.equal(frozen,-1), 1.0, frozen)  # 1.0 if doesn't converge
    else:
        return N.where(N.equal(frozen,-1), 1.0, frozen)[0]  # 1.0 if doesn't converge
def afprob (dfnum, dfden, F):
    """
    One-tailed significance level (p-value) of an F statistic, given the
    degrees of freedom for the numerator (dfR-dfF) and the degrees of
    freedom for the denominator (dfF).  Can handle multiple dims for F.

    Usage:   afprob(dfnum, dfden, F)   where usually dfnum=dfbn, dfden=dfwn
    """
    # p = I_x(dfden/2, dfnum/2) with x = dfden / (dfden + dfnum*F).
    halfden = 0.5*dfden
    halfnum = 0.5*dfnum
    if type(F) == N.ndarray:
        xval = dfden/(1.0*dfden+dfnum*F)
    else:
        xval = dfden/float(dfden+dfnum*F)
    return abetai(halfden, halfnum, xval)
def abetacf(a,b,x,verbose=1):
    """
    Evaluates the continued fraction form of the incomplete Beta function,
    betai.  (Adapted from: Numerical Recipies in C.)  Can handle multiple
    dimensions for x.

    Usage:   abetacf(a,b,x,verbose=1)
    """
    ITMAX = 200      # maximum continued-fraction iterations
    EPS = 3.0e-7     # relative convergence target per element
    arrayflag = 1
    if type(x) == N.ndarray:
        frozen = N.ones(x.shape,N.float_) *-1  #start out w/ -1s, should replace all
    else:
        arrayflag = 0
        frozen = N.array([-1])
        x = N.array([x])
    mask = N.zeros(x.shape)      # 1 = element has converged
    bm = az = am = 1.0
    qab = a+b
    qap = a+1.0
    qam = a-1.0
    bz = 1.0-qab*x/qap
    # Continued-fraction recurrence (NR betacf): each pass applies the
    # even-term and odd-term updates, then renormalizes by bpp so the
    # denominators stay near 1.  Statement order is significant.
    for i in range(ITMAX+1):
        if N.sum(N.ravel(N.equal(frozen,-1)))==0:
            break        # every element has been frozen (converged)
        em = float(i+1)
        tem = em + em
        d = em*(b-em)*x/((qam+tem)*(a+tem))          # even step
        ap = az + d*am
        bp = bz+d*bm
        d = -(a+em)*(qab+em)*x/((qap+tem)*(a+tem))   # odd step
        app = ap+d*az
        bpp = bp+d*bz
        aold = az*1
        am = ap/bpp
        bm = bp/bpp
        az = app/bpp
        bz = 1.0
        # Freeze elements whose estimate moved by less than EPS (relative).
        newmask = N.less(abs(az-aold),EPS*abs(az))
        frozen = N.where(newmask*N.equal(mask,0), az, frozen)
        mask = N.clip(mask+newmask,0,1)
    noconverge = asum(N.equal(frozen,-1))
    if noconverge <> 0 and verbose:
        print 'a or b too big, or ITMAX too small in Betacf for ',noconverge,' elements'
    if arrayflag:
        return frozen
    else:
        return frozen[0]
def agammln(xx):
    """
    Natural log of the gamma function of xx, where
    Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt.
    Lanczos approximation adapted from Numerical Recipes in C.  Works
    element-wise on arrays as well as on scalars.

    Usage:   agammln(xx)
    """
    # Six-term Lanczos series coefficients.
    series_coeffs = (76.18009173, -86.50532033, 24.01409822, -1.231739516,
                     0.120858003e-2, -0.536382e-5)
    arg = xx - 1.0
    prefactor = arg + 5.5
    prefactor = prefactor - (arg+0.5)*N.log(prefactor)
    series = 1.0
    for coeff in series_coeffs:
        arg = arg + 1
        series = series + coeff/arg
    # 2.50662827465 is sqrt(2*pi).
    return -prefactor + N.log(2.50662827465*series)
def abetai(a,b,x,verbose=1):
    """
    Returns the incomplete beta function:
        I-sub-x(a,b) = 1/B(a,b)*(Integral(0,x) of t^(a-1)(1-t)^(b-1) dt)
    where a,b>0 and B(a,b) = G(a)*G(b)/(G(a+b)) where G(a) is the gamma
    function of a.  The continued fraction formulation is implemented
    here, using the betacf function.  (Adapted from: Numerical Recipies in
    C.)  Can handle multiple dimensions.

    Usage:   abetai(a,b,x,verbose=1)
    """
    TINY = 1e-15     # nudge x off the exact 0/1 endpoints
    if type(a) == N.ndarray:
        if asum(N.less(x,0)+N.greater(x,1)) <> 0:
            raise ValueError, 'Bad x in abetai'
    x = N.where(N.equal(x,0),TINY,x)
    x = N.where(N.equal(x,1.0),1-TINY,x)
    # NOTE(review): this bt is immediately overwritten by the exp() a few
    # lines below, so the 0/-1 marking has no effect -- apparently dead.
    bt = N.where(N.equal(x,0)+N.equal(x,1), 0, -1)
    # log of the prefactor x^a (1-x)^b / B(a,b), computed in log space
    # to avoid overflow.
    exponents = ( gammln(a+b)-gammln(a)-gammln(b)+a*N.log(x)+b*
                  N.log(1.0-x) )
    # 746 (below) is the MAX POSSIBLE BEFORE OVERFLOW
    exponents = N.where(N.less(exponents,-740),-740,exponents)
    bt = N.exp(exponents)
    # Use the continued fraction directly where it converges quickly,
    # otherwise the symmetry relation I_x(a,b) = 1 - I_(1-x)(b,a).
    if type(x) == N.ndarray:
        ans = N.where(N.less(x,(a+1)/(a+b+2.0)),
                      bt*abetacf(a,b,x,verbose)/float(a),
                      1.0-bt*abetacf(b,a,1.0-x,verbose)/float(b))
    else:
        if x<(a+1)/(a+b+2.0):
            ans = bt*abetacf(a,b,x,verbose)/float(a)
        else:
            ans = 1.0-bt*abetacf(b,a,1.0-x,verbose)/float(b)
    return ans
#####################################
####### AANOVA CALCULATIONS #######
#####################################
import LinearAlgebra, operator
LA = LinearAlgebra
def aglm(data,para):
    """
    Calculates a linear model fit ... anova/ancova/lin-regress/t-test/etc.  Taken
    from:
        Peterson et al. Statistical limitations in functional neuroimaging
        I. Non-inferential methods and statistical models.  Phil Trans Royal Soc
        Lond B 354: 1239-1260.

    Usage:   aglm(data,para)
    Returns: statistic, p-value ???
    """
    if len(para) <> len(data):
        print "data and para must be same length in aglm"
        return
    n = len(para)
    p = pstat.aunique(para)
    x = N.zeros((n,len(p)))  # design matrix
    # One dummy-coded (0/1) column per unique level of para.
    for l in range(len(p)):
        x[:,l] = N.equal(para,p[l])
    # Ordinary least squares fit of the group-membership model.
    b = N.dot(N.dot(LA.inv(N.dot(N.transpose(x),x)),  # i.e., b=inv(X'X)X'Y
                    N.transpose(x)),
              data)
    diffs = (data - N.dot(x,b))
    s_sq = 1./(n-len(p)) * N.dot(N.transpose(diffs), diffs)  # residual variance
    if len(p) == 2:  # ttest_ind
        c = N.array([1,-1])        # contrast between the two levels
        df = n-2
        fact = asum(1.0/asum(x,0))  # i.e., 1/n1 + 1/n2 + 1/n3 ...
        t = N.dot(c,b) / N.sqrt(s_sq*fact)
        probs = abetai(0.5*df,0.5,float(df)/(df+t*t))
        return t, probs
    # NOTE(review): with more than two levels nothing is returned (None);
    # only the two-group (t-test) case is implemented here.
def aF_oneway(*args):
    """
    Performs a 1-way ANOVA, returning an F-value and probability given
    any number of groups.  From Heiman, pp.394-7.

    Usage:   aF_oneway(*args)    where *args is 2 or more arrays,
                                 one per treatment group
    Returns: f-value, probability
    """
    # CLEANUP: the old code also computed per-group means/variances via
    # map(amean,...)/map(avar,...) into `means`, `vars`, `ns`, `tmp`, but
    # never used them; those dead locals have been removed.
    na = len(args)               # ANOVA on 'na' groups, each in its own array
    alldata = N.concatenate(args)
    bign = len(alldata)
    # Total sum of squares about the grand mean.
    sstot = ass(alldata)-(asquare_of_sums(alldata)/float(bign))
    # Between-groups sum of squares.
    ssbn = 0
    for a in args:
        ssbn = ssbn + asquare_of_sums(N.array(a))/float(len(a))
    ssbn = ssbn - (asquare_of_sums(alldata)/float(bign))
    # Within-groups (error) sum of squares.
    sswn = sstot-ssbn
    dfbn = na-1                  # between-groups degrees of freedom
    dfwn = bign - na             # within-groups degrees of freedom
    msb = ssbn/float(dfbn)       # between-groups mean square
    msw = sswn/float(dfwn)       # within-groups mean square
    f = msb/msw
    prob = fprob(dfbn,dfwn,f)
    return f, prob
def aF_value (ER,EF,dfR,dfF):
    """
    Returns an F-statistic given the following:
        ER  = error associated with the null hypothesis (the Restricted model)
        EF  = error associated with the alternate hypothesis (the Full model)
        dfR = degrees of freedom the Restricted model
        dfF = degrees of freedom associated with the Restricted model
    """
    num = (ER-EF)/float(dfR-dfF)   # error reduction per df gained
    den = EF/float(dfF)            # full-model error per df
    return num / den
def outputfstats(Enum, Eden, dfnum, dfden, f, prob):
    """
    Pretty-prints an F-test summary table (effect/error sums of squares,
    dfs, mean squares, F-value and p, with significance stars) via
    pstat.printcc.

    Usage:   outputfstats(Enum, Eden, dfnum, dfden, f, prob)
    """
    Enum = round(Enum,3)
    Eden = round(Eden,3)
    dfnum = round(dfnum,3)   # BUGFIX: was round(Enum,3), clobbering dfnum
    dfden = round(dfden,3)
    f = round(f,3)
    prob = round(prob,3)
    suffix = ''  # for *s after the p-value
    if prob < 0.001: suffix = ' ***'
    elif prob < 0.01: suffix = ' **'
    elif prob < 0.05: suffix = ' *'
    title = [['EF/ER','DF','Mean Square','F-value','prob','']]
    lofl = title+[[Enum, dfnum, round(Enum/float(dfnum),3), f, prob, suffix],
                  [Eden, dfden, round(Eden/float(dfden),3),'','','']]
    pstat.printcc(lofl)
    return
def F_value_multivariate(ER, EF, dfnum, dfden):
    """
    Returns an F-statistic given the following:
        ER  = error associated with the null hypothesis (the Restricted model)
        EF  = error associated with the alternate hypothesis (the Full model)
        dfR = degrees of freedom the Restricted model
        dfF = degrees of freedom associated with the Restricted model
    where ER and EF are matrices from a multivariate F calculation.
    """
    # Promote scalar errors to 1x1 matrices so LA.det applies uniformly.
    if type(ER) in [IntType, FloatType]:
        ER = N.array([[ER]])
    if type(EF) in [IntType, FloatType]:
        EF = N.array([[EF]])
    numerator = (LA.det(ER) - LA.det(EF)) / float(dfnum)
    denominator = LA.det(EF) / float(dfden)
    return numerator / denominator
#####################################
####### ASUPPORT FUNCTIONS ########
#####################################
def asign(a):
    """
    Element-wise sign function.

    Usage:   asign(a)
    Returns: array the shape of a, with -1 where a<0, 0 where a==0, and
             +1 where a>0.  (The old docstring claimed +1 for a>=0, but
             zeros have always mapped to 0.)
    """
    a = N.asarray(a)
    # CLEANUP: the old scalar branch (type(a) in (float, int)) was
    # unreachable after N.asarray() and computed the same values anyway,
    # so it has been removed.
    return N.zeros(N.shape(a))-N.less(a,0)+N.greater(a,0)
def asum (a, dimension=None,keepdims=0):
    """
    An alternative to the Numeric.add.reduce function, which allows one to
    (1) collapse over multiple dimensions at once, and/or (2) to retain
    all dimensions in the original array (squashing one down to size.
    Dimension can equal None (ravel array first), an integer (the
    dimension over which to operate), or a sequence (operate over multiple
    dimensions).  If keepdims=1, the resulting array will have as many
    dimensions as the input array.

    Usage:   asum(a, dimension=None, keepdims=0)
    Returns: array summed along 'dimension'(s), same _number_ of dims if keepdims=1
    """
    # Promote small integer dtypes to float first so the sum can't
    # overflow the narrow type.
    if type(a) == N.ndarray and a.dtype in [N.int_, N.short, N.ubyte]:
        a = a.astype(N.float64)
    if dimension == None:
        total = N.sum(N.ravel(a))
    elif type(dimension) in [IntType,FloatType]:
        total = N.add.reduce(a, dimension)
        if keepdims == 1:
            newshape = list(a.shape)
            newshape[dimension] = 1
            total = N.reshape(total,newshape)
    else:  # must be a SEQUENCE of dims to sum over
        axes = list(dimension)
        axes.sort()
        axes.reverse()           # collapse highest axis first
        total = a *1.0
        for axis in axes:
            total = N.add.reduce(total,axis)
        if keepdims == 1:
            newshape = list(a.shape)
            for axis in axes:
                newshape[axis] = 1
            total = N.reshape(total,newshape)
    return total
def acumsum (a,dimension=None):
    """
    Returns an array consisting of the cumulative sum of the items in the
    passed array.  Dimension can equal None (ravel array first), an
    integer (the dimension over which to operate), or a sequence (operate
    over multiple dimensions, but this last one just barely makes sense).

    Usage:   acumsum(a,dimension=None)
    """
    if dimension == None:
        a = N.ravel(a)
        dimension = 0
    if type(dimension) not in [ListType, TupleType, N.ndarray]:
        # Single axis: one accumulate call does it.
        return N.add.accumulate(a,dimension)
    # Sequence of axes: accumulate over each in turn, highest axis first.
    axes = list(dimension)
    axes.sort()
    axes.reverse()
    result = a
    for axis in axes:
        result = N.add.accumulate(result,axis)
    return result
def ass(inarray, dimension=None, keepdims=0):
    """
    Squares each value in the passed array, adds these squares & returns
    the result.  Unfortunate function name. :-)  Defaults to ALL values in
    the array.  Dimension can equal None (ravel array first), an integer
    (the dimension over which to operate), or a sequence (operate over
    multiple dimensions).  Set keepdims=1 to maintain the original number
    of dimensions.

    Usage:   ass(inarray, dimension=None, keepdims=0)
    Returns: sum-along-'dimension' for (inarray*inarray)
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    squared = inarray*inarray
    return asum(squared,dimension,keepdims)
def asummult (array1,array2,dimension=None,keepdims=0):
    """
    Multiplies elements in array1 and array2, element by element, and
    returns the sum (along 'dimension') of all resulting multiplications.
    Dimension can equal None (ravel array first), an integer (the
    dimension over which to operate), or a sequence (operate over multiple
    dimensions).  A trivial function, but included for completeness.

    Usage:   asummult(array1,array2,dimension=None,keepdims=0)
    """
    if dimension == None:
        array1 = N.ravel(array1)
        array2 = N.ravel(array2)
        dimension = 0
    products = array1*array2
    return asum(products,dimension,keepdims)
def asquare_of_sums(inarray, dimension=None, keepdims=0):
    """
    Adds the values in the passed array, squares that sum, and returns the
    result.  Dimension can equal None (ravel array first), an integer (the
    dimension over which to operate), or a sequence (operate over multiple
    dimensions).  If keepdims=1, the returned array will have the same
    NUMBER of dimensions as the original.

    Usage:   asquare_of_sums(inarray, dimension=None, keepdims=0)
    Returns: the square of the sum over dim(s) in dimension
    """
    if dimension == None:
        inarray = N.ravel(inarray)
        dimension = 0
    total = asum(inarray,dimension,keepdims)
    if type(total) == N.ndarray:
        # Cast to float before squaring to avoid integer overflow.
        return total.astype(N.float64)*total
    return float(total)*total
def asumdiffsquared(a,b, dimension=None, keepdims=0):
    """
    Takes pairwise differences of the values in arrays a and b, squares
    these differences, and returns the sum of these squares.  Dimension
    can equal None (ravel array first), an integer (the dimension over
    which to operate), or a sequence (operate over multiple dimensions).
    keepdims=1 means the return shape = len(a.shape) = len(b.shape)

    Usage:   asumdiffsquared(a,b, dimension=None, keepdims=0)
    Returns: sum[ravel(a-b)**2]
    """
    if dimension == None:
        # BUGFIX: the old code raveled `a` into an unused temporary, so
        # for multi-dimensional inputs the squares were only summed along
        # axis 0 instead of over all elements.  Ravel the difference
        # itself, as the docstring promises.
        return asum(N.ravel(a-b)**2,0,keepdims)
    return asum((a-b)**2,dimension,keepdims)
def ashellsort(inarray):
    """
    Shellsort algorithm.  Sorts a 1D-array.

    Usage:   ashellsort(inarray)
    Returns: sorted-inarray, sorting-index-vector (for original array)
    """
    n = len(inarray)
    sortvec = inarray *1.0           # float working copy that gets sorted
    order = list(range(n))           # order[i] = original index of sortvec[i]
    gap = n // 2  # integer division needed
    while gap > 0:
        for i in range(gap, n):
            for j in range(i-gap, -1, -gap):
                # One compare-and-swap per gap step; once no swap is
                # needed the element is in place for this gap.
                if sortvec[j] <= sortvec[j+gap]:
                    continue
                sortvec[j], sortvec[j+gap] = sortvec[j+gap], sortvec[j]
                order[j], order[j+gap] = order[j+gap], order[j]
        gap = gap // 2  # integer division needed
    # sortvec is now the sorted input; order satisfies
    # sortvec[i] = inarray[order[i]]
    return sortvec, order
def arankdata(inarray):
    """
    Ranks the data in inarray, dealing with ties appropritely.  Assumes
    a 1D inarray.  Adapted from Gary Perlman's |Stat ranksort.

    Usage:   arankdata(inarray)
    Returns: array of length equal to inarray, containing rank scores
    """
    n = len(inarray)
    sortedvals, origindex = ashellsort(inarray)
    ranks = N.zeros(n,N.float64)
    sumranks = 0
    dupcount = 0
    for i in range(n):
        sumranks = sumranks + i
        dupcount = dupcount + 1
        # Close out a run of tied values at the end of the array or when
        # the next sorted value differs; every member of the run receives
        # the mean of the ranks it spans.
        if i == n-1 or sortedvals[i] != sortedvals[i+1]:
            averank = sumranks / float(dupcount) + 1
            for j in range(i-dupcount+1, i+1):
                ranks[origindex[j]] = averank
            sumranks = 0
            dupcount = 0
    return ranks
def afindwithin(data):
    """
    Returns a binary vector, 1=within-subject factor, 0=between.  Input
    equals the entire data array (i.e., column 1=random factor, last
    column = measured values.

    Usage:   afindwithin(data)     data in |Stat format
    """
    numfact = len(data[0])-2   # number of factor columns (excl. subject col & score col)
    withinvec = [0]*numfact
    for col in range(1,numfact+1):
        # Restrict to a single level of this factor ...
        rows = pstat.linexand(data,col,pstat.unique(pstat.colex(data,1))[0])  # get 1 level of this factor
        # ... if any subject appears more than once at that level, the
        # factor varies within subjects.
        if len(pstat.unique(pstat.colex(rows,0))) < len(rows):  # if fewer subjects than scores on this factor
            withinvec[col-1] = 1
    return withinvec
#########################################################
#########################################################
######  RE-DEFINE DISPATCHES TO INCLUDE ARRAYS  #########
#########################################################
#########################################################

# Each public name below is bound to a Dispatch object that routes a call
# to the list/tuple implementation (l-prefixed) or to the array
# implementation (a-prefixed) based on the type of the first argument.
# Callers just use the bare name (mean, var, chisqprob, ...) and get the
# right backend.  NOTE: some of these (`sum`, `z`, possibly others)
# shadow Python builtins at module level.

## CENTRAL TENDENCY:
geometricmean = Dispatch ( (lgeometricmean, (ListType, TupleType)),
                           (ageometricmean, (N.ndarray,)) )
harmonicmean = Dispatch ( (lharmonicmean, (ListType, TupleType)),
                          (aharmonicmean, (N.ndarray,)) )
mean = Dispatch ( (lmean, (ListType, TupleType)),
                  (amean, (N.ndarray,)) )
median = Dispatch ( (lmedian, (ListType, TupleType)),
                    (amedian, (N.ndarray,)) )
medianscore = Dispatch ( (lmedianscore, (ListType, TupleType)),
                         (amedianscore, (N.ndarray,)) )
mode = Dispatch ( (lmode, (ListType, TupleType)),
                  (amode, (N.ndarray,)) )
# Trimmed statistics exist only for arrays (no l-versions).
tmean = Dispatch ( (atmean, (N.ndarray,)) )
tvar = Dispatch ( (atvar, (N.ndarray,)) )
tstdev = Dispatch ( (atstdev, (N.ndarray,)) )
tsem = Dispatch ( (atsem, (N.ndarray,)) )

## VARIATION:
moment = Dispatch ( (lmoment, (ListType, TupleType)),
                    (amoment, (N.ndarray,)) )
variation = Dispatch ( (lvariation, (ListType, TupleType)),
                       (avariation, (N.ndarray,)) )
skew = Dispatch ( (lskew, (ListType, TupleType)),
                  (askew, (N.ndarray,)) )
kurtosis = Dispatch ( (lkurtosis, (ListType, TupleType)),
                      (akurtosis, (N.ndarray,)) )
describe = Dispatch ( (ldescribe, (ListType, TupleType)),
                      (adescribe, (N.ndarray,)) )

## DISTRIBUTION TESTS
# NOTE: lists/tuples are routed to the a-prefixed implementations here
# as well -- there are no l-versions of these tests.
skewtest = Dispatch ( (askewtest, (ListType, TupleType)),
                      (askewtest, (N.ndarray,)) )
kurtosistest = Dispatch ( (akurtosistest, (ListType, TupleType)),
                          (akurtosistest, (N.ndarray,)) )
normaltest = Dispatch ( (anormaltest, (ListType, TupleType)),
                        (anormaltest, (N.ndarray,)) )

## FREQUENCY STATS:
itemfreq = Dispatch ( (litemfreq, (ListType, TupleType)),
                      (aitemfreq, (N.ndarray,)) )
scoreatpercentile = Dispatch ( (lscoreatpercentile, (ListType, TupleType)),
                               (ascoreatpercentile, (N.ndarray,)) )
percentileofscore = Dispatch ( (lpercentileofscore, (ListType, TupleType)),
                               (apercentileofscore, (N.ndarray,)) )
histogram = Dispatch ( (lhistogram, (ListType, TupleType)),
                       (ahistogram, (N.ndarray,)) )
cumfreq = Dispatch ( (lcumfreq, (ListType, TupleType)),
                     (acumfreq, (N.ndarray,)) )
relfreq = Dispatch ( (lrelfreq, (ListType, TupleType)),
                     (arelfreq, (N.ndarray,)) )

## VARIABILITY:
obrientransform = Dispatch ( (lobrientransform, (ListType, TupleType)),
                             (aobrientransform, (N.ndarray,)) )
samplevar = Dispatch ( (lsamplevar, (ListType, TupleType)),
                       (asamplevar, (N.ndarray,)) )
samplestdev = Dispatch ( (lsamplestdev, (ListType, TupleType)),
                         (asamplestdev, (N.ndarray,)) )
signaltonoise = Dispatch( (asignaltonoise, (N.ndarray,)),)
var = Dispatch ( (lvar, (ListType, TupleType)),
                 (avar, (N.ndarray,)) )
stdev = Dispatch ( (lstdev, (ListType, TupleType)),
                   (astdev, (N.ndarray,)) )
sterr = Dispatch ( (lsterr, (ListType, TupleType)),
                   (asterr, (N.ndarray,)) )
sem = Dispatch ( (lsem, (ListType, TupleType)),
                 (asem, (N.ndarray,)) )
z = Dispatch ( (lz, (ListType, TupleType)),
               (az, (N.ndarray,)) )
zs = Dispatch ( (lzs, (ListType, TupleType)),
                (azs, (N.ndarray,)) )

## TRIMMING FCNS:
threshold = Dispatch( (athreshold, (N.ndarray,)),)
trimboth = Dispatch ( (ltrimboth, (ListType, TupleType)),
                      (atrimboth, (N.ndarray,)) )
trim1 = Dispatch ( (ltrim1, (ListType, TupleType)),
                   (atrim1, (N.ndarray,)) )

## CORRELATION FCNS:
paired = Dispatch ( (lpaired, (ListType, TupleType)),
                    (apaired, (N.ndarray,)) )
lincc = Dispatch ( (llincc, (ListType, TupleType)),
                   (alincc, (N.ndarray,)) )
pearsonr = Dispatch ( (lpearsonr, (ListType, TupleType)),
                      (apearsonr, (N.ndarray,)) )
spearmanr = Dispatch ( (lspearmanr, (ListType, TupleType)),
                       (aspearmanr, (N.ndarray,)) )
pointbiserialr = Dispatch ( (lpointbiserialr, (ListType, TupleType)),
                            (apointbiserialr, (N.ndarray,)) )
kendalltau = Dispatch ( (lkendalltau, (ListType, TupleType)),
                        (akendalltau, (N.ndarray,)) )
linregress = Dispatch ( (llinregress, (ListType, TupleType)),
                        (alinregress, (N.ndarray,)) )

## INFERENTIAL STATS:
ttest_1samp = Dispatch ( (lttest_1samp, (ListType, TupleType)),
                         (attest_1samp, (N.ndarray,)) )
ttest_ind = Dispatch ( (lttest_ind, (ListType, TupleType)),
                       (attest_ind, (N.ndarray,)) )
ttest_rel = Dispatch ( (lttest_rel, (ListType, TupleType)),
                       (attest_rel, (N.ndarray,)) )
chisquare = Dispatch ( (lchisquare, (ListType, TupleType)),
                       (achisquare, (N.ndarray,)) )
ks_2samp = Dispatch ( (lks_2samp, (ListType, TupleType)),
                      (aks_2samp, (N.ndarray,)) )
mannwhitneyu = Dispatch ( (lmannwhitneyu, (ListType, TupleType)),
                          (amannwhitneyu, (N.ndarray,)) )
tiecorrect = Dispatch ( (ltiecorrect, (ListType, TupleType)),
                        (atiecorrect, (N.ndarray,)) )
ranksums = Dispatch ( (lranksums, (ListType, TupleType)),
                      (aranksums, (N.ndarray,)) )
wilcoxont = Dispatch ( (lwilcoxont, (ListType, TupleType)),
                       (awilcoxont, (N.ndarray,)) )
kruskalwallish = Dispatch ( (lkruskalwallish, (ListType, TupleType)),
                            (akruskalwallish, (N.ndarray,)) )
friedmanchisquare = Dispatch ( (lfriedmanchisquare, (ListType, TupleType)),
                               (afriedmanchisquare, (N.ndarray,)) )

## PROBABILITY CALCS:
# Scalars (ints/floats) go to the l-versions; arrays to the a-versions.
chisqprob = Dispatch ( (lchisqprob, (IntType, FloatType)),
                       (achisqprob, (N.ndarray,)) )
zprob = Dispatch ( (lzprob, (IntType, FloatType)),
                   (azprob, (N.ndarray,)) )
ksprob = Dispatch ( (lksprob, (IntType, FloatType)),
                    (aksprob, (N.ndarray,)) )
fprob = Dispatch ( (lfprob, (IntType, FloatType)),
                   (afprob, (N.ndarray,)) )
betacf = Dispatch ( (lbetacf, (IntType, FloatType)),
                    (abetacf, (N.ndarray,)) )
betai = Dispatch ( (lbetai, (IntType, FloatType)),
                   (abetai, (N.ndarray,)) )
erfcc = Dispatch ( (lerfcc, (IntType, FloatType)),
                   (aerfcc, (N.ndarray,)) )
gammln = Dispatch ( (lgammln, (IntType, FloatType)),
                    (agammln, (N.ndarray,)) )

## ANOVA FUNCTIONS:
F_oneway = Dispatch ( (lF_oneway, (ListType, TupleType)),
                      (aF_oneway, (N.ndarray,)) )
F_value = Dispatch ( (lF_value, (ListType, TupleType)),
                     (aF_value, (N.ndarray,)) )

## SUPPORT FUNCTIONS:
incr = Dispatch ( (lincr, (ListType, TupleType, N.ndarray)), )
sum = Dispatch ( (lsum, (ListType, TupleType)),
                 (asum, (N.ndarray,)) )
cumsum = Dispatch ( (lcumsum, (ListType, TupleType)),
                    (acumsum, (N.ndarray,)) )
ss = Dispatch ( (lss, (ListType, TupleType)),
                (ass, (N.ndarray,)) )
summult = Dispatch ( (lsummult, (ListType, TupleType)),
                     (asummult, (N.ndarray,)) )
square_of_sums = Dispatch ( (lsquare_of_sums, (ListType, TupleType)),
                            (asquare_of_sums, (N.ndarray,)) )
sumdiffsquared = Dispatch ( (lsumdiffsquared, (ListType, TupleType)),
                            (asumdiffsquared, (N.ndarray,)) )
shellsort = Dispatch ( (lshellsort, (ListType, TupleType)),
                       (ashellsort, (N.ndarray,)) )
rankdata = Dispatch ( (lrankdata, (ListType, TupleType)),
                      (arankdata, (N.ndarray,)) )
findwithin = Dispatch ( (lfindwithin, (ListType, TupleType)),
                        (afindwithin, (N.ndarray,)) )

######################  END OF NUMERIC FUNCTION BLOCK  #####################
###################### END OF STATISTICAL FUNCTIONS ######################
except ImportError:
pass
|
Esri/ArcREST | refs/heads/master | src/arcrest/manageags/_logs.py | 6 | from __future__ import absolute_import
from __future__ import print_function
from .._abstract.abstract import BaseAGSServer
from datetime import datetime
import csv, json
########################################################################
class Log(BaseAGSServer):
    """Wraps the 'logs' resource of an ArcGIS Server admin site.

    Exposes the resource's advertised operations/resources, the site-wide
    log settings, and helpers to query, export, and clean logs.
    """
    _url = None               # admin URL of the logs resource
    _securityHandler = None   # supplies credentials/tokens for requests
    _operations = None        # operations advertised by the resource
    _resources = None         # child resources advertised by the resource
    _proxy_port = None
    _proxy_url = None
    _json = None              # raw JSON string of the resource properties
    #----------------------------------------------------------------------
    def __init__(self, url, securityHandler,
                 proxy_url=None, proxy_port=None,
                 initialize=False):
        """Constructor
            Inputs:
               url - admin url of the logs resource
               securityHandler - Handler that handles site security
               proxy_url - optional proxy host
               proxy_port - optional proxy port
               initialize - if True, load the resource properties up front
        """
        # NOTE: the old docstring documented username/password parameters
        # that never existed; credentials come via securityHandler.
        self._proxy_port = proxy_port
        self._proxy_url = proxy_url
        self._url = url
        self._securityHandler = securityHandler
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """ populates server admin information """
        params = {
            "f" : "json"
        }
        json_dict = self._get(url=self._url, param_dict=params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
        self._json = json.dumps(json_dict)
        # Map each returned key onto its matching private attribute;
        # report unknown keys so new server properties get noticed.
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented in Logs.")
            del k
            del v
    #----------------------------------------------------------------------
    def __str__(self):
        """returns the object as a string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    @property
    def operations(self):
        """ returns the operations """
        if self._operations is None:
            self.__init()
        return self._operations
    #----------------------------------------------------------------------
    @property
    def resources(self):
        """ returns the log resources """
        if self._resources is None:
            self.__init()
        return self._resources
    #----------------------------------------------------------------------
    def countErrorReports(self, machine="*"):
        """ This operation counts the number of error reports (crash
            reports) that have been generated on each machine.
            Input:
               machine - name of the machine in the cluster.  * means all
                         machines.  This is default
            Output:
               dictionary with report count and machine name
        """
        params = {
            "f": "json",
            "machine" : machine
        }
        return self._post(url=self._url + "/countErrorReports",
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def clean(self):
        """ Deletes all the log files on all server machines in the site. """
        params = {
            "f" : "json",
        }
        return self._post(url=self._url + "/clean",
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    @property
    def logSettings(self):
        """ returns the current log settings """
        params = {
            "f" : "json"
        }
        sURL = self._url + "/settings"
        return self._get(url=sURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)['settings']
    #----------------------------------------------------------------------
    def editLogSettings(self,
                        logLevel="WARNING",
                        logDir=None,
                        maxLogFileAge=90,
                        maxErrorReportsCount=10):
        """
        The log settings are for the entire site.
        Inputs:
           logLevel -  Can be one of [OFF, SEVERE, WARNING, INFO, FINE,
                       VERBOSE, DEBUG].  An unrecognized value leaves the
                       current server setting unchanged.
           logDir - File path to the root of the log directory
           maxLogFileAge - number of days that a server should save a log
                           file.
           maxErrorReportsCount - maximum number of error report files
                                  per machine (must be > 0 to be applied)
        """
        lURL = self._url + "/settings/edit"
        allowed_levels = ("OFF", "SEVERE", "WARNING", "INFO", "FINE", "VERBOSE", "DEBUG")
        # Start from the live settings so unspecified values are preserved.
        currentSettings= self.logSettings
        currentSettings["f"] ="json"
        if logLevel.upper() in allowed_levels:
            currentSettings['logLevel'] = logLevel.upper()
        if logDir is not None:
            currentSettings['logDir'] = logDir
        if maxLogFileAge is not None and \
           isinstance(maxLogFileAge, int):
            currentSettings['maxLogFileAge'] = maxLogFileAge
        if maxErrorReportsCount is not None and \
           isinstance(maxErrorReportsCount, int) and\
           maxErrorReportsCount > 0:
            currentSettings['maxErrorReportsCount'] = maxErrorReportsCount
        return self._post(url=lURL, param_dict=currentSettings,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def query(self,
              startTime=None,
              endTime=None,
              sinceServerStart=False,
              level="WARNING",
              services="*",
              machines="*",
              server="*",
              codes=[],
              processIds=[],
              export=False,
              exportType="CSV", #CSV or TAB
              out_path=None
              ):
        """
        The query operation on the logs resource provides a way to
        aggregate, filter, and page through logs across the entire site.
        Inputs:
           startTime - datetime; earliest log entry to return
           endTime - datetime; latest log entry to return
           sinceServerStart - if True, query logs written since the
                              server started
           level - one of SEVERE, WARNING, INFO, FINE, VERBOSE, DEBUG;
                   anything else leaves the server's default in place
           services - "*" for all, or comma-separated service names
           machines - "*" for all, or comma-separated machine names
           server - "*" for all, or comma-separated server log sources
           codes - optional list of log codes to match
           processIds - optional list of process ids to match
           export - if True (and out_path given), write the messages to
                    a delimited text file and return its path
           exportType - "CSV" or "TAB" delimited export
           out_path - file path for the exported log file
        """
        allowed_levels = ("SEVERE", "WARNING", "INFO",
                          "FINE", "VERBOSE", "DEBUG")
        qFilter = {
            "services": "*",
            "machines": "*",
            "server" : "*"
        }
        if len(processIds) > 0:
            qFilter['processIds'] = processIds
        if len(codes) > 0:
            qFilter['codes'] = codes
        params = {
            "f" : "json",
            "sinceServerStart" : sinceServerStart,
            "pageSize" : 10000
        }
        if startTime is not None and \
           isinstance(startTime, datetime):
            params['startTime'] = startTime.strftime("%Y-%m-%dT%H:%M:%S")
        if endTime is not None and \
           isinstance(endTime, datetime):
            params['endTime'] = endTime.strftime("%Y-%m-%dT%H:%M:%S")
        if level.upper() in allowed_levels:
            params['level'] = level
        if server != "*":
            qFilter['server'] = server.split(',')
        if services != "*":
            qFilter['services'] = services.split(',')
        if machines != "*":
            qFilter['machines'] = machines.split(",")
        params['filter'] = qFilter
        if export == True and \
           out_path is not None:
            messages = self._post(self._url + "/query", params,
                                  securityHandler=self._securityHandler,
                                  proxy_url=self._proxy_url,
                                  proxy_port=self._proxy_port)
            # BUGFIX: open() was called as open(name=out_path, ...), but
            # the keyword is 'file' in Python 3 (TypeError).  Pass the
            # path positionally, which works on both Python 2 and 3.
            with open(out_path, mode='wb') as f:
                hasKeys = False
                if exportType == "TAB":
                    csvwriter = csv.writer(f, delimiter='\t')
                else:
                    csvwriter = csv.writer(f)
                for message in messages['logMessages']:
                    if hasKeys == False:
                        # Header row comes from the first message's keys.
                        csvwriter.writerow(message.keys())
                        hasKeys = True
                    csvwriter.writerow(message.values())
                    del message
            del messages
            return out_path
        else:
            return self._post(self._url + "/query", params,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
|
Eforcers/inbox-cleaner | refs/heads/master | src/lib/requests/packages/charade/langhebrewmodel.py | 2762 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
# Bundle of the windows-1255 (Hebrew) detection model data defined above.
Win1255HebrewModel = {
  'charToOrderMap': win1255_CharToOrderMap,  # byte value -> frequency-order index
  'precedenceMatrix': HebrewLangModel,       # character-pair sequence likelihoods
  'mTypicalPositiveRatio': 0.984004,         # see the "first 512 sequences" note above
  'keepEnglishLetter': False,
  'charsetName': "windows-1255"
}
# flake8: noqa
|
goodwinnk/intellij-community | refs/heads/master | python/testData/refactoring/extractsuperclass/moveExtends/source_module.after.py | 320 | from dest_module import NewParent
# NOTE(review): this file is IDE refactoring test data ("after" fixture);
# MyClass is expected to extend NewParent after the extract-superclass move.
class MyClass(NewParent):
    pass
silveregg/moto | refs/heads/master | tests/test_ec2/test_key_pairs.py | 2 | from __future__ import unicode_literals
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
import boto
import six
import sure # noqa
from boto.exception import EC2ResponseError, JSONResponseError
from moto import mock_ec2
@mock_ec2
def test_key_pairs_empty():
    """A fresh mocked account starts with no key pairs."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    assert len(conn.get_all_key_pairs()) == 0
@mock_ec2
def test_key_pairs_invalid_id():
    """Looking up an unknown key pair name raises InvalidKeyPair.NotFound."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    with assert_raises(EC2ResponseError) as cm:
        conn.get_all_key_pairs('foo')
    cm.exception.code.should.equal('InvalidKeyPair.NotFound')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
@mock_ec2
def test_key_pairs_create():
    """create_key_pair honours dry_run and returns PEM material on a real run."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    with assert_raises(JSONResponseError) as ex:
        kp = conn.create_key_pair('foo', dry_run=True)
    ex.exception.reason.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal('An error occurred (DryRunOperation) when calling the CreateKeyPair operation: Request would have succeeded, but DryRun flag is set')
    kp = conn.create_key_pair('foo')
    assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
    kps = conn.get_all_key_pairs()
    assert len(kps) == 1
    assert kps[0].name == 'foo'
@mock_ec2
def test_key_pairs_create_two():
    """Two key pairs can coexist, and get_all_key_pairs can filter by name."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    kp = conn.create_key_pair('foo')
    kp = conn.create_key_pair('bar')
    assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
    kps = conn.get_all_key_pairs()
    kps.should.have.length_of(2)
    [i.name for i in kps].should.contain('foo')
    [i.name for i in kps].should.contain('bar')
    kps = conn.get_all_key_pairs('foo')
    kps.should.have.length_of(1)
    kps[0].name.should.equal('foo')
@mock_ec2
def test_key_pairs_create_exist():
    """Creating a key pair with a duplicate name raises InvalidKeyPair.Duplicate."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    kp = conn.create_key_pair('foo')
    assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
    assert len(conn.get_all_key_pairs()) == 1
    with assert_raises(EC2ResponseError) as cm:
        conn.create_key_pair('foo')
    cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
@mock_ec2
def test_key_pairs_delete_no_exist():
    """Deleting a nonexistent key pair still returns success."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    assert len(conn.get_all_key_pairs()) == 0
    r = conn.delete_key_pair('foo')
    r.should.be.ok
@mock_ec2
def test_key_pairs_delete_exist():
    """delete_key_pair honours dry_run and removes the pair on a real run."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    conn.create_key_pair('foo')
    with assert_raises(JSONResponseError) as ex:
        r = conn.delete_key_pair('foo', dry_run=True)
    ex.exception.reason.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal('An error occurred (DryRunOperation) when calling the DeleteKeyPair operation: Request would have succeeded, but DryRun flag is set')
    r = conn.delete_key_pair('foo')
    r.should.be.ok
    assert len(conn.get_all_key_pairs()) == 0
@mock_ec2
def test_key_pairs_import():
    """import_key_pair honours dry_run and registers the named key."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    with assert_raises(JSONResponseError) as ex:
        kp = conn.import_key_pair('foo', b'content', dry_run=True)
    ex.exception.reason.should.equal('DryRunOperation')
    ex.exception.status.should.equal(400)
    ex.exception.message.should.equal('An error occurred (DryRunOperation) when calling the ImportKeyPair operation: Request would have succeeded, but DryRun flag is set')
    kp = conn.import_key_pair('foo', b'content')
    assert kp.name == 'foo'
    kps = conn.get_all_key_pairs()
    assert len(kps) == 1
    assert kps[0].name == 'foo'
@mock_ec2
def test_key_pairs_import_exist():
    """After an import, creating a key pair with the same name raises
    InvalidKeyPair.Duplicate."""
    conn = boto.connect_ec2('the_key', 'the_secret')
    kp = conn.import_key_pair('foo', b'content')
    assert kp.name == 'foo'
    assert len(conn.get_all_key_pairs()) == 1
    with assert_raises(EC2ResponseError) as cm:
        conn.create_key_pair('foo')
    cm.exception.code.should.equal('InvalidKeyPair.Duplicate')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
|
nelson-liu/scikit-learn | refs/heads/master | sklearn/svm/bounds.py | 36 | """Determination of parameter bounds"""
# Author: Paolo Losi
# License: BSD 3 clause
from warnings import warn
import numpy as np
from ..preprocessing import LabelBinarizer
from ..utils.validation import check_consistent_length, check_array
from ..utils.extmath import safe_sparse_dot
def l1_min_c(X, y, loss='squared_hinge', fit_intercept=True,
             intercept_scaling=1.0):
    """
    Return the lowest bound for C such that for C in (l1_min_C, infinity)
    the model is guaranteed not to be empty. This applies to l1 penalized
    classifiers, such as LinearSVC with penalty='l1' and
    linear_model.LogisticRegression with penalty='l1'.

    This value is valid if class_weight parameter in fit() is not set.

    Parameters
    ----------
    X : array-like or sparse matrix, shape = [n_samples, n_features]
        Training vector, where n_samples in the number of samples and
        n_features is the number of features.

    y : array, shape = [n_samples]
        Target vector relative to X

    loss : {'squared_hinge', 'log'}, default 'squared_hinge'
        Specifies the loss function.
        With 'squared_hinge' it is the squared hinge loss (a.k.a. L2 loss).
        With 'log' it is the loss of logistic regression models.
        'l2' is accepted as an alias for 'squared_hinge', for backward
        compatibility reasons, but should not be used in new code.

    fit_intercept : bool, default: True
        Specifies if the intercept should be fitted by the model.
        It must match the fit() method parameter.

    intercept_scaling : float, default: 1
        when fit_intercept is True, instance vector x becomes
        [x, intercept_scaling],
        i.e. a "synthetic" feature with constant value equals to
        intercept_scaling is appended to the instance vector.
        It must match the fit() method parameter.

    Returns
    -------
    l1_min_c : float
        minimum value for C
    """
    if loss == 'l2':
        # The docstring promises 'l2' as a backward-compatible alias
        # (and `warn` is imported for exactly this), but the previous
        # code rejected it.  Accept it with a deprecation warning.
        warn("loss='l2' is deprecated; use loss='squared_hinge' instead.",
             DeprecationWarning)
        loss = 'squared_hinge'
    if loss not in ('squared_hinge', 'log'):
        raise ValueError('loss type not in ("squared_hinge", "log", "l2")')

    X = check_array(X, accept_sparse='csc')
    check_consistent_length(X, y)

    Y = LabelBinarizer(neg_label=-1).fit_transform(y).T
    # maximum absolute value over classes and features
    den = np.max(np.abs(safe_sparse_dot(Y, X)))
    if fit_intercept:
        bias = intercept_scaling * np.ones((np.size(y), 1))
        den = max(den, abs(np.dot(Y, bias)).max())

    if den == 0.0:
        raise ValueError('Ill-posed l1_min_c calculation: l1 will always '
                         'select zero coefficients for this data')
    if loss == 'squared_hinge':
        return 0.5 / den
    else:  # loss == 'log':
        return 2.0 / den
|
yoe/veyepar | refs/heads/master | dj/scripts/fix.py | 3 | #!/usr/bin/python
# fix.py - mass fix some goof
from process import process
from main.models import Show, Location, Episode, Raw_File, Cut_List
class fix(process):
    """One-off maintenance pass that resets selected episodes' state to 4.

    NOTE(review): the original inline comment said "bump everything from
    5 back to 4", but ready_state is 7 -- confirm which state this run
    is actually meant to select.
    """
    # Episodes currently in this state are picked up for processing.
    ready_state = 7

    def process_ep(self, ep):
        """Reset one episode's state to 4 and save it; return False so
        the framework does not advance the state afterwards."""
        if self.options.verbose: print(ep.id, ep.name)
        ep.state = 4
        ep.save()
        ret = False # False else it will bump it +1)
        return ret
if __name__ == '__main__':
    # Run the batch fix from the command line.
    p=fix()
    p.main()
|
tkelman/utf8rewind | refs/heads/master | tools/gyp/test/actions/src/subdir3/generate_main.py | 125 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = """
#include <stdio.h>
int main(void)
{
printf("Hello from generate_main.py\\n");
return 0;
}
"""
open(sys.argv[1], 'w').write(contents)
sys.exit(0)
|
pkuyym/Paddle | refs/heads/develop | python/paddle/fluid/tests/unittests/test_cast_op.py | 5 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import op_test
import unittest
import numpy as np
import paddle.fluid.core as core
class TestCastOp1(op_test.OpTest):
    """Cast float32 -> float64: checks forward output and gradient."""
    def setUp(self):
        ipt = np.random.random(size=[10, 10])
        self.inputs = {'X': ipt.astype('float32')}
        self.outputs = {'Out': ipt.astype('float64')}
        self.attrs = {
            'in_dtype': int(core.VarDesc.VarType.FP32),
            'out_dtype': int(core.VarDesc.VarType.FP64)
        }
        self.op_type = 'cast'

    def test_check_output(self):
        self.check_output()

    def test_grad(self):
        self.check_grad(['X'], ['Out'])
class TestCastOp2(op_test.OpTest):
    """Cast float16 -> float32; fp16 inputs are fed as uint16 views."""
    def setUp(self):
        ipt = np.random.random(size=[10, 10])
        # numpy float16 is binded to fluid float16 via uint16
        self.inputs = {'X': ipt.astype('float16').view(np.uint16)}
        self.outputs = {'Out': ipt.astype('float32')}
        self.attrs = {
            'in_dtype': int(core.VarDesc.VarType.FP16),
            'out_dtype': int(core.VarDesc.VarType.FP32)
        }
        self.op_type = 'cast'

    def test_check_output(self):
        # Loose tolerance because of fp16 precision loss.
        self.check_output(atol=1e-3)
class TestCastOp3(op_test.OpTest):
    """Cast float32 -> float16; output compared with loose tolerance."""
    def setUp(self):
        ipt = np.random.random(size=[10, 10])
        self.inputs = {'X': ipt.astype('float32')}
        self.outputs = {'Out': ipt.astype('float16')}
        self.attrs = {
            'in_dtype': int(core.VarDesc.VarType.FP32),
            'out_dtype': int(core.VarDesc.VarType.FP16)
        }
        self.op_type = 'cast'

    def test_check_output(self):
        # Loose tolerance because of fp16 precision loss.
        self.check_output(atol=1e-3)
if __name__ == '__main__':
    # Run the cast-op tests directly.
    unittest.main()
|
bregman-arie/ansible | refs/heads/devel | lib/ansible/module_utils/openstack.py | 20 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from ansible.module_utils.six import iteritems
def openstack_argument_spec():
    # DEPRECATED: This argument spec is only used for the deprecated old
    # OpenStack modules. It turns out that modern OpenStack auth is WAY
    # more complex than this.

    # Pick up the standard OpenStack environment variables so ad-hoc
    # command-line use works without explicit parameters.
    env = os.environ
    auth_url = env.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
    password = env.get('OS_PASSWORD', None)
    region = env.get('OS_REGION_NAME', None)
    username = env.get('OS_USERNAME', 'admin')
    tenant = env.get('OS_TENANT_NAME', username)

    spec = dict(
        login_username=dict(default=username),
        auth_url=dict(default=auth_url),
        region_name=dict(default=region),
        availability_zone=dict(),
    )
    # Credentials found in the environment become defaults; otherwise
    # the caller must supply them.
    spec['login_password'] = dict(default=password) if password else dict(required=True)
    spec['login_tenant_name'] = dict(default=tenant) if tenant else dict(required=True)
    return spec
def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
    """Collect IP addresses from a Nova-style ``addresses`` mapping.

    If *key_name* names a network, every address on that network is
    returned; for all other networks, addresses whose
    ``OS-EXT-IPS:type`` equals *ext_tag* are collected.
    """
    found = []
    for (net_name, interfaces) in iteritems(addresses):
        if key_name and net_name == key_name:
            found.extend(entry['addr'] for entry in interfaces)
        else:
            for entry in interfaces:
                if 'OS-EXT-IPS:type' in entry and entry['OS-EXT-IPS:type'] == ext_tag:
                    found.append(entry['addr'])
    return found
def openstack_full_argument_spec(**kwargs):
    """Standard argument spec shared by the modern OpenStack modules.

    Caller-supplied entries in *kwargs* override or extend the defaults.
    """
    spec = {
        'cloud': dict(default=None, type='raw'),
        'auth_type': dict(default=None),
        'auth': dict(default=None, type='dict', no_log=True),
        'region_name': dict(default=None),
        'availability_zone': dict(default=None),
        'verify': dict(default=None, type='bool', aliases=['validate_certs']),
        'cacert': dict(default=None),
        'cert': dict(default=None),
        'key': dict(default=None, no_log=True),
        'wait': dict(default=True, type='bool'),
        'timeout': dict(default=180, type='int'),
        'api_timeout': dict(default=None, type='int'),
        'interface': dict(
            default='public', choices=['public', 'internal', 'admin'],
            aliases=['endpoint_type']),
    }
    spec.update(kwargs)
    return spec
def openstack_module_kwargs(**kwargs):
    """Filter *kwargs* down to the AnsibleModule keyword arguments the
    OpenStack modules share.

    Only 'mutually_exclusive', 'required_together' and 'required_one_of'
    are passed through; anything else is dropped.
    """
    # The original looped and re-checked "key in ret" to extend existing
    # entries, but ret starts empty and each key is visited exactly
    # once, so that branch was dead code; a simple filter is equivalent.
    passthrough = ('mutually_exclusive', 'required_together', 'required_one_of')
    return dict((key, kwargs[key]) for key in passthrough if key in kwargs)
def openstack_cloud_from_module(module, min_version=None):
    """Build a shade operator cloud from an AnsibleModule's parameters.

    Returns a ``(shade, cloud)`` tuple. Calls ``module.fail_json`` if
    shade is missing or older than *min_version*, or if a cloud config
    dict is combined with individual connection parameters.
    """
    from distutils.version import StrictVersion
    try:
        import shade
    except ImportError:
        module.fail_json(msg='shade is required for this module')

    if min_version:
        if StrictVersion(shade.__version__) < StrictVersion(min_version):
            module.fail_json(
                msg="To utilize this module, the installed version of"
                    "the shade library MUST be >={min_version}".format(
                        min_version=min_version))

    # pop() so 'cloud' is not passed twice to operator_cloud below.
    cloud_config = module.params.pop('cloud', None)
    if isinstance(cloud_config, dict):
        fail_message = (
            "A cloud config dict was provided to the cloud parameter"
            " but also a value was provided for {param}. If a cloud"
            " config dict is provided, {param} should be"
            " excluded.")
        # A full config dict is exclusive with the individual settings.
        for param in (
                'auth', 'region_name', 'verify',
                'cacert', 'key', 'api_timeout', 'interface'):
            if module.params[param] is not None:
                module.fail_json(fail_message.format(param=param))
        # 'auth_type' is checked separately: only 'password' is allowed
        # alongside a dict config.
        if module.params['auth_type'] != 'password':
            module.fail_json(fail_message.format(param='auth_type'))
        return shade, shade.operator_cloud(**cloud_config)
    else:
        return shade, shade.operator_cloud(
            cloud=cloud_config,
            auth_type=module.params['auth_type'],
            auth=module.params['auth'],
            region_name=module.params['region_name'],
            verify=module.params['verify'],
            cacert=module.params['cacert'],
            key=module.params['key'],
            api_timeout=module.params['api_timeout'],
            interface=module.params['interface'],
        )
|
release-monitoring/anitya | refs/heads/master | anitya/lib/backends/rubygems.py | 1 | # -*- coding: utf-8 -*-
"""
(c) 2014-2019 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <pingou@pingoured.fr>
Ralph Bean <rbean@redhat.com>
Michal Konecny <mkonecny@redhat.com>
"""
from anitya.lib.backends import BaseBackend
from anitya.lib.exceptions import AnityaPluginException
class RubygemsBackend(BaseBackend):
    """ The custom class for projects hosted on rubygems.org.

    This backend allows to specify a version_url and a regex that will
    be used to retrieve the version information. """

    name = "Rubygems"
    examples = ["https://rubygems.org/gems/aa", "https://rubygems.org/gems/bio"]

    @classmethod
    def get_version(cls, project):
        """ Method called to retrieve the latest version of the projects
        provided, project that relies on the backend of this plugin.

        :arg Project project: a :class:`anitya.db.models.Project` object whose backend
            corresponds to the current plugin.

        :return: the latest version found upstream
        :return type: str

        :raise AnityaPluginException: a
            :class:`anitya.lib.exceptions.AnityaPluginException` exception
            when the version cannot be retrieved correctly
        """
        # get_versions() returns an ordered list; last entry is newest.
        return cls.get_ordered_versions(project)[-1]

    @classmethod
    def get_version_url(cls, project):
        """ Method called to retrieve the url used to check for new version
        of the project provided, project that relies on the backend of this plugin.

        Attributes:
            project (:obj:`anitya.db.models.Project`): Project object whose backend
                corresponds to the current plugin.

        Returns:
            str: url used for version checking
        """
        url = "https://rubygems.org/api/v1/versions/%(name)s/latest.json" % {
            "name": project.name
        }

        return url

    @classmethod
    def get_versions(cls, project):
        """ Method called to retrieve all the versions (that can be found)
        of the projects provided, project that relies on the backend of
        this plugin.

        :arg Project project: a :class:`anitya.db.models.Project` object whose backend
            corresponds to the current plugin.

        :return: a list of all the possible releases found (at most one
            entry, since the queried endpoint only reports the latest)
        :return type: list

        :raise AnityaPluginException: a
            :class:`anitya.lib.exceptions.AnityaPluginException` exception
            when the versions cannot be retrieved correctly
        """
        url = cls.get_version_url(project)
        # Conditional request: skip work when nothing changed upstream.
        last_change = project.get_time_last_created_version()
        try:
            req = cls.call_url(url, last_change=last_change)
        except Exception:  # pragma: no cover
            raise AnityaPluginException("Could not contact %s" % url)

        # Not modified
        if req.status_code == 304:
            return []

        try:
            data = req.json()
        except Exception:  # pragma: no cover
            raise AnityaPluginException("No JSON returned by %s" % url)

        # The API reports a missing project/version as the literal
        # string "unknown".
        if data["version"] == "unknown":
            raise AnityaPluginException("Project or version unknown at %s" % url)

        return [data["version"]]

    @classmethod
    def check_feed(cls):
        """ Return a generator over the latest 50 uploads to rubygems.org

        by querying the JSON API.
        """
        url = "https://rubygems.org/api/v1/activity/just_updated.json"

        try:
            response = cls.call_url(url)
        except Exception:  # pragma: no cover
            raise AnityaPluginException("Could not contact %s" % url)

        try:
            data = response.json()
        except Exception:  # pragma: no cover
            raise AnityaPluginException("No XML returned by %s" % url)

        for item in data:
            name, version = item["name"], item["version"]
            homepage = "https://rubygems.org/gems/%s" % name
            yield name, homepage, cls.name, version
|
danjac/pyramid-medley | refs/heads/master | setup.py | 1 | from setuptools import setup
def _read_long_description():
    """Read README.md for the long description, closing the file handle.

    Bug fix: the previous inline open('README.md').read() leaked the file
    object (no close on any Python without refcount GC).
    """
    with open('README.md') as readme:
        return readme.read()

setup(
    name='pyramid-medley',
    version='0.0.1',
    author='Dan Jacob',
    author_email='danjac354@gmail.com',
    packages=['pyramid_medley'],
    scripts=[],
    license='LICENSE.txt',
    description='Pyramid/Jinja2/SQLAlchemy scaffold',
    long_description=_read_long_description(),
    install_requires=[
        'pyramid',
    ],
    entry_points="""
    [pyramid.scaffold]
    medley=pyramid_medley.scaffolds:MedleyTemplate
    """,
)
|
ea4gja/mrig | refs/heads/master | mrigd/tt_orion.py | 1 | #!/usr/bin/env python
#
# File: tt_orion.py
# Version: 1.0
#
# mrigd: TT_orion class definition
# Copyright (c) 2006-2008 Martin Ewing
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Contact ewing @@ alum.mit.edu or c/o San Pasqual Consulting, 28
# Wood Road, Branford CT 06405, USA.
import sys,time
from tentec import *
# To do:
# Squelch settings
# Antenna settings
# Mic source
# Manual notch (when TT supports it)
# As always, more work is needed to overcome Orion timeout problems.
# These definitions are used to inform client (rig.info) about rig's
# structure AND are then used to recognize client's commands to us.
# Modes should be a subset of a standard list of possible modes
# maintained for all rigs - to make client's life easier.
# --- Rig description tables: exported to clients via rig.info and used to
# --- validate incoming commands.  Keys/strings are part of the wire protocol
# --- with clients; do not rename casually.
FREQIO_BINARY = True # Use faster(?) binary I/O, not alpha for VFO freq.
ORION_MODE_LIST = [ 'USB', 'LSB', 'CW', 'CWR',
'AM', 'FM', 'RTTY' ]
# Orion would prefer 'UCW', 'LCW', and 'FSK', but we use Hamlib convention.
ORION_VFO_LIST = ['VFOA', 'VFOB']
ORION_RX_LIST = ['MAIN', 'SUB' ]
ORION_ATTEN_DICT = { 'OFF':0, '6dB':1, '12dB':2, '18dB':3 }
ORION_TX_LIST = ['TX']
ORION_BACKEND_ID = 'Orion Backend v. 0.23'
ORION_VFO_STEP_LIST = [1, 10, 100, 1000, 5000, 10000, 100000]
# Bandpass std. settings, (center, width) float Hz
ORION_BANDPASS_SETTINGS = { 'NARROW':'0. 400.', 'MEDIUM':'0. 2400.',
'WIDE':'0. 3000.' }
# AGC std. settings
ORION_AGC_MODES = [ 'FAST', 'MEDIUM', 'SLOW', 'OFF', 'PROG' ]
# Mic (tx audio) source
ORION_MIC_SOURCES = [ 'NORMAL', 'ALTERNATE' ]
# Tx Antennas
ORION_TX_ANTENNAS = [ 'ANT1', 'ANT2' ]
# Rx Antennas
ORION_RX_ANTENNAS = [ 'ANT1', 'ANT2', 'ANT3' ]
ORION_MEMORY_CHANNELS = [ 1, 200 ] # inclusive [lo, hi] channel range
ORION_DSP_DELAY = 0.2 # secs to wait after bandpass op.
ORION_RIT_RANGE_LIST = [-10000., +10000.] # float Hz
ORION_XIT_RANGE_LIST = ORION_RIT_RANGE_LIST
# Mappings to Orion's ASCII command parameters
ORION_VFO_MAP = { 'VFOA':'A', 'VFOB':'B' }
ORION_VFO_ASSIGN = { 'VFOA':'M', 'VFOB':'S' }
ORION_RX_MAP = { 'MAIN':'M', 'SUB':'S' }
ORION_TX_MAP = { 'TX':'M' } # TX is same as MAIN Rx
ORION_ANT_ASSIGN = { 'M':'ANT1', 'S':'ANT1' } # or the other way round?
ORION_MODE_MAP = { 'USB':'0', 'LSB':'1', 'CW':'2', 'CWR':'3',
'AM':'4', 'FM':'5', 'RTTY':'6' }
ORION_AGC_MODE_MAP = { 'FAST':'F', 'MEDIUM':'M', 'SLOW':'S',
'OFF':'O', 'PROG':'P' }
#Simplified standard mapping for the VFO - RX
ORION_VFO_TO_RX = { 'VFOA':'MAIN', 'VFOB':'SUB' }
# Supported frequency bands - valid VFO settings, float Hz
# NOTE(review): MAIN limits look padded beyond the exact ham-band edges,
# presumably to allow transverter/out-of-band tuning -- confirm with rig docs.
ORION_BAND_MAIN = [ (1.79e6,2.09e6), (3.49e6,4.075e6),
(6.89e6,7.43e6), (5.1e6,5.425e6),
(10.09e6,10.16e6), (13.99e6,15.01e6),
(18.058e6,18.178e6), (20.99e6,21.46e6),
(24.88e6,25.0e6), (27.99e6,29.71e6) ]
ORION_BAND_SUB = [ (0.101e6,29.999999e6) ]
ORION_BANDS = { 'MAIN':ORION_BAND_MAIN, 'SUB':ORION_BAND_SUB }
class TT_orion(Tentec):
def __init__(self):
"""Initialize the Orion backend: publish the rig's capability tables
and create the local mirror (*_v attributes) of the rig's state.
No serial I/O happens here; see init(T_PUT)."""
Tentec.__init__(self)
# Override Backend settings (for info print)
self.memory_range = ORION_MEMORY_CHANNELS
self.backend_id = ORION_BACKEND_ID
self.vfo_list= ORION_VFO_LIST
self.rx_list = ORION_RX_LIST
self.atten_dict= ORION_ATTEN_DICT
self.tx_list = ORION_TX_LIST
self.mode_list = ORION_MODE_LIST
self.bandpass_dict = ORION_BANDPASS_SETTINGS
self.agc_mode_list = ORION_AGC_MODES
self.mic_source_list = ORION_MIC_SOURCES
self.tx_ant_list = ORION_TX_ANTENNAS
self.rx_ant_list = ORION_RX_ANTENNAS
self.bands = ORION_BANDS
# Variables to hold our copy of rig's state
# (dicts are keyed by rx/tx/vfo name, e.g. 'MAIN', 'TX', 'VFOA')
self.init_v = '' # string ("ORION START")
self.rx_mode_v = { } # mode, string
self.tx_mode_v = { } # mode, string
self.freq_v = { } # Hz, float
self.vfo_step_v = { } # Hz, float
self.bandpass_v = { } # entry is [ctr,width] (float)
self.agc_mode_v = { } # mode, string
self.agc_user_v = { } # user/prog agc settings, [x,y,z], float
self.rf_gain_v = { } # rf gain, float
self.af_gain_v = { } # af gain, float
self.rit_v = { } # rit offset, Hz, float
self.xit_v = { } # xit offset, Hz, float
self.mic_gain_v = { } # mic gain, float
self.speech_proc_v = { } # SP gain, float
self.noise_blank_v = { } # NB gain, float
self.noise_reduce_v= { } # NR gain, float
self.notch_auto_v = { } # AN gain, float
self.transmit_v = { 'TX':0 } # PTT setting, int/bool, needs init value
self.power_v = { } # Power, Watts, Float
self.strength_raw_v = { } # raw strength, int.
self.strength_v = { } # calibrated strength, dB, float
self.swr_raw_v = { } # fwd,rev,ratio (raw), [int,int,int]
self.swr_v = { } # swr, float
self.atten_v = { } # attenuator setting, string
self.preamp_v = { } # on/off, bool (int)
self.memory_channel_v = 1 # current channel, int
return
def info(self, tp, rx='', data=''):
    """Read-only: return the standard Backend info block for this rig."""
    if tp == T_GET:
        return Backend.info(self, T_GET)
    if tp == T_PUT:
        return NO_WRITE             # info is not writable
    if tp == T_TEST:
        return ACK                  # yes, the method is defined
    return TP_INVALID
def init(self,tp,rx='',data='/dev/ttyS0'): # Firmware initalize
"""T_PUT: open the serial port named by *data* and push the default
VFO / antenna / speaker configuration to the rig; returns ACK/NAK.
T_GET: return the string captured during the T_PUT (e.g. "ORION START").
The command order below is deliberate -- do not reorder."""
# 'rx' is ignored!
# 'Put' performs serial initialization
if tp == T_PUT:
Tentec.serial_init(self,data)
r = self.transact('init: error','XX') # This is an Orion mystery
if is_nak(r): return r # (does it do anything?)
# Set up default VFO / Rx/Tx config (A<->MAIN, B<->SUB ?)
self.transact('vfo init','*KVABA') # main=vfoa, sub=vfob, tx=vfoa
# Set up default Antennas (MAIN/SUB <-> ANT1 ?)
self.transact('rx init','*KABNN') # main=sub=ant1, ''=ant2, ''=rx_ant
# Set up default spkr/phones configuration
self.transact('spkr setup','*UCBBB') # spkr=right=left="both main & sub"
self.init_v = r # remember me!
return ACK
elif tp == T_GET:
# 'Get' returns value obtained in init(T_PUT) e.g. "ORION START"
return self.init_v
elif tp == T_TEST: return ACK # Yes, the method is defined.
else: return TP_INVALID
def status(self,tp,rx=''): # Get rig status
"""Read-only: query the rig firmware version ('?V').  Also appends the
raw response to self.init_v as a session log.  Returns the stripped
version string, e.g. "Version 1.372", or NAK."""
if tp == T_PUT: return NO_WRITE
elif tp == T_GET:
if not self.ser: return NAK+'Serial port not open.'
r = self.transact('?V: rig disconnected?', '?V')
if is_nak(r): return r
self.init_v += r # add to stored init message
return r.strip() # "Version 1.372" or similar
elif tp == T_TEST: return ACK # Yes, the method is defined.
else: return TP_INVALID
def rx_mode(self,tp,rx='',mode=''):
"""Put/get the demodulation mode of receiver *rx* ('MAIN'/'SUB').
*mode* is a Hamlib-style name from ORION_MODE_LIST.  T_GET returns the
mode string ('' if the rig reports an unknown mode code)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
if not mode in ORION_MODE_LIST:
return NAK+'Unsupported mode %s' % mode
self.rx_mode_v[rx] = mode
cmd = '*R%cM%c' % ( ORION_RX_MAP[rx], ORION_MODE_MAP[mode] )
self.transact('put rx mode', cmd)
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
cmd = '?R%cM' % ORION_RX_MAP[rx] # the query
r = self.transact('get rx mode', cmd)
if is_nak(r): return r
if len(r) < 5: return NAK+'Bad response from Orion'
mval = r[4] # return = '@RMM0' e.g.
mymode = ''
for x in ORION_MODE_MAP.iterkeys(): # reverse lookup
if ORION_MODE_MAP[x] == mval:
mymode = x
break
self.rx_mode_v[rx] = mymode
return mymode
elif tp == T_TEST: return ACK
else: return TP_INVALID
def tx_mode(self, tp, tx='', mode=''):
    """Put/get the transmit mode.

    The Orion hardware ties the tx mode to the MAIN receiver's mode, so
    T_PUT also changes rx 'MAIN' as a side effect, and T_GET simply
    reads the MAIN rx mode.
    """
    if tp == T_TEST:
        return ACK
    if tp not in (T_PUT, T_GET):
        return TP_INVALID
    if not tx in ORION_TX_LIST:
        return NAK + 'invalid tx: %s' % tx
    if tp == T_PUT:
        self.tx_mode_v[tx] = mode
        self.rx_mode(T_PUT, 'MAIN', mode)   # tx mode == main rx mode
        return ACK
    current = self.rx_mode(T_GET, 'MAIN')
    self.tx_mode_v[tx] = current
    return current
def freq(self, tp, v='', data=''):
    """Put/get a VFO frequency in Hz, using Orion's binary protocol when
    FREQIO_BINARY is set (faster than the ASCII form).

    v: 'VFOA' or 'VFOB'; data: frequency for T_PUT (string/number, Hz).
    Returns ACK/NAK, or the frequency as a string of integer Hz for T_GET.
    """
    if tp == T_PUT:
        try:
            f = float(data)                     # validity check
        except ValueError:
            return NAK + 'freq: invalid vfo setting request: %s' % data
        # Range rules differ: MAIN is restricted to the ham bands,
        # SUB is general coverage.
        if not ORION_VFO_MAP.has_key(v):
            return NAK + 'invalid vfo: %s' % v
        if ORION_VFO_TO_RX[v] == 'MAIN':
            if not in_band(ORION_BAND_MAIN, f):
                return NAK + 'freq: bad freq. for main rx/tx: %f' % f
        else:
            if not in_band(ORION_BAND_SUB, f):
                return NAK + 'freq: bad freq. for sub rx: %f' % f
        self.freq_v[v] = f
        fi = int(f)
        if FREQIO_BINARY:
            # 4-byte big-endian binary frequency command
            cmd = '*%c' % ORION_VFO_MAP[v] + \
                chr(fi >> 24 & 0xff) + chr(fi >> 16 & 0xff) + \
                chr(fi >> 8 & 0xff) + chr(fi & 0xff)
        else:
            # ASCII frequency command
            cmd = '*%cF' % ORION_VFO_MAP[v] + str(fi)
        self.transact('put freq', cmd)
        return ACK
    elif tp == T_GET:
        # Orion's vfo is set modulo tuning step, so 7000010 -> 7000000
        # if tuning step > 10 Hz; the read-back may differ from the
        # value last written.
        if not ORION_VFO_MAP.has_key(v):
            return NAK + 'invalid vfo: %s' % v
        if FREQIO_BINARY:
            cmd = '?%c' % ORION_VFO_MAP[v]
        else:
            cmd = '?%cF' % ORION_VFO_MAP[v]
        r = self.transact('get_freq', cmd)
        if is_nak(r):
            return r
        if FREQIO_BINARY:
            # Decode 4 big-endian bytes into a float Hz value.
            tup4 = tuple(map(ord, r[2:]))
            freq = float(reduce(lambda x, y: 256 * x + y, tup4))
        else:
            try:
                freq = float(r[3:])             # being a little paranoid here
            except ValueError:
                return NAK + 'freq: bad response: %s' % r
        # Bug fix: cache the value for BOTH protocols -- previously only
        # the ASCII branch updated self.freq_v, leaving the cache stale
        # whenever FREQIO_BINARY was in use.
        self.freq_v[v] = freq
        return '%.f' % freq
    elif tp == T_TEST:
        return ACK
    else:
        return TP_INVALID
def vfo_step(self,tp,v='',data=''): # Set Tuning Step
"""Put/get the tuning step (Hz) for the receiver attached to vfo *v*.
Valid steps are listed in ORION_VFO_STEP_LIST.  Returns ACK/NAK or the
step as a string of integer Hz."""
if tp == T_PUT:
try:
fstep = float(data)
except:
return NAK+'vfo_step bad request %s' %data
# validate fstep - has to be in the list
if not int(fstep) in ORION_VFO_STEP_LIST:
return NAK+'vfo_step: Invalid freq. step size %f' % fstep
self.vfo_step_v[v] = data
# Problem: Orion provides a "receiver step", not a "vfo step"
# So you need to decode the vfo assignments to do the "vfo step"
# correctly. WE ASSUME VFOA <-> MAIN and VFOB <-> SUB.
rx = ORION_VFO_TO_RX[v]
cmd = '*R%cI%s' % (ORION_RX_MAP[rx], int(fstep))
self.transact('put vfo step',cmd)
return ACK
elif tp == T_GET:
rx = ORION_VFO_TO_RX[v]
cmd = '?R%cI' % ORION_RX_MAP[rx]
r = self.transact('get_vfo_step',cmd)
if is_nak(r): return r
try:
fstep = float(r[4:])
except:
return NAK+'vfo_step bad response %s' % r
self.vfo_step_v[v] = r[4:]
return '%.f' % fstep
elif tp == T_TEST: return ACK
else: return TP_INVALID
#
# Orion's passband is determined by the receiver "filter" (100 - 6000 Hz, "F")
# and its "passband" (-2500 -> 0 -> +2500 Hz, "P"). These control operation of
# the DSP.
# Rigserve allows setting the passband either as "offset" and
# "width" or as "low" and "high" - the lower and upper edges of the
# passband (Hz).
# Note that Orion works with "offset" (PBT) and "width" OR with High / Low
# Cut (on front panel), but the ASCII commands are for PBT and Filter = BW.
def bandpass_limits(self,tp,rx='',data=''): # low_high: string
"""Put/get the passband as edge frequencies: *data* is "low high" in Hz.
Internally converted to Orion's PBT offset + filter width pair; the
companion method bandpass() works directly in offset/width form."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
l_h = data.split(None,2) # [low-Hz, high-Hz]
offset = float( l_h[0] )
width = float( l_h[1] ) - float( l_h[0] ) # hi - low
except:
return NAK+'bandpass_limits bad request %s' % data
cmd = '*R%cF%d' % (ORION_RX_MAP[rx], int(width) )
self.transact('put filter bw',cmd) # Send filter BW
time.sleep(ORION_DSP_DELAY)
cmd = '*R%cP%d' % (ORION_RX_MAP[rx], int(offset) )
self.transact('put pbt',cmd) # Send PBT offset
self.bandpass_v[rx] = [offset, width] # remember as floats
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
cmd = '?R%cF' % ORION_RX_MAP[rx]
r = self.transact('get_bandpass_limits 1',cmd) # Get filter BW
if is_nak(r): return r
try:
width = float(r[4:])
except:
return NAK+'bandpass_limits bad response 1 %s' % r
cmd = '?R%cP' % ORION_RX_MAP[rx]
r = self.transact('get_bandpass_limits 2',cmd) # Get PBT offset
if is_nak(r): return r
try:
offset = float(r[4:])
except:
return NAK+'bandpass_limits band response 2 %s' % r
o_w = '%.f %.f' % (offset, width) # NOTE(review): unused leftover
self.bandpass_v[rx] = [offset,width] # remember as floats
low = offset
high = offset + width
return '%.f %.f' % (low,high) # return limits, char Hz
elif tp == T_TEST: return ACK
else: return TP_INVALID
def bandpass(self, tp, rx='', data=''):
    """Put/get the receiver passband as "offset width" (PBT offset and
    DSP filter bandwidth, Hz).

    Returns ACK/NAK for T_PUT, or the "offset width" string for T_GET.
    """
    if tp == T_PUT:
        if not rx in ORION_RX_LIST:
            return NAK + 'rx invalid: %s' % rx
        try:
            o_w = data.split(None, 2)
            offset = float(o_w[0])
            width = float(o_w[1])
        except:
            return NAK + 'bandpass: invalid request %s' % data
        # (Bug fix: removed a leftover "DEBUG ****" print statement that
        # polluted stdout on every passband change.)
        cmd = '*R%cP%d' % (ORION_RX_MAP[rx], int(offset))
        self.transact('bandpass put pbt', cmd)      # send PBT offset
        time.sleep(ORION_DSP_DELAY)                 # let the DSP settle
        cmd = '*R%cF%d' % (ORION_RX_MAP[rx], int(width))
        self.transact('bandpass put bw', cmd)       # send filter BW
        self.bandpass_v[rx] = [offset, width]       # remember as float Hz
        return ACK
    elif tp == T_GET:
        if not rx in ORION_RX_LIST:
            return NAK + 'rx invalid: %s' % rx
        cmd = '?R%cF' % ORION_RX_MAP[rx]
        r = self.transact('bandpass get bw', cmd)
        if is_nak(r):
            return r
        try:
            width = float(r[4:])
        except:
            return NAK + 'bandpass: response error 1: %s' % r
        cmd = '?R%cP' % ORION_RX_MAP[rx]
        r = self.transact('bandpass get pbt', cmd)
        if is_nak(r):
            return r
        try:
            offset = float(r[4:])
        except:
            return NAK + 'bandpass: response error 2: %s' % r
        self.bandpass_v[rx] = [offset, width]       # remember as floats
        return '%.f %.f' % (offset, width)
    elif tp == T_TEST:
        return ACK
    else:
        return TP_INVALID
def bandpass_standard(self, tp, rx='', data=''):
    """Write-only: apply a named passband from the standard table
    (e.g. NARROW / MEDIUM / WIDE) by delegating to bandpass()."""
    if tp == T_GET:
        return NO_READ
    if tp == T_TEST:
        return ACK
    if tp == T_PUT:
        if not rx in ORION_RX_LIST:
            return NAK + 'rx invalid: %s' % rx
        if not self.bandpass_dict.has_key(data):
            return NAK + 'Invalid bandpass setting'
        # Delegate; propagate bandpass()'s ACK/NAK verbatim.
        return self.bandpass(T_PUT, rx, self.bandpass_dict[data])
    return TP_INVALID
def agc_mode(self,tp,rx='',data=''):
# put or get current AGC mode (FAST/MEDIUM/SLOW/USER etc)
"""Put/get the AGC mode of receiver *rx*; *data* is a name from
ORION_AGC_MODES.  T_GET returns the mode string, or bare NAK if the
rig reports a code not in ORION_AGC_MODE_MAP."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
cmd = '*R%cA%c' % ( ORION_RX_MAP[rx], ORION_AGC_MODE_MAP[data] )
self.transact('put agc_mode',cmd) # Send AGC mode
self.agc_mode_v[rx] = data # remember as string
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
cmd = '?R%cA' % ORION_RX_MAP[rx]
r = self.transact('get agc_mode',cmd)
if is_nak(r): return r
modec = r[4]
for mm in ORION_AGC_MODE_MAP.iterkeys(): # reverse lookup
if ORION_AGC_MODE_MAP[mm] == modec:
self.agc_mode_v[rx] = mm # remember as string
return mm
return NAK
elif tp == T_TEST: return ACK
else: return TP_INVALID
# put/get prog mode agc parameters: tc, hold, threshold (sec, sec, uV)
# Note that Orion also lets us set FAST/MEDIUM/SLOW AGC parameters, but we
# do not implement this capability.
def agc_user(self,tp,rx='',data=''):
"""Put/get the programmable ('PROG') AGC parameters of receiver *rx*:
time-constant (sec), hold time (sec), threshold (uV), space-separated.
Side effect: forces the rig into PROG AGC mode in both directions."""
if tp == T_PUT:
# input: time-constant (sec), hold time (sec), threshold (uV)
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
list3 = data.split(None,3)
list3f = map(float,list3)
except:
return NAK+'agc_user bad request %s' % data
# (Should we save & restore current mode, which might not be 'prog'?)
self.agc_mode(T_PUT,rx, 'PROG') # ensure 'prog' mode
db_per_sec = 3.0 / list3f[0] # Orion's choice of unit
cmd = '*R%cAD%.4f' % ( ORION_RX_MAP[rx], db_per_sec)
self.transact('put db/sec',cmd)
cmd = '*R%cAH%.4f' % ( ORION_RX_MAP[rx], list3f[1] )
self.transact('put hold time',cmd)
cmd = '*R%cAT%.4f' % ( ORION_RX_MAP[rx], list3f[2] )
self.transact('put threshold',cmd)
self.agc_user_v[rx] = list3f # remember as floats
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
GAU_WT = 0.10 # seems to prevent timeouts
self.agc_mode(T_PUT,rx, 'PROG')
cmd = '?R%cAD' % ORION_RX_MAP[rx]
time.sleep(GAU_WT)
r = self.transact('get db/sec',cmd)
if is_nak(r): return r
try:
db_per_sec = float(r[5:])
except:
return NAK+'agc_user bad response 1 %s' % r
tc = 3.0 / db_per_sec # convert back to a time constant (sec)
cmd = '?R%cAH' % ORION_RX_MAP[rx]
time.sleep(GAU_WT)
r = self.transact('get hold time',cmd)
if is_nak(r): return r
try:
hold = float(r[5:])
except:
return NAK+'agc_user bad response 2 %s' % r
cmd = '?R%cAT' % ORION_RX_MAP[rx]
time.sleep(GAU_WT)
r = self.transact('get threshold',cmd)
if is_nak(r): return r
try:
thr = float(r[5:])
except:
return NAK+'agc_user bad response 3 %s' % r
self.agc_user_v[rx] = [tc, hold, thr]
return '%f %f %f' % (tc, hold, thr)
elif tp == T_TEST: return ACK
else: return TP_INVALID
def rf_gain(self,tp,rx='',data=''):
"""Put/get receiver RF gain as a 0.0-1.0 fraction (rig uses 0-100)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
gain = float(data)
except:
return NAK+'rf_gain bad request %s' % data
self.transact('put rf_gain',
'*R%cG%03d' % ( ORION_RX_MAP[rx], int(gain * 100.0) ))
self.rf_gain_v[rx] = gain
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get_rf_gain',
'?R%cG' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
gain = float(r[4:])/100.
except:
return NAK+'rf_gain bad response %s' % r
self.rf_gain_v[rx] = gain
return '%.3f' % gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
def af_gain(self,tp,rx='',data=''):
"""Put/get audio gain as a 0.0-1.0 fraction (rig uses 0-256)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
gain = float(data)
except:
return NAK+'af_gain bad request %s' % data
# Note: We should have set up the spkr/phone selections in init.
self.transact('put af_gain',
'*U%c%03d' % ( ORION_RX_MAP[rx], int(gain * 256.0) ))
self.af_gain_v[rx] = gain
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get_af_gain',
'?U%c' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
gain = float( r[3:] ) / 256.0
except:
return NAK+'af_gain bad response %s' % r
self.af_gain_v[rx] = gain
return '%.3f' % gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
def rit(self,tp,rx='',data=''):
"""Put/get the RIT offset (Hz) of receiver *rx*; range checked
against ORION_RIT_RANGE_LIST."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
offset = float(data)
except:
return NAK+'rit bad request %s' % data
if not in_range(ORION_RIT_RANGE_LIST,offset):
return NAK+'rit: value out of range: %s' % data
self.transact('put rit',
'*R%cR%d' % ( ORION_RX_MAP[rx], int(offset)))
self.rit_v[rx] = offset
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get rit',
'?R%cR' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
offset = float( r[4:] )
except:
return NAK+'rit bad response %s' % r
self.rit_v[rx] = offset
return '%.f' % offset
elif tp == T_TEST: return ACK
else: return TP_INVALID
def xit(self,tp,tx='',data=''):
"""Put/get the XIT offset (Hz) of transmitter *tx*; range checked
against ORION_XIT_RANGE_LIST.  Hardware command is on the MAIN rx."""
if tp == T_PUT:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
try:
offset = float(data)
except:
return NAK+'xit bad request %s' % data
if not in_range(ORION_XIT_RANGE_LIST,offset):
return NAK+'xit: value out of range: %s' % data
self.transact('put xit', '*RMX%d' % int(offset))
self.xit_v[tx] = offset
return ACK
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
r = self.transact('get xit', '?RMX')
if is_nak(r): return r
try:
offset = float( r[4:] )
except:
return NAK+'xit bad response %s' % r
self.xit_v[tx] = offset
return '%.f' % offset
elif tp == T_TEST: return ACK
else: return TP_INVALID
def mic_gain(self,tp,tx='',data=''):
"""Put/get microphone gain as a 0.0-1.0 fraction (rig uses 0-100)."""
if tp == T_PUT:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
try:
gain = float(data)
except:
return NAK+'mic_gain bad request %s' % data
self.transact('put mic_gain', '*TM%03d' % int(gain * 100.0) )
self.mic_gain_v[tx] = gain
return ACK
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
r = self.transact('get mic_gain', '?TM')
if is_nak(r): return r
try:
gain = float( r[3:] ) / 100.0
except:
return NAK+'mic_gain bad response %s' % r
self.mic_gain_v[tx] = gain
return '%.3f' % gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
# def mic_source(self,tp,tx,source): not supported
def speech_proc(self,tp,tx='',data=''):
"""Put/get speech-processor level, 0.0-1.0 (rig uses 0-9 in 0.1 steps)."""
if tp == T_PUT:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
try:
sp_gain = float(data)
except:
return NAK+'sp bad request %s' % data
self.transact('put speech_proc', '*TS%2d' % int( 10.0 * sp_gain ) )
self.speech_proc_v[tx] = sp_gain
return ACK
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
r = self.transact('get_speech_proc', '?TS')
if is_nak(r): return r
try:
sp_gain = float( r[3:] ) / 10.0
except:
return NAK+'sp bad response %s' % r
self.speech_proc_v[tx] = sp_gain
return '%.3f' % sp_gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
def noise_blank(self,tp,rx='',data=''):
"""Put/get noise-blanker level, 0.0-1.0 (rig uses 0-9 in 0.1 steps)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
nb_gain = float(data)
except:
return NAK+'nb bad request %s' % data
self.transact('put noise_blank',
'*R%cNB%d' % ( ORION_RX_MAP[rx], int(10.0 * nb_gain) ))
self.noise_blank_v[rx] = nb_gain
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get_noise_blank',
'?R%cNB' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
nb_gain = float( r[5:] ) / 10.0
except:
return NAK+'nb bad response %s' % r
self.noise_blank_v[rx] = nb_gain
return '%.3f' % nb_gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
def noise_reduce(self,tp,rx='',data=''): # Broken in 1.372 & 1.373b5
"""Put/get DSP noise-reduction level, 0.0-1.0.  NB: reported broken in
firmware 1.372 & 1.373b5 (see comment above)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
nr_gain = float(data)
except:
return NAK+'nr bad request %s' % data
self.transact('put noise_reduce',
'*R%cNN%d' % ( ORION_RX_MAP[rx], int(10.0 * nr_gain) ))
self.noise_reduce_v[rx] = nr_gain
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get noise_reduce',
'?R%cNN' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
nr_gain = float( r[5:] ) / 10.0
except:
return NAK+'nr bad response %s' % r
self.noise_reduce_v[rx] = nr_gain
return '%.3f' % nr_gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
def notch_auto(self,tp,rx='',data=''):
"""Put/get auto-notch level, 0.0-1.0 (readback broken in fw 1.372)."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
try:
gain = float(data)
except:
return NAK+'notch_auto bad request %s' % data
self.transact('put notch_auto',
'*R%cNA%d' % ( ORION_RX_MAP[rx], int(10.0 * gain) ))
self.notch_auto_v[rx] = gain
return ACK
elif tp == T_GET: # read auto_notch broken 1.372
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get_notch_auto',
'?R%cNA' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
gain = float( r[5:] ) / 10.0
except:
return NAK+'notch_auto bad response %s' % r
self.notch_auto_v[rx] = gain
return '%.3f' % gain
elif tp == T_TEST: return ACK
else: return TP_INVALID
# manual (non-auto) notch control not available in 1.372
def transmit(self, tp, tx='', data=''):
    """Put/get PTT.  data == "0" -> receive; any other integer -> transmit.

    Orion has no PTT query, so T_GET returns the last value set locally.
    """
    if tp == T_PUT:
        if not tx in ORION_TX_LIST:
            return NAK + 'tx invalid: %s' % tx
        try:
            ptt = int(data)
        except:
            # Bug fix: a non-numeric request used to raise an uncaught
            # ValueError instead of returning NAK like the other methods.
            return NAK + 'transmit bad request %s' % data
        # Bug fix: the documented contract is "0 -> off, otherwise -> on",
        # but the old test (ptt == 1) keyed up only for exactly 1, so a
        # request of e.g. "2" silently unkeyed the rig.
        if ptt:
            pttcmd = '*TK'      # key the transmitter
        else:
            pttcmd = '*TU'      # unkey
        self.transact('put transmit', pttcmd)
        self.transmit_v[tx] = ptt
        return ACK
    if tp == T_GET:             # Orion has no query for ptt
        if not tx in ORION_TX_LIST:
            return NAK + 'tx invalid: %s' % tx
        # NOTE(review): returns the stored int, unlike other getters which
        # return strings -- kept as-is for backward compatibility.
        return self.transmit_v[tx]
    elif tp == T_TEST:
        return ACK
    else:
        return TP_INVALID
def power(self,tp,tx='',data=''): # Power in watts, max 100.
"""Put/get transmit power in Watts; requests above 100 W are clamped.
A request of 0 disables the transmitter."""
if tp == T_PUT:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
try:
pwr = float(data) # =0 -> disable tx
except:
return NAK+'power bad request %s' % data
pwr2 = min(pwr,100.0) # clamp to the rig's 100 W maximum
self.transact('put power', '*TP%d' % int( pwr2 ) )
self.power_v[tx] = pwr2
return ACK
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
r = self.transact('get power', '?TP')
if is_nak(r): return r
try:
pwr = float( r[3:] )
except:
return NAK+'power bad response %s' % r
self.power_v[tx] = pwr
return '%.f' % pwr
elif tp == T_TEST: return ACK
else: return TP_INVALID
# to do:
# tx_ant()
# rx_ant()
def memory_channel(self, tp, vfo='', data='1'):
    """Put/get the current memory channel number (held locally only --
    nothing is sent to the rig; vfo_memory() uses the stored value).

    Returns ACK/NAK for T_PUT, the channel number as a string for T_GET.
    """
    if tp == T_PUT:
        try:
            ch = int(data)
        except:
            return NAK + 'memory_channel bad request %s' % data
        if not in_range(ORION_MEMORY_CHANNELS, ch):
            return NAK + 'Invalid memory channel'
        self.memory_channel_v = ch
        # Bug fix: previously fell through here and returned None.
        return ACK
    elif tp == T_GET:
        # Bug fix: the old code range-checked an unbound local 'ch' here,
        # raising NameError; validate the stored value instead.
        if not in_range(ORION_MEMORY_CHANNELS, self.memory_channel_v):
            return NAK + 'Invalid memory channel'
        return '%d' % self.memory_channel_v
    elif tp == T_TEST:
        return ACK
    else:
        return TP_INVALID
# The Orion store memory operation stores a vfo frequency from VFO A or B,
# but also captures MODE, BW, and PBT settings. Note that this breaks
# the "independent vfo" theory, since MODE, etc, refer to _receiver_
# settings. Beware of the side effects!
def vfo_memory(self,tp,vfo='',data=''):
"""T_PUT: set *vfo* to freq *data*, then store it (plus the receiver's
mode/BW/PBT -- a hardware side effect) into the current memory channel.
T_GET: recall the current memory channel into *vfo* and return the
resulting frequency; the previous vfo contents are NOT restored."""
if tp == T_PUT:
if not ORION_VFO_MAP.has_key(vfo): return NAK+'invalid vfo %s' % vfo
self.freq(T_PUT,vfo,data)
ch = int(self.memory_channel_v)
self.transact('put vfo_memory',
'*KW%c%d' % (ORION_VFO_MAP[vfo],ch))
return ACK
elif tp == T_GET:
if not ORION_VFO_MAP.has_key(vfo): return NAK+'invalid vfo %s' % vfo
ch = int(self.memory_channel_v)
self.transact('get vfo_memory',
'*KR%c%d' % (ORION_VFO_MAP[vfo],ch)) # Send mem to vfo.
time.sleep(ORION_DSP_DELAY) # (change to bandpass)
# possibly should use Backend methods to store new Mode, BW,
# and PBT, as well as VFO freq. However, this requires an
# analysis of which rx is currently attached to this vfo!
time.sleep(0.2) # Avoid read timeout
ans = self.freq(T_GET,vfo) # NB the vfo is not restored
return ans # returns only the freq.
elif tp == T_TEST: return ACK
else: return TP_INVALID
def strength_raw(self,tp,rx='',data=''): # Get uncalibrated int value
"""Read-only: raw (uncalibrated) S-meter value for *rx*.  Only valid
while receiving; the '?S' reply format differs in transmit mode."""
if tp == T_PUT: return NO_WRITE
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get strength_raw', '?S' )
if is_nak(r): return r
# Can be @SRMmainSsub in Rx mode or @STFfffRrrrSsss in Tx
if not r[2] == 'R':
return NAK+'get_strength_raw: must be in rcv mode'
x1, x2 = r.split('M')
xmain, xsub = x2.split('S') # Well, that's one way to parse!
if rx == 'MAIN':
value = xmain.lstrip('0') # trim leading zeroes
else:
value = xsub.lstrip('0')
self.strength_raw_v[rx] = int (value)
return value
elif tp == T_TEST: return ACK
else: return TP_INVALID
def strength(self,tp,rx='',data=''):
"""Read-only: calibrated signal strength (dB, string with 1 decimal)
via piecewise-linear interpolation of self.strength_table."""
if tp == T_PUT: return NO_WRITE
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
if self.strength_table == []: return NAK+'No S-meter cal.'
raws= self.strength_raw(T_GET,rx)
try:
raw = float(raws) # uncalibrated float, paranoid!
except:
return NAK+'strength: invalid raw response %s' % raws
p = (0.,-45.) # "previous" pair
for t in self.strength_table:
if t[0] > raw: # piecewise linear interpolation
cal = p[1] + (raw-p[0]) * ( (t[1]-p[1])/(t[0]-p[0]) )
break
else:
p = t # remember "previous"
else: # if raw is higher than the highest cal point
cal = t[1] # Use highest dB value
self.strength_v[rx] = cal
return '%.1f' % cal # return float dB in string
elif tp == T_TEST: return ACK
else: return TP_INVALID
def swr_raw(self,tp,tx='',data=''): # Get rig-dependent string,
# e.g., 'fwd rev ratio'
"""Read-only: raw forward/reverse/ratio power readings as a 'f r s'
string.  Only valid while transmitting ('?S' reply starts '@ST')."""
if tp == T_PUT: return NO_WRITE
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
r = self.transact('get swr_raw', '?S')
if is_nak(r): return r
# hopefully @STFfffRrrrSsss (fwd, rev, ratio)
if not r[2] == 'T':
return NAK+'swr_raw: must be in xmit mode'
x1,x2 = r.split('F')
xfwd,x3 = x2.split('R')
xrev,xratio = x3.split('S')
self.swr_raw_v[tx] = [ int(xfwd), int(xrev), int(xratio) ]
return '%s %s %s' % (xfwd, xrev, xratio) # string of 3 vals
elif tp == T_TEST: return ACK
else: return TP_INVALID
def swr(self,tp,tx='',data=''): # get the actual SWR ratio (float)
"""Read-only: SWR as a '%.2f' string; raw ratio >= 800 is reported as
the off-scale value 999."""
if tp == T_PUT: return NO_WRITE
elif tp == T_GET:
if not tx in ORION_TX_LIST: return NAK+'tx invalid: %s' % tx
raw = self.swr_raw(T_GET,tx) # Get raw values
if raw.startswith(NAK): return raw
f,r,s = raw.split(None,3)
if int(s) >= 800:
ratio = 999. # off-scale
else:
ratio = float(s) / 256. # Orion's idea of swr?
self.swr_v[tx] = ratio
return '%.2f' % ratio
elif tp == T_TEST: return ACK
else: return TP_INVALID
def atten(self,tp,rx='',data=''):
"""Put/get the input attenuator of *rx* using the string keys of
ORION_ATTEN_DICT ('OFF', '6dB', '12dB', '18dB')."""
if tp == T_PUT:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
if not self.atten_dict.has_key(data):
return NAK+'Invalid attenuator request: %s' % data
try:
o_att = self.atten_dict[data]
except:
return NAK+'atten bad request %s' % data
self.transact('put atten',
'*R%cT%d' % ( ORION_RX_MAP[rx], o_att) )
self.atten_v[rx] = data # Save actual setting (string)
return ACK
elif tp == T_GET:
if not rx in ORION_RX_LIST: return NAK+'rx invalid: %s' % rx
r = self.transact('get_atten',
'?R%cT' % ORION_RX_MAP[rx])
if is_nak(r): return r
try:
o_att = int( r[4:] ) # Orion's attenuator code
except:
return NAK+'atten bad response %s' % r
for att_s in self.atten_dict: # reverse dictionary lookup
if self.atten_dict[att_s] == o_att: break
else:
return NAK+'Invalid attenuator setting read: %d' % o_att
self.atten_v[rx] = att_s # save the string value
return att_s # and return it
elif tp == T_TEST: return ACK
else: return TP_INVALID
def preamp(self, tp, rx='', data=''):
    """Put/get the preamp state.  Only the MAIN rx has a preamp; a T_PUT
    for SUB is accepted but only updates the local mirror.

    data: "0" -> off, any other integer -> on.
    """
    if tp == T_PUT:
        if not rx in ORION_RX_LIST:
            return NAK + 'rx invalid: %s' % rx
        # Bug fix: bool(data) was True for ANY non-empty string, so a
        # request of "0" turned the preamp ON.  Parse the number instead.
        try:
            onoff = bool(int(data))
        except:
            return NAK + 'preamp bad request %s' % data
        if rx == 'MAIN':                # SUB has no preamp
            self.transact('put preamp', '*RME%d' % onoff)
        self.preamp_v[rx] = onoff
        return ACK
    elif tp == T_GET:
        if not rx in ORION_RX_LIST:
            return NAK + 'rx invalid: %s' % rx
        if rx == 'MAIN':
            r = self.transact('get_preamp', '?RME')
            if is_nak(r):
                return r
            onoff = (r[4:5] == '1')
            self.preamp_v[rx] = onoff
            return '%d' % onoff
        else:
            self.preamp_v[rx] = False   # no preamp on sub rx
            # NOTE(review): returns bool False while MAIN returns a
            # string -- kept as-is for backward compatibility.
            return False
    elif tp == T_TEST:
        return ACK
    else:
        return TP_INVALID
# Firmware version-specific subclasses.
# We are asking the client to declare which version he is using.
# Alternatively, we could have used the get_status info for this purpose and
# relied on the single TT_orion class, but that would have been messier.
class TT_orion_v1(TT_orion):
"""Orion running version 1.xxx firmware: installs the v1.372 S-meter
calibration table used by TT_orion.strength()."""
# Important note: The original Orion (model 565) when running version 1.xxx
# firmware has problems with serial command timing. The command/response sequence
# can be corrupted if commands come too fast. Rigserve users are strongly
# recommended to upgrade to version 2.xxx firmware for this reason.
# Orion with "v1" S-meter cal. (v1.372) and any other "v1" behavior
# Updated to correspond to Hamlib driver, 11/2007
def __init__(self):
# Calibration (hardware_value, dB value)
# NB - these values were measured, but should not be relied upon
# too much! Also, this calibration is only measured on the main rx.
SMETER_CAL_v1372 = [
( 10., -47. ), # S 1.5 min meter indication
( 13., -42. ),
( 18., -37. ),
( 22., -32. ),
( 27., -27. ),
( 32., -18. ),
( 37., -11. ),
( 42., -4. ), # S9
( 47., -1. ),
( 52., 10. ),
( 57., 20. ),
( 65., 30. ),
( 74., 40. ) #severe dsp quantization error
] # at high end of scale
TT_orion.__init__(self)
self.strength_table = SMETER_CAL_v1372
return None
class TT_orion_v2(TT_orion):
    """Orion running "v2" firmware: v2 behavior plus the v2.059d S-meter
    calibration.  Assumed valid for all v2.x releases until shown otherwise."""

    def __init__(self):
        """Initialize the base Orion driver and install the v2.059d S-meter table."""
        TT_orion.__init__(self)
        # Measured (raw hardware reading, strength in dB) calibration points,
        # ordered low to high.  raw 10 is S1 (minimum indication); raw 94 is S9.
        raw_readings = ( 10.,  24.,  38.,  47.,  61.,  70.,
                         79.,  84.,  94., 103., 118., 134., 147., 161.)
        db_values    = (-48., -42., -36., -30., -24., -18.,
                        -12.,  -6.,   0.,  10.,  20.,  30.,  40.,  50.)
        self.strength_table = list(zip(raw_readings, db_values))
# operate (not available)
if __name__ == '__main__':
    # Hardware smoke test for the subclass and ic module routines, driving a
    # real Orion on /dev/ham.orion.
    # THIS CODE IS NOT EXECUTED IF THIS MODULE IS INVOKED FROM ANOTHER PYTHON ROUTINE.
    # print() call syntax (single argument) behaves identically under
    # Python 2 and Python 3, unlike the former `print x` statements which
    # are a SyntaxError on Python 3.
    rig = TT_orion_v2()
    print(rig.init(T_PUT, data='/dev/ham.orion'))
    print(rig.init(T_GET))
    print(rig.status(T_GET))
    print(rig.rx_mode(T_GET, 'MAIN'))
    print(rig.rx_mode(T_PUT, 'MAIN', 'LSB'))
    print(rig.rx_mode(T_GET, 'MAIN'))
    print(rig.rx_mode(T_PUT, 'MAIN', 'USB'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.freq(T_PUT, 'VFOA', '7100120'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.vfo_step(T_GET, 'VFOA'))
    print(rig.vfo_step(T_PUT, 'VFOA', '100'))
    print(rig.vfo_step(T_GET, 'VFOA'))
    print(rig.vfo_step(T_PUT, 'VFOA', '1'))
    print(rig.bandpass(T_GET, 'MAIN'))
    print(rig.bandpass_limits(T_PUT, 'MAIN', '222 555'))
    print(rig.bandpass_limits(T_GET, 'MAIN'))
    print(rig.bandpass_standard(T_PUT, 'MAIN', 'MEDIUM'))
    print(rig.bandpass(T_GET, 'MAIN'))
    print(rig.atten(T_PUT, 'SUB', 'OFF'))
    print(rig.atten(T_GET, 'SUB'))
    print(rig.agc_user(T_GET, 'MAIN'))
    print(rig.agc_user(T_GET, 'MAIN'))
    print(rig.info(T_GET))
    print(rig.strength_raw(T_GET, 'MAIN'))
    print(rig.strength(T_GET, 'MAIN'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.freq(T_PUT, 'VFOA', '7100120'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.freq(T_GET, 'VFOA'))
    print(rig.preamp(T_GET, 'MAIN'))
    print(rig.atten(T_GET, 'MAIN'))
    print(rig.mic_gain(T_PUT, 'TX', '0.51'))
    print(rig.mic_gain(T_GET, 'TX'))
    print(rig.af_gain(T_PUT, 'MAIN', '0.12'))
    print(rig.af_gain(T_GET, 'MAIN'))
    print(rig.rf_gain(T_PUT, 'MAIN', '0.79'))
    print(rig.rf_gain(T_GET, 'MAIN'))
|
Razesdark/mps-youtube | refs/heads/develop | mps_youtube/player.py | 1 | import os
import sys
import random
import logging
import math
import time
import shlex
import subprocess
import socket
from urllib.error import HTTPError, URLError
from abc import ABCMeta, abstractmethod
from . import g, screen, c, streams, history, content, config, util
from .commands import lastfm
mswin = os.name == "nt"
not_utf8_environment = mswin or "UTF-8" not in sys.stdout.encoding
class BasePlayer:
_playbackStatus = "Paused"
_last_displayed_line = None
    @property
    def PlaybackStatus(self):
        """Current MPRIS-style playback state string ('Playing'/'Paused')."""
        return self._playbackStatus

    @PlaybackStatus.setter
    def PlaybackStatus(self, value):
        # Store the new state and mirror it to the MPRIS controller as a
        # boolean "paused" flag.
        self._playbackStatus = value
        if value == 'Playing':
            paused = False
        else:
            paused = True
        g.mprisctl.send(('pause', paused))
def play(self, songlist, shuffle=False, repeat=False, override=False):
""" Play a range of songs, exit cleanly on keyboard interrupt. """
if config.ALWAYS_REPEAT.get:
repeat = True
self.songlist = songlist
self.shuffle = shuffle
self.repeat = repeat
self.override = override
if shuffle:
random.shuffle(self.songlist)
self.song_no = 0
while 0 <= self.song_no <= len(self.songlist)-1:
self.song = self.songlist[self.song_no]
g.content = self._playback_progress(self.song_no, self.songlist,
repeat=repeat)
if not g.command_line:
screen.update(fill_blank=False)
hasnext = len(self.songlist) > self.song_no + 1
if hasnext:
streams.preload(self.songlist[self.song_no + 1],
override=self.override)
if config.SET_TITLE.get:
util.set_window_title(self.song.title + " - mpsyt")
self.softrepeat = repeat and len(self.songlist) == 1
if g.scrobble:
lastfm.set_now_playing(g.artist, g.scrobble_queue[self.song_no])
try:
self.video, self.stream = stream_details(self.song,
override=self.override,
softrepeat=self.softrepeat)
self._playsong()
except KeyboardInterrupt:
logging.info("Keyboard Interrupt")
util.xprint(c.w + "Stopping... ")
screen.reset_terminal()
g.message = c.y + "Playback halted" + c.w
raise KeyboardInterrupt
break
# skip forbidden, video removed/no longer available, etc. tracks
except TypeError:
self.song_no += 1
pass
if config.SET_TITLE.get:
util.set_window_title("mpsyt")
if self.song_no == -1:
self.song_no = len(songlist) - 1 if repeat else 0
elif self.song_no == len(self.songlist) and repeat:
self.song_no = 0
    # To be defined by subclass based on being cmd player or library
    # When overriding next and previous don't forget to add the following
    # if g.scrobble:
    #     lastfm.scrobble_track(g.artist, g.album, g.scrobble_queue[self.song_no])
    def next(self):
        """Advance to the next track (intended to be overridden)."""
        pass

    def previous(self):
        """Return to the previous track (intended to be overridden)."""
        pass

    def stop(self):
        """Stop playback entirely (intended to be overridden)."""
        pass

    ###############

    def seek(self):
        """Seek within the current track (intended to be overridden)."""
        pass
    def _playsong(self, failcount=0, softrepeat=False):
        """ Play song using config.PLAYER called with args config.PLAYERARGS.
        """
        # pylint: disable=R0911,R0912
        if not config.PLAYER.get or not util.has_exefile(config.PLAYER.get):
            g.message = "Player not configured! Enter %sset player <player_app> "\
                "%s to set a player" % (c.g, c.w)
            return

        if config.NOTIFIER.get:
            # Fire-and-forget desktop notification carrying the track title.
            subprocess.Popen(shlex.split(config.NOTIFIER.get) + [self.song.title])

        # Status line: video id, container/quality, and size in megabytes.
        size = streams.get_size(self.song.ytid, self.stream['url'])
        songdata = (self.song.ytid, self.stream['ext'] + " " + self.stream['quality'],
                    int(size / (1024 ** 2)))
        self.songdata = "%s; %s; %s Mb" % songdata
        screen.writestatus(self.songdata)

        self._launch_player()

        if config.HISTORY.get:
            history.add(self.song)
    def _launch_player(self):
        """ Launch player application (intended to be overridden). """
        pass

    def send_metadata_mpris(self):
        """Push track metadata (id, title, length, art, artist, album) to MPRIS."""
        metadata = util._get_metadata(self.song.title) if config.LOOKUP_METADATA.get else None
        if metadata is None:
            # No lookup (or lookup failed): fall back to YouTube's own title
            # and thumbnail, with empty artist/album fields.
            arturl = "https://i.ytimg.com/vi/%s/default.jpg" % self.song.ytid
            metadata = (self.song.ytid, self.song.title, self.song.length,
                        arturl, [''], '')
        else:
            arturl = metadata['album_art_url']
            metadata = (self.song.ytid, metadata['track_title'],
                        self.song.length, arturl,
                        [metadata['artist']], metadata['album'])
        if g.mprisctl:
            g.mprisctl.send(('metadata', metadata))
    def _playback_progress(self, idx, allsongs, repeat=False):
        """ Generate string to show selected tracks, indicate current track. """
        # pylint: disable=R0914
        # too many local variables
        cw = util.getxy().width
        out = " %s%-XXs%s%s\n".replace("XX", str(cw - 9))
        out = out % (c.ul, "Title", "Time", c.w)
        multi = len(allsongs) > 1

        for n, song in enumerate(allsongs):
            length_orig = util.fmt_time(song.length)
            length = " " * (8 - len(length_orig)) + length_orig
            i = util.uea_pad(cw - 14, song.title), length, length_orig
            fmt = (c.w, " ", c.b, i[0], c.w, c.y, i[1], c.w)

            if n == idx:
                # Highlight the row of the currently playing track and
                # remember it for the footer line below.
                fmt = (c.y, "> ", c.p, i[0], c.w, c.p, i[1], c.w)
                cur = i

            out += "%s%s%s%s%s %s%s%s\n" % fmt

        out += "\n" * (3 - len(allsongs))
        pos = 8 * " ", c.y, idx + 1, c.w, c.y, len(allsongs), c.w
        playing = "{}{}{}{} of {}{}{}\n\n".format(*pos) if multi else "\n\n"

        keys = self._help(short=(not multi and not repeat))
        # Single-track playback shows the standard songlist display instead
        # of the table built above.
        out = out if multi else content.generate_songlist_display(song=allsongs[0])

        if config.SHOW_PLAYER_KEYS.get and keys is not None:
            out += "\n" + keys
        else:
            playing = "{}{}{}{} of {}{}{}\n".format(*pos) if multi else "\n"
            out += "\n" + " " * (cw - 19) if multi else ""

        fmt = playing, c.r, cur[0].strip()[:cw - 19], c.w, c.w, cur[2], c.w
        out += "%s %s%s%s %s[%s]%s" % fmt
        out += " REPEAT MODE" if repeat else ""
        return out
    def make_status_line(self, elapsed_s, prefix, songlength=0, volume=None):
        """Render the progress line, writing to the screen only on change."""
        self._line = self._make_status_line(elapsed_s, prefix, songlength,
                                            volume=volume)

        if self._line != self._last_displayed_line:
            # Skip redundant terminal writes when nothing changed.
            screen.writestatus(self._line)
            self._last_displayed_line = self._line
def _make_status_line(self, elapsed_s, prefix, songlength=0, volume=None):
""" Format progress line output. """
# pylint: disable=R0914
display_s = elapsed_s
display_h = display_m = 0
if elapsed_s >= 60:
display_m = display_s // 60
display_s %= 60
if display_m >= 60:
display_h = display_m // 60
display_m %= 60
pct = (float(elapsed_s) / songlength * 100) if songlength else 0
status_line = "%02i:%02i:%02i %s" % (
display_h, display_m, display_s,
("[%.0f%%]" % pct).ljust(6)
)
if volume:
vol_suffix = " vol: %d%%" % volume
else:
vol_suffix = ""
cw = util.getxy().width
prog_bar_size = cw - len(prefix) - len(status_line) - len(vol_suffix) - 7
progress = int(math.ceil(pct / 100 * prog_bar_size))
status_line += " [%s]" % ("=" * (progress - 1) +
">").ljust(prog_bar_size, ' ')
return prefix + status_line + vol_suffix
class CmdPlayer(BasePlayer):
    """BasePlayer that drives an external command-line player process.

    Subclasses are expected to set ``self.p`` to the spawned process and to
    implement the hook methods below.
    """

    def next(self):
        """Scrobble (if enabled), kill the player and advance one track."""
        if g.scrobble:
            lastfm.scrobble_track(g.artist, g.album,
                                  g.scrobble_queue[self.song_no])
        self.terminate_process()
        self.song_no += 1

    def previous(self):
        """Scrobble (if enabled), kill the player and step back one track."""
        if g.scrobble:
            lastfm.scrobble_track(g.artist, g.album,
                                  g.scrobble_queue[self.song_no])
        self.terminate_process()
        self.song_no -= 1

    def stop(self):
        """Kill the player and push song_no past the end to exit play()."""
        self.terminate_process()
        self.song_no = len(self.songlist)

    def terminate_process(self):
        self.p.terminate()
        # If using shell=True or the player
        # requires some obscure way of killing the process
        # the child class can define this function

    def _generate_real_playerargs(self):
        """Build the concrete player command line (subclass hook)."""
        pass

    def clean_up(self):
        """Post-playback cleanup (subclass hook)."""
        pass

    def launch_player(self, cmd):
        """Spawn the player process from *cmd* (subclass hook)."""
        pass

    def _help(self, short=True):
        """Return the player keybinding help text (subclass hook)."""
        pass

    def _launch_player(self):
        """ Launch player application. """
        cmd = self._generate_real_playerargs()
        util.dbg("playing %s", self.song.title)
        util.dbg("calling %s", " ".join(cmd))

        # Fix UnicodeEncodeError when title has characters
        # not supported by encoding
        cmd = [util.xenc(i) for i in cmd]
        self.send_metadata_mpris()

        try:
            self.launch_player(cmd)
        except OSError:
            g.message = util.F('no player') % config.PLAYER.get
            return None
        finally:
            # Always tell MPRIS playback stopped and reap the child, even
            # if the player crashed or mpsyt is interrupted.
            if g.mprisctl:
                g.mprisctl.send(('stop', True))
            if self.p and self.p.poll() is None:
                self.p.terminate()  # make sure to kill mplayer if mpsyt crashes
            self.clean_up()
def stream_details(song, failcount=0, override=False, softrepeat=False):
    """Fetch stream details for a song.

    Returns a (video, stream) tuple, or None when no usable stream could be
    resolved (g.message is set to explain why).  Retries by recursing with an
    incremented failcount; raises TypeError once g.max_retries is reached so
    the caller (play()) skips the track.
    """
    # don't interrupt preloading:
    while song.ytid in g.preloading:
        screen.writestatus("fetching item..")
        time.sleep(0.1)

    try:
        streams.get(song, force=failcount, callback=screen.writestatus)

    except (IOError, URLError, HTTPError, socket.timeout) as e:
        util.dbg("--ioerror in stream_details call to streams.get %s", str(e))

        if "Youtube says" in str(e):
            g.message = util.F('cant get track') % (song.title + " " + str(e))
            return

        elif failcount < g.max_retries:
            # Transient network failure: retry with the next stream quality.
            util.dbg("--ioerror - trying next stream")
            failcount += 1
            return stream_details(song, failcount=failcount, override=override, softrepeat=softrepeat)

        elif "pafy" in str(e):
            g.message = str(e) + " - " + song.ytid
            return

    except ValueError:
        g.message = util.F('track unresolved')
        util.dbg("----valueerror in stream_details call to streams.get")
        return

    if failcount == g.max_retries:
        # Retries exhausted: signal play() to skip this track.
        raise TypeError()

    try:
        # Video is wanted when configured (and not overridden to audio), or
        # when the override explicitly asks for a video mode.
        video = ((config.SHOW_VIDEO.get and override != "audio") or
                 (override in ("fullscreen", "window", "forcevid")))
        m4a = "mplayer" not in config.PLAYER.get
        cached = g.streams[song.ytid]
        stream = streams.select(cached, q=failcount, audio=(not video), m4a_ok=m4a)

        # handle no audio stream available, or m4a with mplayer
        # by switching to video stream and suppressing video output.
        if (not stream or failcount) and not video:
            util.dbg(c.r + "no audio or mplayer m4a, using video stream" + c.w)
            override = "a-v"
            video = True
            stream = streams.select(cached, q=failcount, audio=False, maxres=1600)

        if not stream:
            raise IOError("No streams available")
        return (video, stream)

    except (HTTPError) as e:
        # Fix for invalid streams (gh-65)
        util.dbg("----htterror in stream_details call to gen_real_args %s", str(e))

        if failcount < g.max_retries:
            failcount += 1
            return stream_details(song, failcount=failcount,
                                  override=override, softrepeat=softrepeat)
        else:
            g.message = str(e)
            return

    except IOError as e:
        # this may be caused by attempting to play a https stream with
        # mplayer
        # ====
        errmsg = e.message if hasattr(e, "message") else str(e)
        g.message = c.r + str(errmsg) + c.w
        return
|
ATIX-AG/ansible | refs/heads/devel | test/units/modules/network/f5/test_bigip_partition.py | 27 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_partition import Parameters
from library.modules.bigip_partition import ModuleManager
from library.modules.bigip_partition import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_partition import Parameters
from ansible.modules.network.f5.bigip_partition import ModuleManager
from ansible.modules.network.f5.bigip_partition import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Return the named fixture, decoded from JSON when it parses as such.

    Results are memoized in the module-level ``fixture_data`` cache so each
    fixture file is read at most once per test run.
    """
    path = os.path.join(fixture_path, name)
    if path not in fixture_data:
        with open(path) as handle:
            contents = handle.read()
        try:
            contents = json.loads(contents)
        except Exception:
            # Not JSON -- keep the raw file text.
            pass
        fixture_data[path] = contents
    return fixture_data[path]
class TestParameters(unittest.TestCase):
    """Verify Parameters maps both module-style and API-style arguments."""

    def test_module_parameters(self):
        """Plain module arguments pass through unchanged."""
        args = dict(
            name='foo',
            description='my description',
            route_domain=0
        )

        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.description == 'my description'
        assert p.route_domain == 0

    def test_module_parameters_string_domain(self):
        """route_domain supplied as a string compares equal to the int form."""
        args = dict(
            name='foo',
            route_domain='0'
        )

        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.route_domain == 0

    def test_api_parameters(self):
        """API payloads use defaultRouteDomain, exposed as route_domain."""
        args = dict(
            name='foo',
            description='my description',
            defaultRouteDomain=1
        )

        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.description == 'my description'
        assert p.route_domain == 1
class TestManagerEcho(unittest.TestCase):
    """Exercise ModuleManager create/update flows with device I/O mocked out."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create_partition(self, *args):
        """Creating a missing partition reports changed=True."""
        set_module_args(dict(
            name='foo',
            description='my description',
            server='localhost',
            password='password',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # exists() is consulted before (False) and after (True) creation.
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True

    def test_create_partition_idempotent(self, *args):
        """Re-creating a partition that already matches reports changed=False."""
        set_module_args(dict(
            name='foo',
            description='my description',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_tm_auth_partition.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)

        results = mm.exec_module()

        assert results['changed'] is False

    def test_update_description(self, *args):
        """Changing only the description triggers an update."""
        set_module_args(dict(
            name='foo',
            description='another description',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_tm_auth_partition.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['description'] == 'another description'

    def test_update_route_domain(self, *args):
        """Changing the route domain triggers an update."""
        set_module_args(dict(
            name='foo',
            route_domain=1,
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_tm_auth_partition.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['route_domain'] == 1
|
bcrochet/eve | refs/heads/master | eve/methods/post.py | 1 | # -*- coding: utf-8 -*-
"""
eve.methods.post
~~~~~~~~~~~~~~~~
This module implements the POST method, supported by the resource
endpoints.
:copyright: (c) 2017 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app as app, abort
from eve.utils import config, parse_request, debug_error_message
from eve.auth import requires_auth
from eve.defaults import resolve_default_values
from eve.validation import ValidationError
from eve.methods.common import parse, payload, ratelimit, \
pre_event, store_media_files, resolve_user_restricted_access, \
resolve_embedded_fields, build_response_document, marshal_write_response, \
resolve_sub_resource_path, resolve_document_etag, oplog_push, resource_link
from eve.versioning import resolve_document_version, \
insert_versioning_documents
@ratelimit()
@requires_auth('resource')
@pre_event
def post(resource, payl=None):
    """
    Default function for handling POST requests, it has decorators for
    rate limiting, authentication and for raising pre-request events. After the
    decorators are applied forwards to call to :func:`post_internal`

    :param resource: name of the resource involved.
    :param payl: optional alternative payload (see :func:`post_internal`).

    .. versionchanged:: 0.5
       Split original post() into post/post_internal combo.
    """
    return post_internal(resource, payl, skip_validation=False)
def post_internal(resource, payl=None, skip_validation=False):
    """
    Intended for internal post calls, this method is not rate limited,
    authentication is not checked and pre-request events are not raised.
    Adds one or more documents to a resource. Each document is validated
    against the domain schema. If validation passes the document is inserted
    and ID_FIELD, LAST_UPDATED and DATE_CREATED along with a link to the
    document are returned. If validation fails, a list of validation issues
    is returned.

    :param resource: name of the resource involved.
    :param payl: alternative payload. When calling post() from your own code
                 you can provide an alternative payload. This can be useful,
                 for example, when you have a callback function hooked to a
                 certain endpoint, and want to perform additional post() calls
                 from there.

                 Please be advised that in order to successfully use this
                 option, a request context must be available.

                 See https://github.com/pyeve/eve/issues/74 for a
                 discussion, and a typical use case.
    :param skip_validation: skip payload validation before write (bool)

    .. versionchanged:: 0.7
       Add support for Location header. Closes #795.

    .. versionchanged:: 0.6
       Fix: since v0.6, skip_validation = True causes a 422 response (#726).

    .. versionchanged:: 0.6
       Initialize DELETED field when soft_delete is enabled.

    .. versionchanged:: 0.5
       Back to resolving default values after validation as now the validator
       can properly validate dependency even when some have default values. See
       #353.
       Push updates to the OpLog.
       Original post() has been split into post() and post_internal().
       ETAGS are now stored with documents (#369).

    .. versionchanged:: 0.4
       Resolve default values before validation is performed. See #353.
       Support for document versioning.

    .. versionchanged:: 0.3
       Return 201 if at least one document has been successfully inserted.
       Fix #231 auth field not set if resource level authentication is set.
       Support for media fields.
       When IF_MATCH is disabled, no etag is included in the payload.
       Support for new validation format introduced with Cerberus v0.5.

    .. versionchanged:: 0.2
       Use the new STATUS setting.
       Use the new ISSUES setting.
       Raise 'on_pre_<method>' event.
       Explicitly resolve default values instead of letting them be resolved
       by common.parse. This avoids a validation error when a read-only field
       also has a default value.
       Added ``on_inserted*`` events after the database insert

    .. versionchanged:: 0.1.1
       auth.request_auth_value is now used to store the auth_field value.

    .. versionchanged:: 0.1.0
       More robust handling of auth_field.
       Support for optional HATEOAS.

    .. versionchanged: 0.0.9
       Event hooks renamed to be more robust and consistent: 'on_posting'
       renamed to 'on_insert'.
       You can now pass a pre-defined custom payload to the function.

    .. versionchanged:: 0.0.9
       Storing self.app.auth.userid in auth_field when 'user-restricted
       resource access' is enabled.

    .. versionchanged: 0.0.7
       Support for Rate-Limiting.
       Support for 'extra_response_fields'.

       'on_posting' and 'on_posting_<resource>' events are raised before the
       documents are inserted into the database. This allows callback functions
       to arbitrarily edit/update the documents being stored.

    .. versionchanged:: 0.0.6
       Support for bulk inserts.

       Please note: validation constraints are checked against the database,
       and not between the payload documents themselves. This causes an
       interesting corner case: in the event of a multiple documents payload
       where two or more documents carry the same value for a field where the
       'unique' constraint is set, the payload will validate successfully, as
       there are no duplicates in the database (yet). If this is an issue, the
       client can always send the documents once at a time for insertion, or
       validate locally before submitting the payload to the API.

    .. versionchanged:: 0.0.5
       Support for 'application/json' Content-Type .
       Support for 'user-restricted resource access'.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       JSON links. Superfluous ``response`` container removed.
    """
    date_utc = datetime.utcnow().replace(microsecond=0)
    resource_def = app.config['DOMAIN'][resource]
    schema = resource_def['schema']
    validator = None if skip_validation else app.validator(schema, resource)
    documents = []
    results = []
    failures = 0
    id_field = resource_def['id_field']

    if config.BANDWIDTH_SAVER is True:
        embedded_fields = []
    else:
        req = parse_request(resource)
        embedded_fields = resolve_embedded_fields(resource, req)

    # validation, and additional fields
    if payl is None:
        payl = payload()

    # A single document is treated as a one-element bulk insert.
    if isinstance(payl, dict):
        payl = [payl]

    if not payl:
        # empty bulk insert
        abort(400, description=debug_error_message(
            'Empty bulk insert'
        ))

    if len(payl) > 1 and not config.DOMAIN[resource]['bulk_enabled']:
        abort(400, description=debug_error_message(
            'Bulk insert not allowed'
        ))

    for value in payl:
        document = []
        doc_issues = {}
        try:
            document = parse(value, resource)
            resolve_sub_resource_path(document, resource)
            if skip_validation:
                validation = True
            else:
                validation = validator.validate(document)
            if validation:  # validation is successful
                # validator might be not available if skip_validation. #726.
                if validator:
                    # Apply coerced values
                    document = validator.document

                # Populate meta and default fields
                document[config.LAST_UPDATED] = \
                    document[config.DATE_CREATED] = date_utc

                if config.DOMAIN[resource]['soft_delete'] is True:
                    document[config.DELETED] = False

                resolve_user_restricted_access(document, resource)
                resolve_default_values(document, resource_def['defaults'])
                store_media_files(document, resource)
                resolve_document_version(document, resource, 'POST')
            else:
                # validation errors added to list of document issues
                doc_issues = validator.errors
        except ValidationError as e:
            doc_issues['validator exception'] = str(e)
        except Exception as e:
            # most likely a problem with the incoming payload, report back to
            # the client as if it was a validation issue
            app.logger.exception(e)
            doc_issues['exception'] = str(e)

        if len(doc_issues):
            document = {
                config.STATUS: config.STATUS_ERR,
                config.ISSUES: doc_issues,
            }
            failures += 1

        documents.append(document)

    if failures:
        # If at least one document got issues, the whole request fails and a
        # ``422 Unprocessable Entity`` status is returned.
        for document in documents:
            if config.STATUS in document \
               and document[config.STATUS] == config.STATUS_ERR:
                results.append(document)
            else:
                results.append({config.STATUS: config.STATUS_OK})

        return_code = config.VALIDATION_ERROR_STATUS
    else:
        # notify callbacks
        getattr(app, "on_insert")(resource, documents)
        getattr(app, "on_insert_%s" % resource)(documents)

        # compute etags here as documents might have been updated by callbacks.
        resolve_document_etag(documents, resource)

        # bulk insert
        ids = app.data.insert(resource, documents)

        # update oplog if needed
        oplog_push(resource, documents, 'POST')

        # assign document ids
        for document in documents:
            # either return the custom ID_FIELD or the id returned by
            # data.insert().
            id_ = document.get(id_field, ids.pop(0))
            document[id_field] = id_

            # build the full response document
            result = document
            build_response_document(
                result, resource, embedded_fields, document)

            # add extra write meta data
            result[config.STATUS] = config.STATUS_OK

            # limit what actually gets sent to minimize bandwidth usage
            result = marshal_write_response(result, resource)
            results.append(result)

        # insert versioning docs
        insert_versioning_documents(resource, documents)

        # notify callbacks
        getattr(app, "on_inserted")(resource, documents)
        getattr(app, "on_inserted_%s" % resource)(documents)

        # request was received and accepted; at least one document passed
        # validation and was accepted for insertion.
        return_code = 201

    if len(results) == 1:
        response = results.pop(0)
    else:
        response = {
            config.STATUS: config.STATUS_ERR if failures else config.STATUS_OK,
            config.ITEMS: results,
        }

    if failures:
        response[config.ERROR] = {
            "code": return_code,
            "message": "Insertion failure: %d document(s) contain(s) error(s)"
                       % failures,
        }

    # The Location header points at the first inserted document.
    location_header = None if return_code != 201 or not documents else \
        [('Location', '%s/%s' % (resource_link(), documents[0][id_field]))]

    return response, None, None, return_code, location_header
|
setrofim/workload-automation | refs/heads/master | wa/workloads/templerun2/__init__.py | 5 | # Copyright 2013-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from wa import ApkReventWorkload
class TempleRun2(ApkReventWorkload):

    # Workload name used by agendas / the plugin loader.
    name = 'templerun2'
    # APK package this workload drives.
    package_names = ['com.imangi.templerun2']

    description = """
    Temple Run 2 game.

    Sequel to Temple Run. 3D on-the-rails racer.
    """

    # Surface view identifier of the Unity activity -- presumably consumed
    # by ApkReventWorkload for frame/FPS collection; confirm against the
    # base class.
    view = 'SurfaceView - com.imangi.templerun2/com.imangi.unityactivity.ImangiUnityNativeActivity'
c1728p9/pyOCD | refs/heads/master | test/blank_test.py | 12 | #!/usr/bin/env python
"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os, sys
from time import sleep
from random import randrange
import math
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)
import pyOCD
from pyOCD.board import MbedBoard
import logging
logging.basicConfig(level=logging.INFO)
# print() call syntax (single argument) behaves identically under Python 2
# and Python 3, unlike the former `print x` statements which are a
# SyntaxError on Python 3.
print("\r\n\r\n------ Test attaching to locked board ------")
for i in range(0, 10):
    with MbedBoard.chooseBoard() as board:
        # Erase and then reset - This locks Kinetis devices
        board.flash.init()
        board.flash.eraseAll()
        board.target.reset()

print("\r\n\r\n------ Testing Attaching to board ------")
for i in range(0, 100):
    with MbedBoard.chooseBoard() as board:
        board.target.halt()
        sleep(0.01)
        board.target.resume()
        sleep(0.01)

print("\r\n\r\n------ Flashing new code ------")
with MbedBoard.chooseBoard() as board:
    binary_file = os.path.join(parentdir, 'binaries', board.getTestBinary())
    board.flash.flashBinary(binary_file)

print("\r\n\r\n------ Testing Attaching to regular board ------")
for i in range(0, 10):
    with MbedBoard.chooseBoard() as board:
        board.target.resetStopOnReset()
        board.target.halt()
        sleep(0.2)
        board.target.resume()
        sleep(0.2)
|
evandrix/Splat | refs/heads/master | code/mypkg/gui/GraphicsView.py | 1 | import sys
from PyQt4 import QtGui, QtCore
class GraphicsView(QtGui.QGraphicsView):
    """QGraphicsView displaying a pixmap with keyboard zoom shortcuts.

    Key '1' scales the view so the pixmap fits the window; key '2'
    restores the original 1:1 scale.
    """

    def __init__(self, pixmap, scene, parent, *args):
        print >> sys.stderr, "GraphicsView::init()"
        QtGui.QGraphicsView.__init__(self, scene)
        self.scene = scene
        self.pixmap = pixmap
        self.win = parent
        self.zoomLevel = 1.0        # current scale factor relative to 1:1
        self.setupActions()
        QtCore.QMetaObject.connectSlotsByName(self)

    def setupActions(self):
        """
        1: zoom fit
        2: zoom org
        """
        zoom_fit = QtGui.QAction(self)
        zoom_fit.setShortcuts([QtGui.QKeySequence.fromString('1')])
        zoom_fit.triggered.connect(self.zoom_fit)
        self.addAction(zoom_fit)

        zoom_org = QtGui.QAction(self)
        zoom_org.setShortcuts([QtGui.QKeySequence.fromString('2')])
        zoom_org.triggered.connect(self.zoom_org)
        self.addAction(zoom_org)

    def zoom_fit(self, *ignore):
        """Scale the view so the whole pixmap fits the current window."""
        print >> sys.stderr, "GraphicsView::zoom_fit(#1)"
        if self.pixmap:
            winSize, imgSize = self.size(), self.pixmap.size()
            hZoom = 1.0*winSize.width ()/imgSize.width ()
            vZoom = 1.0*winSize.height()/imgSize.height()
            zoomLevel = min(hZoom, vZoom)       # smaller factor keeps aspect ratio
            scaleFactor = zoomLevel/self.zoomLevel
            self.scale(scaleFactor, scaleFactor)
            self.centerOn(winSize.width()/2, winSize.height()/2)
            self.zoomLevel = zoomLevel
            print >> sys.stderr, "GraphicsView::zoom_fit(#1, %f)" % self.zoomLevel

    def zoom_org(self, *ignore):
        """Restore the original 1:1 zoom level."""
        print >> sys.stderr, "GraphicsView::zoom_org(#2)"
        scaleFactor = 1.0/self.zoomLevel
        self.scale(scaleFactor, scaleFactor)
        self.centerOn(self.size().width()/2, self.size().height()/2)
        self.zoomLevel = 1.0
        print >> sys.stderr, "GraphicsView::zoom_org(#2, %f)" % self.zoomLevel

    def resizeEvent(self, event):
        # NOTE(review): the base-class resizeEvent is not invoked here --
        # confirm suppressing default resize handling is intentional.
        print >> sys.stderr, "GraphicsView::resizeEvent()"
|
yfdyh000/kuma | refs/heads/master | kuma/wiki/migrations/0021_document_attachments_populated.py | 4 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a boolean ``attachments_populated`` field (default False) to Document."""

    dependencies = [
        ('wiki', '0020_add_is_linked_is_original'),
    ]

    operations = [
        migrations.AddField(
            model_name='document',
            name='attachments_populated',
            field=models.BooleanField(default=False),
        ),
    ]
|
opennode/nodeconductor-assembly-waldur | refs/heads/develop | src/waldur_auth_saml2/tests/test_backend.py | 1 | from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.test import TestCase
from waldur_auth_saml2.auth import WaldurSaml2Backend
User = get_user_model()
class WaldurSaml2BackendTest(TestCase):
    """Check that SAML2 authentication enforces e-mail uniqueness."""

    def test_email_should_be_unique_positive(self):
        """The username already owning the e-mail authenticates successfully."""
        User.objects.create(username='john', email='john@example.com')
        attribute_mapping = {
            'uid': ('username',),
            'mail': ('email',),
        }
        attributes = {
            'uid': ['john'],
            'mail': ['john@example.com'],
        }
        backend = WaldurSaml2Backend()
        user = backend.authenticate(
            None, session_info={'ava': attributes}, attribute_mapping=attribute_mapping,
        )
        self.assertIsNotNone(user)

    def test_email_should_be_unique_negative(self):
        """An e-mail already taken by a different username raises ValidationError."""
        User.objects.create(username='harry', email='john@example.com')
        attribute_mapping = {
            'uid': ('username',),
            'mail': ('email',),
        }
        attributes = {
            'uid': ['john'],
            'mail': ['john@example.com'],
        }
        backend = WaldurSaml2Backend()
        self.assertRaises(
            ValidationError,
            backend.authenticate,
            None,
            session_info={'ava': attributes},
            attribute_mapping=attribute_mapping,
        )
|
Distrotech/pygobject | refs/heads/distrotech-pygobject | gi/repository/__init__.py | 23 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2009 Johan Dahlin <johan@gnome.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from __future__ import absolute_import

import sys

from ..importer import DynamicImporter

# Register the lazy importer so that ``from gi.repository import Foo``
# resolves typelib namespaces on demand via Python's import machinery.
sys.meta_path.append(DynamicImporter('gi.repository'))

# Keep the module namespace clean: only the importer side effect matters.
del DynamicImporter
del sys
|
lawnmowerlatte/hyper | refs/heads/development | tasks.py | 7 | import json
import os
from binascii import hexlify
from invoke import task
from hyper.http20.hpack import Encoder
@task
def hpack():
    """
    Generate HPACK test data suitable for use with
    https://github.com/http2jp/hpack-test-case

    Reads every story file from ``test_fixtures/raw-data`` and writes, for
    each one, a JSON file (same basename, current directory) containing:

    - 'description': a fixed provenance string.
    - 'cases': one object per input case with keys
      'header_table_size' (size of the encoder's header table),
      'headers' (the headers as JSON objects), and
      'wire' (the encoder output in hexadecimal).

    NOTE(review): the hpack-test-case format also mentions a 'draft' key;
    this task does not emit one — confirm whether that is still required.
    """
    # A generator that contains the paths to all the raw data files and their
    # names.
    raw_story_files = (
        (os.path.join('test_fixtures/raw-data', name), name)
        for name in os.listdir('test_fixtures/raw-data')
    )

    # For each file, build our output.
    for source, outname in raw_story_files:
        with open(source, 'rb') as f:
            indata = json.loads(f.read())

        # Prepare the output and the encoder.
        output = {
            'description': 'Encoded by hyper. See github.com/Lukasa/hyper for more information.',
            'cases': []
        }
        e = Encoder()

        for case in indata['cases']:
            outcase = {'header_table_size': e.header_table_size}
            outcase['headers'] = case['headers']
            headers = []
            for header in case['headers']:
                # NOTE(review): ``dict.keys()[0]`` is Python 2 only; each
                # header object is assumed to hold exactly one name/value.
                key = header.keys()[0]
                header = (key, header[key])
                headers.append(header)
            # Encoder state carries over between cases on purpose: the wire
            # output depends on the evolving dynamic table.
            outcase['wire'] = hexlify(e.encode(headers))
            output['cases'].append(outcase)

        with open(outname, 'wb') as f:
            f.write(json.dumps(output, sort_keys=True,
                               indent=2, separators=(',', ': ')))
|
voilet/cmdb | refs/heads/master | assets/views.py | 1 | # coding:utf-8
import ast
import nmap
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from assets.models import Host, IDC, Service, Line, Project, HostRecord
from django.views.decorators.csrf import csrf_protect
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.db.models import Q
from django.views.decorators.csrf import csrf_exempt
from forms import HostForm, IdcForm
from new_api import SaltApi, pages, sort_ip_list, get_mask_ip
from mysite.settings import salt_cdn_url, salt_center_url, salt_user, salt_passwd, zabbix_on
from assets.models import SERVER_STATUS, Server_System, ZabbixRecord, IpList
from zabbix import zabbix_host_add, zabbix_group_add, zabbix_group_del, zabbix_get_graph, zabbix_get_item, \
zabbix_get_trigger, zabbix_get_alert
from pdf import rpt, excel_output
from assets.models import Project
from assets.forms import Project_docForm
from django.http import HttpResponse
class RaiseError(Exception):
    """Module-level custom exception.

    NOTE(review): not raised anywhere in the visible code of this module.
    """
    pass
def my_render(template, data, request):
    """Render *template* with *data*, always attaching a RequestContext."""
    return render_to_response(template, data, context_instance=RequestContext(request))
@login_required
def httperror(request, emg):
    """Render the generic error page with *emg* as the message text."""
    # The template picks up ``message`` through locals().
    message = emg
    return render_to_response('error.html', locals())
def get_diff(obj1, obj2):
    """Diff a form's initial data against the submitted POST QueryDict.

    :param obj1: plain dict of the form's initial values.
    :param obj2: Django QueryDict (``iterlists`` is QueryDict API) with the
                 submitted values; multi-valued keys come back as lists.
    :returns: dict mapping changed field name -> [old_value, new_value].

    Multi-select fields ('service', 'business') are compared as sorted
    lists; hardware fields are skipped; everything else is compared as
    strings.  (Python 2 module: ``iterlists`` and list-returning
    ``dict.items`` are relied upon.)
    """
    fields = ['service', 'business']
    # Hardware facts are maintained by salt, not by the edit form.
    no_check_fields = ['cpu', 'core_num', 'hard_disk', 'memory']
    d1, d2 = obj1, dict(obj2.iterlists())
    info = {}
    for k, v in d1.items():
        if k in fields:
            # Multi-valued: keep the whole list, default to empty string.
            if d2.get(k):
                d2_value = d2[k]
            else:
                d2_value = u''
        elif k in no_check_fields:
            continue
        else:
            # Single-valued: QueryDict stores it as a one-element list.
            d2_value = d2[k][0]
        if not v:
            # Normalise falsy initials to u'' — except a literal False,
            # which must survive the string comparison as "False".
            if v==False:
                pass
            else:
                v = u''
        if isinstance(v,list):
            # Order-insensitive list comparison.
            v.sort()
            if not d2_value:
                d2_value = []
            d2_value.sort()
            if v != d2_value:
                info.update({k: [v, d2_value]})
        else:
            if str(v) != str(d2_value):
                info.update({k: [v, d2_value]})
    # Drop noise entries where None merely became the empty string.
    # NOTE(review): popping while iterating items() is safe only on
    # Python 2, where items() returns a list.
    for k, v in info.items():
        if v == [None, u'']:
            info.pop(k)
    return info
def db_to_record(username, host, info):
    """Translate a field diff into human-readable history lines and store them.

    :param username: operator name recorded on the HostRecord.
    :param host: Host instance the change applies to.
    :param info: dict of field -> [old, new] as produced by get_diff()
                 or salt_record().

    Foreign-key and m2m fields (idc, service, business, vm) are resolved to
    display names; everything else is stringified.  A HostRecord row is
    created only when at least one line was produced.  The generated text
    is user-facing Chinese and must not be altered.
    """
    text_list = []
    for k, v in info.items():
        field = Host._meta.get_field_by_name(k)[0].verbose_name
        if k == 'idc':
            # old/new hold IDC uuids; fall back to u'无' ("none") if missing.
            old = IDC.objects.filter(uuid=v[0])
            new = IDC.objects.filter(uuid=v[1])
            if old:
                name_old = old[0].name
            else:
                name_old = u'无'
            if new:
                name_new = new[0].name
            else:
                name_new = u'无'
            text = field + u'由 ' + name_old + u' 更改为 ' + name_new
        elif k == 'service':
            # old/new are lists of Service uuids.
            old, new = [], []
            for s in v[0]:
                service_name = Service.objects.get(uuid=s).name
                old.append(service_name)
            for s in v[1]:
                service_name = Service.objects.get(uuid=s).name
                new.append(service_name)
            text = field + u'由 ' + ','.join(old) + u' 更改为 ' + ','.join(new)
        elif k == 'business':
            # old/new are lists of Project uuids.
            old, new = [], []
            for s in v[0]:
                project_name = Project.objects.get(uuid=s).service_name
                old.append(project_name)
            for s in v[1]:
                project_name = Project.objects.get(uuid=s).service_name
                new.append(project_name)
            text = field + u'由 ' + ','.join(old) + u' 更改为 ' + ','.join(new)
        elif k == 'vm':
            # Parent physical host change; shown by its eth1 address.
            old = Host.objects.filter(uuid=v[0])
            new = Host.objects.filter(uuid=v[1])
            if old:
                name_old = old[0].eth1
            else:
                name_old = u'无'
            if new:
                name_new = new[0].eth1
            else:
                name_new = u'无'
            text = field + u'父主机由 ' + name_old + u' 更改为 ' + name_new
        else:
            text = field + u'由 ' + str(v[0]) + u' 更改为 ' + str(v[1])
        text_list.append(text)
    if len(text_list) != 0:
        HostRecord.objects.create(host=host, user=username, content=text_list)
def salt_record(host, salt_data):
    """Build a change set between a stored host row and fresh salt facts.

    :param host: object whose ``__dict__`` holds the currently stored values.
    :param salt_data: dict of the values just collected from salt.
    :returns: dict mapping each tracked field to ``[old, new]`` for every
              field whose stored value differs from the salt-reported one;
              a field missing on either side compares as ``None``.
    """
    tracked = ('eth1', 'mac', 'system', 'system_cpuarch', 'server_sn',
               'system_version', 'cpu', 'core_num', 'hard_disk', 'memory',
               'brand')
    stored = host.__dict__
    return dict(
        (name, [stored.get(name), salt_data.get(name)])
        for name in tracked
        if stored.get(name) != salt_data.get(name)
    )
@login_required
def host_add(request):
    """Create a single host from the HostForm.

    GET renders the empty form; POST validates, normalises the MAC address,
    links a parent physical host when 'physics' is given, and optionally
    registers the host in zabbix.  Templates read context via locals(),
    so local names here are part of the template contract.
    """
    uf = HostForm()
    projects = Project.objects.all()
    services = Service.objects.all()
    if request.method == 'POST':
        uf_post = HostForm(request.POST)
        # 'physics' carries the eth1 of the parent physical machine (VMs).
        physics = request.POST.get('physics', '')
        ip = request.POST.get('eth1', '')
        # Reject duplicate primary IPs before validating the form.
        if Host.objects.filter(eth1=ip):
            emg = u'添加失败, 该IP %s 已存在!' % ip
            return my_render('assets/host_add.html', locals(), request)
        if uf_post.is_valid():
            zw = uf_post.save(commit=False)
            # Normalise MAC separators to '-' and strip stray spaces.
            zw.mac = str(request.POST.getlist("mac")[0]).replace(':', '-').strip(" ")
            status = uf_post.cleaned_data['status']
            if physics:
                # type 1 == virtual machine, linked to its parent host.
                physics_host = get_object_or_404(Host, eth1=physics)
                zw.vm = physics_host
                zw.type = 1
            else:
                zw.type = 0
            zw.save()
            uf_post.save_m2m()
            # status 1 appears to mean "in service" — only then monitor it.
            if zabbix_on and status == 1:
                zabbix_host_add(request)
            smg = u'主机%s添加成功!' % ip
            return render_to_response('assets/host_add.html', locals(), context_instance=RequestContext(request))
    return render_to_response('assets/host_add.html', locals(), context_instance=RequestContext(request))
@login_required
def host_edit(request):
    """Edit an existing host (looked up by ?uuid=) via HostForm.

    On a valid POST the change set is computed with get_diff() and written
    to the host's history via db_to_record().  Templates read context via
    locals(), so local names here are part of the template contract.
    """
    uuid = request.GET.get('uuid')
    host = get_object_or_404(Host, uuid=uuid)
    uf = HostForm(instance=host)
    # Offer only the projects/services the host is not already linked to.
    project_all = Project.objects.all()
    project_host = host.business.all()
    projects = [p for p in project_all if p not in project_host]
    service_all = Service.objects.all()
    service_host = host.service.all()
    services = [s for s in service_all if s not in service_host]
    username = request.user.username
    if request.method == 'POST':
        physics = request.POST.get('physics', '')
        uf_post = HostForm(request.POST, instance=host)
        if uf_post.is_valid():
            zw = uf_post.save(commit=False)
            # Normalise MAC separators to '-' and strip stray spaces.
            zw.mac = str(request.POST.getlist("mac")[0]).replace(':', '-').strip(" ")
            # Copy makes the immutable QueryDict writable so 'vm' can be
            # injected for the later get_diff() comparison.
            request.POST = request.POST.copy()
            if physics:
                physics_host = get_object_or_404(Host, eth1=physics)
                request.POST['vm'] = physics_host.uuid
                if host.vm:
                    # Re-link only if the parent actually changed.
                    if str(host.vm.eth1) != str(physics):
                        zw.vm = physics_host
                else:
                    zw.vm = physics_host
                zw.type = 1
            else:
                request.POST['vm'] = ''
                zw.type = 0
            zw.save()
            uf_post.save_m2m()
            new_host = get_object_or_404(Host, uuid=uuid)
            # Record what changed, based on the form's initial data.
            info = get_diff(uf_post.__dict__.get('initial'), request.POST)
            db_to_record(username, host, info)
            return HttpResponseRedirect('/assets/host_detail/?uuid=%s' % uuid)
    return render_to_response('assets/host_edit.html', locals(), context_instance=RequestContext(request))
@login_required
@csrf_exempt
def host_edit_batch(request):
    """Apply a set of field changes to many hosts at once.

    Host uuids arrive comma-separated in ?uuid=; the new values arrive in
    POST.  Only non-empty POST fields are applied, and for each host a
    HostRecord history entry is written when something actually changed.
    Templates read context via locals().
    """
    uf = HostForm()
    username = request.user.username
    projects = Project.objects.all()
    services = Service.objects.all()
    if request.method == 'POST':
        ids = str(request.GET.get('uuid', ''))
        env = request.POST.get('env', '')
        idc = request.POST.get('idc', '')
        brand = request.POST.get('brand', '')
        business = request.POST.getlist('business', '')
        services = request.POST.getlist('service', '')
        cabinet = request.POST.get('cabinet', '')
        editor = request.POST.get('editor', '')
        uuid_list = ids.split(",")
        for uuid in uuid_list:
            record_list = []  # human-readable change lines for this host
            host = get_object_or_404(Host, uuid=uuid)
            if env:
                # u'无' ("none") is the display value for an empty field.
                if not host.env:
                    info = u'无'
                else:
                    info = host.env
                if env != host.env:
                    text = u'环境' + u'由 ' + info + u' 更改为 ' + env
                    record_list.append(text)
                    host.env = env
            if idc:
                get_idc = get_object_or_404(IDC, uuid=idc)
                # NOTE(review): this compares an IDC instance with a string
                # (get_idc.name), so it is always unequal — confirm intent.
                if host.idc != get_idc.name:
                    if not host.idc:
                        text = u'IDC' + u'由 ' + "none" + u' 更改为 ' + get_idc.name
                    else:
                        text = u'IDC' + u'由 ' + host.idc.name + u' 更改为 ' + get_idc.name
                    record_list.append(text)
                    host.idc = get_idc
            if brand:
                if brand != host.brand:
                    text = u'硬件厂商' + u'由 ' + host.brand + u' 更改为 ' + brand
                    record_list.append(text)
                    host.brand = brand
            if business:
                # Compare current vs submitted project names (order-sensitive).
                old, new, project_list = [], [], []
                for s in host.business.all():
                    project_name = s.service_name
                    old.append(project_name)
                for s in business:
                    project = Project.objects.get(uuid=s)
                    project_name = project.service_name
                    new.append(project_name)
                    project_list.append(project)
                if old != new:
                    text = u'所属业务' + u'由 ' + ','.join(old) + u' 更改为 ' + ','.join(new)
                    record_list.append(text)
                    host.business = project_list
            if services:
                # Same pattern for the running-services m2m relation.
                old, new, service_list = [], [], []
                for s in host.service.all():
                    service_name = s.name
                    old.append(service_name)
                for s in services:
                    service = Service.objects.get(uuid=s)
                    service_name = service.name
                    new.append(service_name)
                    service_list.append(service)
                if old != new:
                    text = u'运行服务' + u'由 ' + ','.join(old) + u' 更改为 ' + ','.join(new)
                    record_list.append(text)
                    host.service = service_list
            if cabinet:
                if not host.cabinet:
                    info = u'无'
                else:
                    info = host.cabinet
                if cabinet != host.cabinet:
                    text = '机柜号' + u'由 ' + info + u' 更改为 ' + cabinet
                    record_list.append(text)
                    host.cabinet = cabinet
            if editor:
                if editor != host.editor:
                    text = '备注' + u'由 ' + host.editor + u' 更改为 ' + editor
                    record_list.append(text)
                    host.editor = editor
            # Persist and log only if something actually changed.
            if len(record_list) != 0:
                host.save()
                HostRecord.objects.create(host=host, user=username, content=record_list)
        return my_render('assets/host_edit_batch_ok.html', locals(), request)
    return my_render('assets/host_edit_batch.html', locals(), request)
@login_required
def host_detail(request):
    """Show one host plus its change history, looked up by ?uuid= or ?ip=.

    NOTE(review): if neither parameter is supplied, ``host`` is unbound and
    the HostRecord filter raises NameError — confirm callers always pass one.
    """
    uuid = request.GET.get('uuid', '')
    ip = request.GET.get('ip', '')
    if uuid:
        host = get_object_or_404(Host, uuid=uuid)
    elif ip:
        host = get_object_or_404(Host, eth1=ip)
    # Newest history entries first.
    host_record = HostRecord.objects.filter(host=host).order_by('-time')
    return render_to_response('assets/host_detail.html', locals(), context_instance=RequestContext(request))
@login_required
def host_del(request):
    """Soft-delete a host: blank its fields and park it in the scrap depot.

    The row is kept (status=3) rather than deleted; network/system fields
    are cleared and the host is moved to the IDC named u'报废库房'
    ("scrap warehouse") if that IDC exists.
    NOTE(review): duplicates the per-host logic of host_del_batch — a shared
    helper would keep the two in sync.
    """
    uuid = request.GET.get('uuid', '')
    host = get_object_or_404(Host, uuid=uuid)
    host.status = 3
    host.eth1 = ''
    host.eth2 = ''
    # node_name must stay unique, so reuse the uuid as a placeholder.
    host.node_name = host.uuid
    host.internal_ip = ''
    host.system = ''
    host.system_cpuarch = ''
    host.system_version = ''
    host.cabinet = ''
    host.server_cabinet_id = 0
    host.env = ''
    host.number = ''
    host.switch_port = ''
    idc_ = IDC.objects.filter(name=u"报废库房")
    if idc_.exists():
        idc_ = idc_.first()
    else:
        idc_ = None
    host.idc = idc_
    # Detach all m2m relations.
    host.business.clear()
    host.service.clear()
    host.save()
    return HttpResponseRedirect('/assets/host_list/')
@login_required
def host_del_batch(request):
    """Soft-delete several hosts (comma-separated uuids in POST 'ids').

    Per-host behaviour is identical to host_del(): keep the row with
    status=3, clear identifying fields, move to the scrap-depot IDC.
    """
    ids = str(request.POST.get('ids'))
    for uuid in ids.split(','):
        host = get_object_or_404(Host, uuid=uuid)
        host.status = 3
        host.eth1 = ''
        host.eth2 = ''
        # node_name must stay unique, so reuse the uuid as a placeholder.
        host.node_name = host.uuid
        host.internal_ip = ''
        host.system = ''
        host.system_cpuarch = ''
        host.system_version = ''
        host.cabinet = ''
        host.server_cabinet_id = 0
        host.env = ''
        host.number = ''
        host.switch_port = ''
        idc_ = IDC.objects.filter(name=u"报废库房")
        if idc_.exists():
            idc_ = idc_.first()
        else:
            idc_ = None
        host.idc = idc_
        host.business.clear()
        host.service.clear()
        host.save()
    return HttpResponseRedirect('/assets/host_list/')
@login_required
def host_list(request):
    """Paginated listing of all hosts plus the filter-widget datasets.

    Every local here is exposed to the template through locals(); the
    counts feed the physical/VM summary widgets.
    """
    hosts = Host.objects.all().order_by("-eth1")
    idcs = IDC.objects.filter()
    lines = Line.objects.all()
    server_type = Project.objects.all()
    services = Service.objects.all()
    brands = Server_System
    server_status = SERVER_STATUS
    server_list_count = hosts.count()
    # vm is the FK to the parent physical host: null => physical machine.
    physics = Host.objects.filter(vm__isnull=True).count()
    vms = Host.objects.filter(vm__isnull=False).count()
    contact_list, p, contacts, page_range, current_page, show_first, show_end = pages(hosts, request)
    return render_to_response('assets/host_list.html', locals(), context_instance=RequestContext(request))
@login_required
@csrf_protect
def host_add_batch_bak(request):
    """Legacy batch host import (whitespace-separated line format).

    Each input line: ip hostname idc service brand comment pip, where
    'service' and 'pip' are Python-literal lists parsed via
    ast.literal_eval.  Superseded by host_add_batch (kept as '_bak').
    """
    if request.method == 'POST':
        multi_hosts = request.POST.get('batch').split('\n')
        for host in multi_hosts:
            # An empty line terminates the whole import, not just this row.
            if host == '':
                break
            ip, hostname, idc, service, brand, comment, pip = host.split()
            idc = get_object_or_404(IDC, name=idc)
            services = []
            for s in ast.literal_eval(service):
                services.append(get_object_or_404(Service, name=s.strip()))
            if Host.objects.filter(eth1=ip):
                emg = u'添加失败, 该IP%s已存在' % ip
                return my_render('assets/host_add_batch.html', locals(), request)
            if pip != '[]':
                # Non-empty 'pip' names the parent physical host's eth1.
                pip = Host.objects.get(eth1=ast.literal_eval(pip)[0])
                asset = Host(node_name=hostname, eth1=ip, idc=idc, brand=brand, editor=comment, vm=pip)
            else:
                asset = Host(node_name=hostname, eth1=ip, idc=idc, brand=brand, editor=comment)
            asset.save()
            # m2m can only be assigned after the row has a primary key.
            asset.service = services
            asset.save()
        smg = u'批量添加成功.'
        return my_render('assets/host_add_batch.html', locals(), request)
    return my_render('assets/host_add_batch.html', locals(), request)
@login_required
@csrf_protect
def host_add_batch(request):
    """Batch host import: one host per line, 11 fields separated by '@'.

    Line format:
    node_name@cpu@memory@hard_disk@number@brand@eth1@eth2@internal_ip@idc@comment

    Fix: removed four leftover ``print`` debug statements that dumped every
    submitted line (and its split form) to stdout on each import.
    NOTE(review): the 'idc' field is parsed but never stored on the Host —
    confirm whether it should be resolved and assigned like in
    host_add_batch_bak.
    """
    if request.method == 'POST':
        multi_hosts = request.POST.get('batch').split('\n')
        for host in multi_hosts:
            # An empty line terminates the whole import, not just this row.
            if host == '':
                break
            node_name, cpu, memory, hard_disk, number, brand, eth1, eth2, internal_ip, idc, comment, = host.split('@')
            asset = Host(node_name=node_name, number=number, brand=brand, cpu=cpu,
                         memory=memory, hard_disk=hard_disk, eth1=eth1,
                         eth2=eth2, internal_ip=internal_ip, editor=comment)
            asset.save()
        smg = u'批量添加成功.'
        return my_render('assets/host_add_batch.html', locals(), request)
    return my_render('assets/host_add_batch.html', locals(), request)
@login_required
def idc_add(request):
    """Create an IDC (data center); optionally create its zabbix host group.

    ?init=true is used by the first-run setup wizard: on success it chains
    to the server-type creation page instead of the IDC list.
    """
    if request.method == 'POST':
        init = request.GET.get("init", False)
        uf = IdcForm(request.POST)
        if uf.is_valid():
            idc_name = uf.cleaned_data['name']
            # Enforce unique IDC names before saving.
            if IDC.objects.filter(name=idc_name):
                emg = u'添加失败, 此IDC %s 已存在!' % idc_name
                return my_render('assets/idc_add.html', locals(), request)
            uf.save()
            if zabbix_on:
                # zabbix_group_add returns 1 on success.
                ret = zabbix_group_add(idc_name)
                if ret != 1:
                    emg = u'添加zabbix主机组 %s 失败!' % idc_name
                    return my_render('assets/idc_add.html', locals(), request)
            if not init:
                return HttpResponseRedirect("/assets/idc_list/")
            else:
                return HttpResponseRedirect('/assets/server/type/add/?init=true')
    else:
        uf = IdcForm()
    return render_to_response('assets/idc_add.html', locals(), context_instance=RequestContext(request))
@login_required
def idc_list(request):
    """List all IDCs (template also shows the project/server types)."""
    idcs = IDC.objects.all()
    server_type = Project.objects.all()
    return render_to_response('assets/idc_list.html', locals(), context_instance=RequestContext(request))
@login_required
def idc_edit(request):
    """Edit an IDC looked up by ?uuid= via IdcForm."""
    uuid = request.GET.get('uuid', '')
    idc = get_object_or_404(IDC, uuid=uuid)
    if request.method == 'POST':
        uf = IdcForm(request.POST, instance=idc)
        if uf.is_valid():
            uf.save()
            return HttpResponseRedirect("/assets/idc_list/")
    else:
        uf = IdcForm(instance=idc)
    return my_render('assets/idc_edit.html', locals(), request)
@login_required
def idc_del(request):
    """Delete an IDC and, when zabbix is enabled, its zabbix host group."""
    uuid = request.GET.get('uuid', '')
    idc = get_object_or_404(IDC, uuid=uuid)
    idc_name = idc.name
    if zabbix_on:
        # Remove the matching zabbix group before the DB row disappears.
        zabbix_group_del(idc_name)
    idc.delete()
    return HttpResponseRedirect('/assets/idc_list/')
@login_required
@csrf_exempt
def host_search(request):
    """Filtered/keyword host search, used both as an AJAX fragment and a page.

    Query parameters: change_idc / change_business / change_service /
    change_brand / change_status / change_type narrow the queryset;
    'keyword' does a broad OR search; '_search' + 'name' (pdf|excel)
    trigger report export.  Results are deduplicated and re-ordered by IP.
    Templates read context via locals().
    """
    idcs = IDC.objects.filter()
    lines = Line.objects.all()
    server_type = Project.objects.all()
    services = Service.objects.all()
    brands = Server_System
    server_status = SERVER_STATUS
    # NOTE(review): duplicate of the 'lines' assignment above — redundant.
    lines = Line.objects.all()
    businesses = Project.objects.all()
    idc_name = request.GET.get('change_idc', '')
    business_name = request.GET.get('change_business', '')
    service_name = request.GET.get('change_service', '')
    brand_name = request.GET.get('change_brand', '')
    # NOTE(review): self-assignment below is a no-op — candidate for removal.
    if brand_name:
        brand_name = brand_name
    status = request.GET.get('change_status', False)
    if status:
        status = int(status)
    else:
        # Empty string makes status__contains match everything.
        status = ""
    type = request.GET.get('change_type', '')
    # select_number == 0 means "no filter selected at all".
    if not idc_name and not type and not status and not brand_name and business_name == 'all' \
            and service_name == 'all':
        select_number = 0
    else:
        select_number = 1
    keyword = request.GET.get('keyword', '')
    s_url = request.get_full_path()
    # Four filter shapes depending on whether business/service are 'all'.
    if business_name == 'all' and service_name != 'all':
        ser = Service.objects.get(name=service_name)
        hosts = Host.objects.filter(idc__name__contains=idc_name,
                                    service=ser,
                                    brand__contains=brand_name,
                                    status__contains=status,
                                    type__contains=type)
    elif service_name == 'all' and business_name != 'all':
        business = Project.objects.get(service_name=business_name)
        hosts = Host.objects.filter(idc__name__contains=idc_name,
                                    business=business,
                                    brand__contains=brand_name,
                                    status__contains=status,
                                    type__contains=type)
    elif business_name == 'all' and service_name == 'all':
        hosts = Host.objects.filter(idc__name__contains=idc_name,
                                    brand__contains=brand_name,
                                    status__contains=status,
                                    type__contains=type)
    else:
        ser = Service.objects.get(name=service_name)
        business = Project.objects.get(service_name=business_name)
        hosts = Host.objects.filter(idc__name__contains=idc_name,
                                    business=business,
                                    service=ser,
                                    brand__contains=brand_name,
                                    status__contains=status,
                                    type__contains=type)
    # Keyword narrows the filtered set; with no other filters it searches
    # the whole Host table instead.
    if keyword and select_number == 1:
        hosts = hosts.filter(Q(node_name__contains=keyword) |
                             Q(idc__name__contains=keyword) |
                             Q(eth1__contains=keyword) |
                             Q(eth2__contains=keyword) |
                             Q(internal_ip__contains=keyword) |
                             Q(brand__contains=keyword) |
                             Q(number__contains=keyword) |
                             Q(editor__contains=keyword) |
                             Q(business__service_name__contains=keyword) |
                             Q(service__name__contains=keyword) |
                             Q(Services_Code__contains=keyword) |
                             Q(server_sn__contains=keyword) |
                             Q(cpu__contains=keyword) |
                             Q(memory__contains=keyword) |
                             Q(hard_disk__contains=keyword))
    elif keyword:
        hosts = Host.objects.filter(Q(node_name__contains=keyword) |
                                    Q(idc__name__contains=keyword) |
                                    Q(eth1__contains=keyword) |
                                    Q(eth2__contains=keyword) |
                                    Q(internal_ip__contains=keyword) |
                                    Q(brand__contains=keyword) |
                                    Q(number__contains=keyword) |
                                    Q(editor__contains=keyword) |
                                    Q(business__service_name__contains=keyword) |
                                    Q(service__name__contains=keyword) |
                                    Q(Services_Code__contains=keyword) |
                                    Q(server_sn__contains=keyword) |
                                    Q(cpu__contains=keyword) |
                                    Q(memory__contains=keyword) |
                                    Q(hard_disk__contains=keyword))
    # m2m joins can return duplicates; dedupe, then re-sort by IP.
    hosts = list(set(hosts))
    hosts_dic = {}
    hosts_lis = []
    for host in hosts:
        if host.eth1:
            hosts_dic[host.eth1] = host
            hosts_lis.append(host.eth1)
        elif host.eth2:
            hosts_dic[host.eth2] = host
            hosts_lis.append(host.eth2)
    # sort_ip_list sorts in place; rebuild hosts in IP order.
    sort_ip_list(hosts_lis)
    hosts = []
    for eth1 in hosts_lis:
        hosts.append(hosts_dic[eth1])
    # Optional export of the current result set.
    search_status = request.GET.get("_search", False)
    search_output_name = request.GET.get("name", False)
    if search_status and search_output_name:
        if search_output_name == 'pdf':
            s = rpt(hosts)
            if s:
                data = "pdf"
                return render_to_response('assets/download.html', locals(), context_instance=RequestContext(request))
        if search_output_name == 'excel':
            s = excel_output(hosts)
            if s:
                # (sic) 'execl' is the value the template expects.
                data = "execl"
                return render_to_response('assets/download.html', locals(), context_instance=RequestContext(request))
    contact_list, p, contacts, page_range, current_page, show_first, show_end = pages(hosts, request)
    if 'ajax' in request.get_full_path():
        # Rewrite pagination links so they point back at this view.
        s_url = s_url.replace('change_info_ajax', 'host_search')
        return my_render('assets/host_info_ajax.html', locals(), request)
    else:
        # Full-page fallback: render the complete host list with search flag.
        hosts = Host.objects.all()
        idcs = IDC.objects.filter()
        lines = Line.objects.all()
        server_type = Project.objects.all()
        services = Service.objects.all()
        brands = Server_System
        server_status = SERVER_STATUS
        server_list_count = hosts.count()
        physics = Host.objects.filter(vm__isnull=True).count()
        vms = Host.objects.filter(vm__isnull=False).count()
        search = 1
        return my_render('assets/host_list.html', locals(), request)
@login_required
def host_update(request):
    """Refresh one host's hardware/network facts from its salt minion.

    Pulls grains via the salt API (CDN vs core master chosen by IDC type),
    records the diff in the host's history, then overwrites the stored
    fields with the fresh values.
    """
    uuid = request.GET.get('uuid', '')
    host = get_object_or_404(Host, uuid=uuid)
    hostname = str(host.node_name)
    idc_type = host.idc.get_type_display()
    # NOTE(review): if idc_type is neither 'CDN' nor u'核心', salt_url is
    # unbound and the SaltApi call raises NameError — confirm coverage.
    if idc_type == 'CDN':
        salt_url = salt_cdn_url
    elif idc_type == '核心':
        salt_url = salt_center_url
    salt_api = SaltApi(url=salt_url, username=salt_user, password=salt_passwd)
    grains = salt_api.remote_noarg_exec(hostname, 'grains.items')
    if len(grains) == 0:
        return httperror(request, '此主机salt-minion无数据返回, 请确定minion是否正常运行.')
    ip_info = grains['ip_interfaces']
    mac_info = grains['hwaddr_interfaces']
    # Prefer bonded/bridged interfaces over the raw NIC for the primary IP.
    if 'eth0' in ip_info:
        if 'bond0' in ip_info:
            eth1 = ip_info['bond0'][0]
            mac = mac_info["bond0"]
        elif 'br0' in ip_info:
            eth1 = ip_info['br0'][0]
            mac = mac_info["br0"]
        else:
            eth1 = ip_info["eth0"][0]
            mac = mac_info['eth0']
    elif 'em1' in ip_info:
        if 'bond0' in ip_info:
            eth1 = ip_info['bond0'][0]
            mac = mac_info["bond0"]
        elif 'br0' in ip_info:
            eth1 = ip_info['br0'][0]
            mac = mac_info["br0"]
        else:
            eth1 = ip_info["em1"][0]
            mac = mac_info["em1"]
    else:
        # NOTE(review): in this fallback branch ``mac`` is never assigned,
        # so the later ``host.mac = mac`` raises NameError — confirm.
        eth1 = "127.0.0.1"
    system = grains['os']
    system_cpuarch = grains['osarch']
    server_sn = grains['sn']
    system_version = grains['osrelease']
    # e.g. "2.40GHz*8": 4th token of the model string plus core count.
    cpu = grains['cpu_model'].split()[3] + '*' + str(grains['num_cpus'])
    hard_disk = grains['disk']
    memory = grains['memory']
    brand = grains['brand']
    salt_data = {'eth1': eth1, 'mac': mac, 'system': system, 'system_cpuarch': system_cpuarch,
                 'server_sn': server_sn, 'system_version': system_version, 'cpu': cpu,
                 'hard_disk': hard_disk, 'memory': memory, 'brand': brand}
    # Log what changed under the synthetic user 'salt', then persist.
    info = salt_record(host, salt_data)
    db_to_record('salt', host, info)
    host.eth1 = eth1
    host.mac = mac
    host.cpu = cpu
    # host.raid = raid
    host.hard_disk = hard_disk
    host.memory = memory
    host.brand = brand
    host.system = system
    host.system_cpuarch = system_cpuarch
    host.system_version = system_version
    host.server_sn = server_sn
    host.save()
    return HttpResponseRedirect('/assets/host_detail/?uuid=%s' % uuid)
@login_required
def zabbix_info(request):
    """List stored zabbix alert/record rows."""
    records = ZabbixRecord.objects.all()
    return my_render('assets/zabbix.html', locals(), request)
@login_required
def zabbix_host(request):
    """Zabbix detail page for one host: graphs, items, triggers, alerts.

    All zabbix lookups key on the host's primary IP (eth1).
    """
    uuid = request.GET.get('uuid')
    host = get_object_or_404(Host, uuid=uuid)
    eth1 = host.eth1
    graphs = zabbix_get_graph(eth1)
    items = zabbix_get_item(eth1)
    triggers = zabbix_get_trigger(eth1)
    alerts = zabbix_get_alert(eth1)
    return my_render('assets/zabbix_host.html', locals(), request)
@login_required
def ip_list(request):
    """Landing page for IP usage browsing; per-network data loads via AJAX."""
    idcs = IDC.objects.all()
    # yizhuang_idc = get_object_or_404(IDC, name='亦庄电信')
    # active_111 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.111.0/24', status=1)
    # unactive_111 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.111.0/24', status=1)
    # active_112 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.112.0/24', status=1)
    # unactive_112 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.112.0/24', status=1)
    # active_113 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.113.0/24', status=1)
    # unactive_113 = IpList.objects.filter(idc=yizhuang_idc, network='192.168.113.0/24', status=1)
    return render_to_response('assets/ip_list.html', locals(), context_instance=RequestContext(request))
@login_required
def ip_list_ajax(request):
    """AJAX fragment: the list of networks configured for one IDC.

    The IDC's ``network`` field stores one CIDR per line (CRLF-separated);
    u'无' ("none") is shown when the field is empty.
    """
    idc_name = request.GET.get('idc_name', '')
    idc = get_object_or_404(IDC, name=idc_name)
    if idc:
        network = idc.network
        if network:
            networks = network.split('\r\n')
        else:
            networks = [u'无']
    return my_render('assets/ip_list_ajax.html', locals(), request)
@login_required
def ip_list_info(request):
    """AJAX fragment: active/inactive IPs of one network in one IDC.

    ?fresh=1 forces a live nmap rescan; ?fresh=0 serves cached IpList rows,
    falling back to a rescan when the cache is empty.
    NOTE(review): any other 'fresh' value leaves ip_active/ip_unactive
    unbound — the template render then fails; confirm callers.
    """
    idc_name = request.GET.get('idc_name', '')
    network = request.GET.get('network', '')
    fresh = request.GET.get('fresh', '')
    idc = get_object_or_404(IDC, name=idc_name)
    if fresh == '1':
        ip_active, ip_unactive = ip_list_refresh(idc, network)
    elif fresh == '0':
        ip_active = IpList.objects.filter(idc=idc, network=network, status=1)
        ip_unactive = IpList.objects.filter(idc=idc, network=network, status=0)
        if not ip_active:
            ip_active, ip_unactive = ip_list_refresh(idc, network)
    return my_render('assets/ip_list_info.html', locals(), request)
def ip_list_refresh(idc, network):
    """Ping-scan *network* with nmap and rebuild the IpList cache for *idc*.

    :param idc: IDC instance the network belongs to.
    :param network: CIDR string to scan.
    :returns: (ip_active, ip_unactive) — IP-sorted lists of strings.

    Fix: removed the ``@login_required`` decorator.  This is a plain helper
    (its first argument is an IDC instance, not an HttpRequest), and the
    decorator's wrapper dereferences ``request.user`` — so the direct calls
    from ip_list_info() would hit the auth wrapper with an IDC object.
    NOTE(review): if this function was ever also routed as a view, its
    access control now relies on its callers being @login_required.
    """
    nm = nmap.PortScanner()
    ip_active, ip_unactive = [], []
    # -sP/-PE: ICMP ping sweep only, no port scan; -n skips DNS.
    ips = nm.scan(hosts=network, arguments='-v -sP -PE -n --min-hostgroup 1 --min-parallelism 1')['scan']
    for ip in ips:
        if ips[ip]['status']['state'] == 'up':
            ip_active.append(str(ip))
        else:
            ip_unactive.append(str(ip))
    # In-place numeric IP sort for stable display order.
    sort_ip_list(ip_active)
    sort_ip_list(ip_unactive)
    # Replace the cached rows for this idc/network atomically enough:
    # delete everything, then re-insert the fresh scan results.
    IpList.objects.filter(idc=idc, network=network).delete()
    for ip in ip_active:
        IpList.objects.create(idc=idc, network=network, ip=ip, status=1)
    for ip in ip_unactive:
        IpList.objects.create(idc=idc, network=network, ip=ip, status=0)
    return ip_active, ip_unactive
@login_required
def MarkDown_edit(request, uuid):
    """Edit a Project's markdown documentation via Project_docForm.

    On success redirects back to ?next= with the original token/options
    query parameters re-attached.
    """
    s = Project.objects.get(pk=uuid)
    data = Project_docForm()
    if request.method == 'POST':
        uf_post = Project_docForm(request.POST, instance=s)
        if uf_post.is_valid():
            # NOTE(review): form.save() already persists; the second save()
            # is redundant but harmless.
            zw = uf_post.save()
            zw.save()
            url = "%s&token=%s&options=%s" % (request.GET["next"], request.GET.get("token"), request.GET.get("options"))
            return HttpResponseRedirect(url)
    return render_to_response('markdown/index.html', locals(), context_instance=RequestContext(request))
@login_required
def MarkDown_select(request):
    """Render the markdown editor page with all projects to choose from."""
    data = Project.objects.all()
    return render_to_response('markdown/index.html', locals(), context_instance=RequestContext(request))
def MarkDown_content(request, uuid):
    """Return a project's raw markdown description as the response body."""
    s = Project.objects.get(pk=uuid)
    return HttpResponse(s.description)
|
munhanha/mtg-random | refs/heads/master | django/contrib/localflavor/ch/ch_states.py | 544 | # -*- coding: utf-8 -*
from django.utils.translation import ugettext_lazy as _
# Swiss canton choices for localflavor form fields: each entry pairs the
# two-letter canton code with a lazily-translated display name.
STATE_CHOICES = (
    ('AG', _('Aargau')),
    ('AI', _('Appenzell Innerrhoden')),
    ('AR', _('Appenzell Ausserrhoden')),
    ('BS', _('Basel-Stadt')),
    ('BL', _('Basel-Land')),
    ('BE', _('Berne')),
    ('FR', _('Fribourg')),
    ('GE', _('Geneva')),
    ('GL', _('Glarus')),
    ('GR', _('Graubuenden')),
    ('JU', _('Jura')),
    ('LU', _('Lucerne')),
    ('NE', _('Neuchatel')),
    ('NW', _('Nidwalden')),
    ('OW', _('Obwalden')),
    ('SH', _('Schaffhausen')),
    ('SZ', _('Schwyz')),
    ('SO', _('Solothurn')),
    ('SG', _('St. Gallen')),
    ('TG', _('Thurgau')),
    ('TI', _('Ticino')),
    ('UR', _('Uri')),
    ('VS', _('Valais')),
    ('VD', _('Vaud')),
    ('ZG', _('Zug')),
    ('ZH', _('Zurich'))
)
|
redhat-openstack/nova | refs/heads/f22-patches | nova/network/floating_ips.py | 10 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from oslo import messaging
import six
from nova import context
from nova.db import base
from nova import exception
from nova.i18n import _
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.openstack.common import excutils
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.openstack.common import uuidutils
from nova import quota
from nova import rpc
from nova import servicegroup
from nova import utils
LOG = logging.getLogger(__name__)

QUOTAS = quota.QUOTAS

# oslo.config options controlling floating-IP pooling, auto-assignment and
# the pluggable DNS drivers used for floating and fixed addresses.
floating_opts = [
    cfg.StrOpt('default_floating_pool',
               default='nova',
               help='Default pool for floating IPs'),
    cfg.BoolOpt('auto_assign_floating_ip',
                default=False,
                help='Autoassigning floating IP to VM'),
    cfg.StrOpt('floating_ip_dns_manager',
               default='nova.network.noop_dns_driver.NoopDNSDriver',
               help='Full class name for the DNS Manager for floating IPs'),
    cfg.StrOpt('instance_dns_manager',
               default='nova.network.noop_dns_driver.NoopDNSDriver',
               help='Full class name for the DNS Manager for instance IPs'),
    cfg.StrOpt('instance_dns_domain',
               default='',
               help='Full class name for the DNS Zone for instance IPs'),
]

CONF = cfg.CONF
CONF.register_opts(floating_opts)
# Options owned by other modules that this mixin reads.
CONF.import_opt('public_interface', 'nova.network.linux_net')
CONF.import_opt('network_topic', 'nova.network.rpcapi')
class FloatingIP(object):
"""Mixin class for adding floating IP functionality to a manager."""
servicegroup_api = None
def init_host_floating_ips(self):
    """Configure floating IPs owned by this host at service startup.

    Re-plumbs every floating IP already associated with a fixed IP via the
    L3 driver; raises NoFloatingIpInterface when the configured interface
    does not exist on the box.
    """
    admin_context = context.get_admin_context()
    try:
        floating_ips = objects.FloatingIPList.get_by_host(admin_context,
                                                          self.host)
    except exception.NotFound:
        # No floating IPs assigned to this host — nothing to restore.
        return

    for floating_ip in floating_ips:
        # Only re-apply addresses that are actually associated.
        if floating_ip.fixed_ip_id:
            try:
                fixed_ip = floating_ip.fixed_ip
            except exception.FixedIpNotFound:
                # Stale association; skip this entry rather than abort.
                msg = _('Fixed ip %s not found') % floating_ip.fixed_ip_id
                LOG.debug(msg)
                continue
            # Config option takes precedence over the stored interface.
            interface = CONF.public_interface or floating_ip.interface
            try:
                self.l3driver.add_floating_ip(floating_ip.address,
                                              fixed_ip.address,
                                              interface,
                                              fixed_ip.network)
            except processutils.ProcessExecutionError:
                LOG.debug('Interface %s not found', interface)
                raise exception.NoFloatingIpInterface(interface=interface)
    def allocate_for_instance(self, context, **kwargs):
        """Handles allocating the floating IP resources for an instance.

        calls super class allocate_for_instance() as well

        rpc.called by network_api
        """
        instance_uuid = kwargs.get('instance_id')
        if not uuidutils.is_uuid_like(instance_uuid):
            # 'instance_id' may carry a legacy non-uuid id; fall back to
            # the explicit 'instance_uuid' key.
            instance_uuid = kwargs.get('instance_uuid')
        project_id = kwargs.get('project_id')
        # call the next inherited class's allocate_for_instance()
        # which is currently the NetworkManager version
        # do this first so fixed ip is already allocated
        nw_info = super(FloatingIP, self).allocate_for_instance(context,
                                                                **kwargs)
        if CONF.auto_assign_floating_ip:
            context = context.elevated()
            # allocate a floating ip (auto_assigned=True, so quota-exempt)
            floating_address = self.allocate_floating_ip(context, project_id,
                                                         True)
            LOG.debug("floating IP allocation for instance "
                      "|%s|", floating_address,
                      instance_uuid=instance_uuid, context=context)

            # get the first fixed address belonging to the instance
            fixed_ips = nw_info.fixed_ips()
            fixed_address = fixed_ips[0]['address']

            # associate the floating ip to fixed_ip
            self.associate_floating_ip(context,
                                       floating_address,
                                       fixed_address,
                                       affect_auto_assigned=True)
            # create a fresh set of network info that contains the floating ip
            nw_info = self.get_instance_nw_info(context, **kwargs)
        return nw_info
    def deallocate_for_instance(self, context, **kwargs):
        """Handles deallocating floating IP resources for an instance.

        calls super class deallocate_for_instance() as well.

        rpc.called by network_api
        """
        if 'instance' in kwargs:
            instance_uuid = kwargs['instance'].uuid
        else:
            instance_uuid = kwargs['instance_id']
            if not uuidutils.is_uuid_like(instance_uuid):
                # NOTE(francois.charlier): in some cases the instance might be
                # deleted before the IPs are released, so we need to get
                # deleted instances too
                instance = objects.Instance.get_by_id(
                    context.elevated(read_deleted='yes'), instance_uuid)
                instance_uuid = instance.uuid

        try:
            fixed_ips = objects.FixedIPList.get_by_instance_uuid(
                context, instance_uuid)
        except exception.FixedIpNotFoundForInstance:
            fixed_ips = []
        # add to kwargs so we can pass to super to save a db lookup there
        kwargs['fixed_ips'] = fixed_ips
        for fixed_ip in fixed_ips:
            fixed_id = fixed_ip.id
            floating_ips = objects.FloatingIPList.get_by_fixed_ip_id(context,
                                                                     fixed_id)
            # disassociate floating ips related to fixed_ip
            for floating_ip in floating_ips:
                address = str(floating_ip.address)
                try:
                    self.disassociate_floating_ip(context,
                                                  address,
                                                  affect_auto_assigned=True)
                except exception.FloatingIpNotAssociated:
                    # Already disassociated (possibly by a concurrent call).
                    LOG.info(_("Floating IP %s is not associated. Ignore."),
                             address)
                # deallocate if auto_assigned
                if floating_ip.auto_assigned:
                    self.deallocate_floating_ip(context, address,
                                                affect_auto_assigned=True)

        # call the next inherited class's deallocate_for_instance()
        # which is currently the NetworkManager version
        # call this after so floating IPs are handled first
        super(FloatingIP, self).deallocate_for_instance(context, **kwargs)
def _floating_ip_owned_by_project(self, context, floating_ip):
"""Raises if floating ip does not belong to project."""
if context.is_admin:
return
if floating_ip.project_id != context.project_id:
if floating_ip.project_id is None:
LOG.warn(_('Address |%(address)s| is not allocated'),
{'address': floating_ip.address})
raise exception.Forbidden()
else:
LOG.warn(_('Address |%(address)s| is not allocated to your '
'project |%(project)s|'),
{'address': floating_ip.address,
'project': context.project_id})
raise exception.Forbidden()
    def allocate_floating_ip(self, context, project_id, auto_assigned=False,
                             pool=None):
        """Gets a floating ip from the pool.

        Auto-assigned addresses are exempt from quota accounting.
        Returns the allocated floating ip address.
        """
        # NOTE(tr3buchet): all network hosts in zone now use the same pool
        pool = pool or CONF.default_floating_pool
        use_quota = not auto_assigned

        # Check the quota; can't put this in the API because we get
        # called into from other places
        try:
            if use_quota:
                reservations = QUOTAS.reserve(context, floating_ips=1,
                                              project_id=project_id)
        except exception.OverQuota:
            LOG.warn(_("Quota exceeded for %s, tried to allocate "
                       "floating IP"), context.project_id)
            raise exception.FloatingIpLimitExceeded()

        try:
            floating_ip = objects.FloatingIP.allocate_address(
                context, project_id, pool, auto_assigned=auto_assigned)
            payload = dict(project_id=project_id, floating_ip=floating_ip)
            self.notifier.info(context,
                               'network.floating_ip.allocate', payload)

            # Commit the reservations
            if use_quota:
                QUOTAS.commit(context, reservations, project_id=project_id)
        except Exception:
            # Roll the reservation back if allocation or commit failed.
            with excutils.save_and_reraise_exception():
                if use_quota:
                    QUOTAS.rollback(context, reservations,
                                    project_id=project_id)

        return floating_ip
    @messaging.expected_exceptions(exception.FloatingIpNotFoundForAddress)
    def deallocate_floating_ip(self, context, address,
                               affect_auto_assigned=False):
        """Returns a floating ip to the pool.

        Raises FloatingIpAssociated if the address is still associated.
        Handles the concurrent-deallocation race by checking the number of
        rows actually updated before committing the quota reservation.
        """
        floating_ip = objects.FloatingIP.get_by_address(context, address)

        # handle auto_assigned
        if not affect_auto_assigned and floating_ip.auto_assigned:
            return
        use_quota = not floating_ip.auto_assigned

        # make sure project owns this floating ip (allocated)
        self._floating_ip_owned_by_project(context, floating_ip)

        # make sure floating ip is not associated
        if floating_ip.fixed_ip_id:
            floating_address = floating_ip.address
            raise exception.FloatingIpAssociated(address=floating_address)

        # clean up any associated DNS entries
        self._delete_all_entries_for_ip(context,
                                        floating_ip.address)
        payload = dict(project_id=floating_ip.project_id,
                       floating_ip=str(floating_ip.address))
        self.notifier.info(context, 'network.floating_ip.deallocate', payload)

        project_id = floating_ip.project_id
        # Get reservations...
        try:
            if use_quota:
                reservations = QUOTAS.reserve(context,
                                              project_id=project_id,
                                              floating_ips=-1)
            else:
                reservations = None
        except Exception:
            # Quota failure here is non-fatal; proceed without reservations.
            reservations = None
            LOG.exception(_("Failed to update usages deallocating "
                            "floating IP"))

        rows_updated = objects.FloatingIP.deallocate(context, address)
        # number of updated rows will be 0 if concurrently another
        # API call has also deallocated the same floating ip
        if not rows_updated:
            if reservations:
                QUOTAS.rollback(context, reservations, project_id=project_id)
        else:
            # Commit the reservations
            if reservations:
                QUOTAS.commit(context, reservations, project_id=project_id)
    @messaging.expected_exceptions(exception.FloatingIpNotFoundForAddress)
    def associate_floating_ip(self, context, floating_address, fixed_address,
                              affect_auto_assigned=False):
        """Associates a floating ip with a fixed ip.

        Makes sure everything makes sense then calls _associate_floating_ip,
        rpc'ing to correct host if i'm not it.

        Access to the floating_address is verified but access to the
        fixed_address is not verified. This assumes that that the calling
        side has already verified that the fixed_address is legal by
        checking access to the instance.

        Returns the uuid of the instance the address was previously
        associated with, or None.
        """
        floating_ip = objects.FloatingIP.get_by_address(context,
                                                        floating_address)
        # handle auto_assigned
        if not affect_auto_assigned and floating_ip.auto_assigned:
            return

        # make sure project owns this floating ip (allocated)
        self._floating_ip_owned_by_project(context, floating_ip)

        # disassociate any already associated
        orig_instance_uuid = None
        if floating_ip.fixed_ip_id:
            # find previously associated instance
            fixed_ip = floating_ip.fixed_ip
            if str(fixed_ip.address) == fixed_address:
                # NOTE(vish): already associated to this address
                return
            orig_instance_uuid = fixed_ip.instance_uuid

            self.disassociate_floating_ip(context, floating_address)

        fixed_ip = objects.FixedIP.get_by_address(context, fixed_address)

        # send to correct host, unless i'm the correct host
        network = objects.Network.get_by_id(context.elevated(),
                                            fixed_ip.network_id)
        if network.multi_host:
            # On multi-host networks the instance's compute host plumbs
            # the address rather than the network host.
            instance = objects.Instance.get_by_uuid(
                context, fixed_ip.instance_uuid)
            host = instance.host
        else:
            host = network.host

        interface = floating_ip.interface
        if host == self.host:
            # i'm the correct host
            self._associate_floating_ip(context, floating_address,
                                        fixed_address, interface,
                                        fixed_ip.instance_uuid)
        else:
            # send to correct host
            self.network_rpcapi._associate_floating_ip(context,
                    floating_address, fixed_address, interface, host,
                    fixed_ip.instance_uuid)

        return orig_instance_uuid
    def _associate_floating_ip(self, context, floating_address, fixed_address,
                               interface, instance_uuid):
        """Performs db and driver calls to associate floating ip & fixed ip.

        The inner function is serialized per floating address to avoid
        racing concurrent (dis)associations of the same ip.
        """
        interface = CONF.public_interface or interface

        @utils.synchronized(unicode(floating_address))
        def do_associate():
            # associate floating ip
            floating = objects.FloatingIP.associate(context, floating_address,
                                                    fixed_address, self.host)
            fixed = floating.fixed_ip
            if not fixed:
                # NOTE(vish): ip was already associated
                return
            try:
                # gogo driver time
                self.l3driver.add_floating_ip(floating_address, fixed_address,
                                              interface, fixed['network'])
            except processutils.ProcessExecutionError as e:
                # Driver failure: undo the db association before re-raising.
                with excutils.save_and_reraise_exception():
                    try:
                        objects.FloatingIP.disassociate(context,
                                                        floating_address)
                    except Exception:
                        # NOTE(review): message reads "Failed to
                        # disassociated" -- pre-existing grammar typo,
                        # left unchanged here.
                        LOG.warn(_('Failed to disassociated floating '
                                   'address: %s'), floating_address)
                        pass
                    if "Cannot find device" in six.text_type(e):
                        try:
                            LOG.error(_('Interface %s not found'), interface)
                        except Exception:
                            pass
                        raise exception.NoFloatingIpInterface(
                                interface=interface)

            payload = dict(project_id=context.project_id,
                           instance_id=instance_uuid,
                           floating_ip=floating_address)
            self.notifier.info(context,
                               'network.floating_ip.associate', payload)
        do_associate()
    @messaging.expected_exceptions(exception.FloatingIpNotFoundForAddress)
    def disassociate_floating_ip(self, context, address,
                                 affect_auto_assigned=False):
        """Disassociates a floating ip from its fixed ip.

        Makes sure everything makes sense then calls _disassociate_floating_ip,
        rpc'ing to correct host if i'm not it.
        """
        floating_ip = objects.FloatingIP.get_by_address(context, address)

        # handle auto assigned
        if not affect_auto_assigned and floating_ip.auto_assigned:
            raise exception.CannotDisassociateAutoAssignedFloatingIP()

        # make sure project owns this floating ip (allocated)
        self._floating_ip_owned_by_project(context, floating_ip)

        # make sure floating ip is associated
        if not floating_ip.fixed_ip_id:
            floating_address = floating_ip.address
            raise exception.FloatingIpNotAssociated(address=floating_address)

        fixed_ip = objects.FixedIP.get_by_id(context, floating_ip.fixed_ip_id)

        # send to correct host, unless i'm the correct host
        network = objects.Network.get_by_id(context.elevated(),
                                            fixed_ip.network_id)
        interface = floating_ip.interface
        if network.multi_host:
            instance = objects.Instance.get_by_uuid(
                context, fixed_ip.instance_uuid)
            service = objects.Service.get_by_host_and_topic(
                context.elevated(), instance.host, CONF.network_topic)
            if service and self.servicegroup_api.service_is_up(service):
                host = instance.host
            else:
                # NOTE(vish): if the service is down just deallocate the data
                #             locally. Set the host to local so the call will
                #             not go over rpc and set interface to None so the
                #             teardown in the driver does not happen.
                host = self.host
                interface = None
        else:
            host = network.host
        if host == self.host:
            # i'm the correct host
            self._disassociate_floating_ip(context, address, interface,
                                           fixed_ip.instance_uuid)
        else:
            # send to correct host
            self.network_rpcapi._disassociate_floating_ip(context, address,
                    interface, host, fixed_ip.instance_uuid)
    def _disassociate_floating_ip(self, context, address, interface,
                                  instance_uuid):
        """Performs db and driver calls to disassociate floating ip.

        Serialized per address; a None interface skips the driver teardown.
        """
        interface = CONF.public_interface or interface

        @utils.synchronized(unicode(address))
        def do_disassociate():
            # NOTE(vish): Note that we are disassociating in the db before we
            #             actually remove the ip address on the host. We are
            #             safe from races on this host due to the decorator,
            #             but another host might grab the ip right away. We
            #             don't worry about this case because the minuscule
            #             window where the ip is on both hosts shouldn't cause
            #             any problems.
            floating = objects.FloatingIP.disassociate(context, address)
            fixed = floating.fixed_ip
            if not fixed:
                # NOTE(vish): ip was already disassociated
                return
            if interface:
                # go go driver time
                self.l3driver.remove_floating_ip(address, fixed.address,
                                                 interface, fixed.network)
            payload = dict(project_id=context.project_id,
                           instance_id=instance_uuid,
                           floating_ip=address)
            self.notifier.info(context,
                               'network.floating_ip.disassociate', payload)
        do_disassociate()
@messaging.expected_exceptions(exception.FloatingIpNotFound)
def get_floating_ip(self, context, id):
"""Returns a floating IP as a dict."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi.
return dict(objects.FloatingIP.get_by_id(context, id).iteritems())
def get_floating_pools(self, context):
"""Returns list of floating pools."""
# NOTE(maurosr) This method should be removed in future, replaced by
# get_floating_ip_pools. See bug #1091668
return self.get_floating_ip_pools(context)
def get_floating_ip_pools(self, context):
"""Returns list of floating ip pools."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi.
pools = objects.FloatingIP.get_pool_names(context)
return [dict(name=name) for name in pools]
def get_floating_ip_by_address(self, context, address):
"""Returns a floating IP as a dict."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi.
return objects.FloatingIP.get_by_address(context, address)
def get_floating_ips_by_project(self, context):
"""Returns the floating IPs allocated to a project."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi.
return objects.FloatingIPList.get_by_project(context,
context.project_id)
def get_floating_ips_by_fixed_address(self, context, fixed_address):
"""Returns the floating IPs associated with a fixed_address."""
# NOTE(vish): This is no longer used but can't be removed until
# we major version the network_rpcapi.
floating_ips = objects.FloatingIPList.get_by_fixed_address(
context, fixed_address)
return [str(floating_ip.address) for floating_ip in floating_ips]
def _is_stale_floating_ip_address(self, context, floating_ip):
try:
self._floating_ip_owned_by_project(context, floating_ip)
except exception.Forbidden:
return True
return False if floating_ip.get('fixed_ip_id') else True
    def migrate_instance_start(self, context, instance_uuid,
                               floating_addresses,
                               rxtx_factor=None, project_id=None,
                               source=None, dest=None):
        """Unplumb the given floating IPs ahead of a host migration.

        Removes each address from the L3 driver and clears its host so it
        is not re-bound when nova-network restarts on the source node.
        """
        # We only care if floating_addresses are provided and we're
        # switching hosts
        if not floating_addresses or (source and source == dest):
            return

        LOG.info(_("Starting migration network for instance %s"),
                 instance_uuid)
        for address in floating_addresses:
            floating_ip = objects.FloatingIP.get_by_address(context, address)
            if self._is_stale_floating_ip_address(context, floating_ip):
                LOG.warn(_("Floating ip address |%(address)s| no longer "
                           "belongs to instance %(instance_uuid)s. Will not "
                           "migrate it "),
                         {'address': address, 'instance_uuid': instance_uuid})
                continue

            interface = CONF.public_interface or floating_ip.interface
            fixed_ip = floating_ip.fixed_ip
            self.l3driver.remove_floating_ip(floating_ip.address,
                                             fixed_ip.address,
                                             interface,
                                             fixed_ip.network)

            # NOTE(wenjianhn): Make this address will not be bound to public
            # interface when restarts nova-network on dest compute node
            floating_ip.host = None
            floating_ip.save()
def migrate_instance_finish(self, context, instance_uuid,
floating_addresses, host=None,
rxtx_factor=None, project_id=None,
source=None, dest=None):
# We only care if floating_addresses are provided and we're
# switching hosts
if host and not dest:
dest = host
if not floating_addresses or (source and source == dest):
return
LOG.info(_("Finishing migration network for instance %s"),
instance_uuid)
for address in floating_addresses:
floating_ip = objects.FloatingIP.get_by_address(context, address)
if self._is_stale_floating_ip_address(context, floating_ip):
LOG.warn(_("Floating ip address |%(address)s| no longer "
"belongs to instance %(instance_uuid)s. Will not"
"setup it."),
{'address': address, 'instance_uuid': instance_uuid})
continue
floating_ip.host = dest
floating_ip.save()
interface = CONF.public_interface or floating_ip.interface
fixed_ip = floating_ip.fixed_ip
self.l3driver.add_floating_ip(floating_ip.address,
fixed_ip.address,
interface,
fixed_ip.network)
def _prepare_domain_entry(self, context, domainref):
scope = domainref.scope
if scope == 'private':
this_domain = {'domain': domainref.domain,
'scope': scope,
'availability_zone': domainref.availability_zone}
else:
this_domain = {'domain': domainref.domain,
'scope': scope,
'project': domainref.project_id}
return this_domain
    def get_dns_domains(self, context):
        """Return db-registered DNS domains known to either DNS driver."""
        domains = []
        domain_list = objects.DNSDomainList.get_all(context)
        floating_driver_domain_list = self.floating_dns_manager.get_domains()
        instance_driver_domain_list = self.instance_dns_manager.get_domains()

        for dns_domain in domain_list:
            # Only report domains at least one driver can actually serve.
            if (dns_domain.domain in floating_driver_domain_list or
                    dns_domain.domain in instance_driver_domain_list):
                domain_entry = self._prepare_domain_entry(context,
                                                          dns_domain)
                if domain_entry:
                    domains.append(domain_entry)
            else:
                LOG.warn(_('Database inconsistency: DNS domain |%s| is '
                           'registered in the Nova db but not visible to '
                           'either the floating or instance DNS driver. It '
                           'will be ignored.'), dns_domain.domain)

        return domains
def add_dns_entry(self, context, address, name, dns_type, domain):
self.floating_dns_manager.create_entry(name, address,
dns_type, domain)
def modify_dns_entry(self, context, address, name, domain):
self.floating_dns_manager.modify_address(name, address,
domain)
def delete_dns_entry(self, context, name, domain):
self.floating_dns_manager.delete_entry(name, domain)
def _delete_all_entries_for_ip(self, context, address):
domain_list = self.get_dns_domains(context)
for domain in domain_list:
names = self.get_dns_entries_by_address(context,
address,
domain['domain'])
for name in names:
self.delete_dns_entry(context, name, domain['domain'])
def get_dns_entries_by_address(self, context, address, domain):
return self.floating_dns_manager.get_entries_by_address(address,
domain)
def get_dns_entries_by_name(self, context, name, domain):
return self.floating_dns_manager.get_entries_by_name(name,
domain)
    def create_private_dns_domain(self, context, domain, av_zone):
        """Register *domain* for an availability zone and create it in DNS."""
        objects.DNSDomain.register_for_zone(context, domain, av_zone)
        try:
            self.instance_dns_manager.create_domain(domain)
        except exception.FloatingIpDNSExists:
            # Existing driver-side domain: the db registration above already
            # re-pointed it at the new zone, so just warn.
            LOG.warn(_('Domain |%(domain)s| already exists, '
                       'changing zone to |%(av_zone)s|.'),
                     {'domain': domain, 'av_zone': av_zone})
    def create_public_dns_domain(self, context, domain, project):
        """Register *domain* for a project and create it in floating DNS."""
        objects.DNSDomain.register_for_project(context, domain, project)
        try:
            self.floating_dns_manager.create_domain(domain)
        except exception.FloatingIpDNSExists:
            # Existing driver-side domain: the db registration above already
            # re-pointed it at the new project, so just warn.
            LOG.warn(_('Domain |%(domain)s| already exists, '
                       'changing project to |%(project)s|.'),
                     {'domain': domain, 'project': project})
def delete_dns_domain(self, context, domain):
objects.DNSDomain.delete_by_domain(context, domain)
self.floating_dns_manager.delete_domain(domain)
class LocalManager(base.Base, FloatingIP):
    """Floating-IP manager usable outside the nova-network service.

    With self.host set to None, work is dispatched to network hosts over
    RPC rather than performed locally.
    """
    def __init__(self):
        super(LocalManager, self).__init__()
        # NOTE(vish): setting the host to none ensures that the actual
        #             l3driver commands for l3 are done via rpc.
        self.host = None
        self.servicegroup_api = servicegroup.API()
        self.network_rpcapi = network_rpcapi.NetworkAPI()
        # DNS drivers are configurable; the defaults are no-op drivers.
        self.floating_dns_manager = importutils.import_object(
            CONF.floating_ip_dns_manager)
        self.instance_dns_manager = importutils.import_object(
            CONF.instance_dns_manager)
        self.notifier = rpc.get_notifier('network', CONF.host)
|
astropy/astropy | refs/heads/main | astropy/io/fits/tests/__init__.py | 12 | # Licensed under a 3-clause BSD style license - see PYFITS.rst
import os
import shutil
import stat
import tempfile
import time
from astropy.io import fits
class FitsTestCase:
    """Base class for FITS tests: temp-dir management plus config reset."""

    def setup(self):
        """Create a fresh temp dir and restore global FITS settings."""
        self.data_dir = os.path.join(os.path.dirname(__file__), 'data')
        self.temp_dir = tempfile.mkdtemp(prefix='fits-test-')

        # Restore global settings to defaults
        # TODO: Replace this when there's a better way to in the config API to
        # force config values to their defaults
        defaults = {'enable_record_valued_keyword_cards': True,
                    'extension_name_case_sensitive': False,
                    'strip_header_whitespace': True,
                    'use_memmap': True}
        for option, value in defaults.items():
            setattr(fits.conf, option, value)

    def teardown(self):
        """Remove the temp dir (retrying) and reset config overrides."""
        if hasattr(self, 'temp_dir') and os.path.exists(self.temp_dir):
            for _attempt in range(3):
                try:
                    shutil.rmtree(self.temp_dir)
                except OSError:
                    # Probably couldn't delete the file because for whatever
                    # reason a handle to it is still open/hasn't been
                    # garbage-collected
                    time.sleep(0.5)
                else:
                    break

        for option in ('enable_record_valued_keyword_cards',
                       'extension_name_case_sensitive',
                       'strip_header_whitespace',
                       'use_memmap'):
            fits.conf.reset(option)

    def copy_file(self, filename):
        """Copies a backup of a test data file to the temp dir and sets its
        mode to writeable.
        """
        shutil.copy(self.data(filename), self.temp(filename))
        os.chmod(self.temp(filename), stat.S_IREAD | stat.S_IWRITE)

    def data(self, filename):
        """Returns the path to a test data file."""
        return os.path.join(self.data_dir, filename)

    def temp(self, filename):
        """ Returns the full path to a file in the test temp dir."""
        return os.path.join(self.temp_dir, filename)
|
rhattersley/cartopy | refs/heads/master | lib/cartopy/examples/un_flag.py | 2 | """
UN Flag
-------
A demonstration of the power of Matplotlib combined with cartopy's Azimuthal
Equidistant projection to reproduce the UN flag.
"""
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.pyplot as plt
from matplotlib.patches import PathPatch
import matplotlib.path
import matplotlib.ticker
from matplotlib.transforms import BboxTransform, Bbox
import numpy as np
# When drawing the flag, we can either use white filled land, or be a little
# more fancy and use the Natural Earth shaded relief imagery.
# True -> plain white LAND feature; False -> ax.stock_img() imagery.
filled_land = True
def olive_path():
    """
    Return a Matplotlib path representing a single olive branch from the
    UN Flag. The path coordinates were extracted from the SVG at
    https://commons.wikimedia.org/wiki/File:Flag_of_the_United_Nations.svg.
    """
    # Vertex table is stored as two rows (x then y) and transposed to the
    # (N, 2) shape matplotlib.path.Path expects.
    olives_verts = np.array(
        [[0, 2, 6, 9, 30, 55, 79, 94, 104, 117, 134, 157, 177,
          188, 199, 207, 191, 167, 149, 129, 109, 87, 53, 22, 0, 663,
          245, 223, 187, 158, 154, 150, 146, 149, 154, 158, 181, 184, 197,
          181, 167, 153, 142, 129, 116, 119, 123, 127, 151, 178, 203, 220,
          237, 245, 663, 280, 267, 232, 209, 205, 201, 196, 196, 201, 207,
          211, 224, 219, 230, 220, 212, 207, 198, 195, 176, 197, 220, 239,
          259, 277, 280, 663, 295, 293, 264, 250, 247, 244, 240, 240, 243,
          244, 249, 251, 250, 248, 242, 245, 233, 236, 230, 228, 224, 222,
          234, 249, 262, 275, 285, 291, 295, 296, 295, 663, 294, 293, 292,
          289, 294, 277, 271, 269, 268, 265, 264, 264, 264, 272, 260, 248,
          245, 243, 242, 240, 243, 245, 247, 252, 256, 259, 258, 257, 258,
          267, 285, 290, 294, 297, 294, 663, 285, 285, 277, 266, 265, 265,
          265, 277, 266, 268, 269, 269, 269, 268, 268, 267, 267, 264, 248,
          235, 232, 229, 228, 229, 232, 236, 246, 266, 269, 271, 285, 285,
          663, 252, 245, 238, 230, 246, 245, 250, 252, 255, 256, 256, 253,
          249, 242, 231, 214, 208, 208, 227, 244, 252, 258, 262, 262, 261,
          262, 264, 265, 252, 663, 185, 197, 206, 215, 223, 233, 242, 237,
          237, 230, 220, 202, 185, 663],
         [8, 5, 3, 0, 22, 46, 46, 46, 35, 27, 16, 10, 18,
          22, 28, 38, 27, 26, 33, 41, 52, 52, 52, 30, 8, 595,
          77, 52, 61, 54, 53, 52, 53, 55, 55, 57, 65, 90, 106,
          96, 81, 68, 58, 54, 51, 50, 51, 50, 44, 34, 43, 48,
          61, 77, 595, 135, 104, 102, 83, 79, 76, 74, 74, 79, 84,
          90, 109, 135, 156, 145, 133, 121, 100, 77, 62, 69, 67, 80,
          92, 113, 135, 595, 198, 171, 156, 134, 129, 124, 120, 123, 126,
          129, 138, 149, 161, 175, 188, 202, 177, 144, 116, 110, 105, 99,
          108, 116, 126, 136, 147, 162, 173, 186, 198, 595, 249, 255, 261,
          267, 241, 222, 200, 192, 183, 175, 175, 175, 175, 199, 221, 240,
          245, 250, 256, 245, 233, 222, 207, 194, 180, 172, 162, 153, 154,
          171, 184, 202, 216, 233, 249, 595, 276, 296, 312, 327, 327, 327,
          327, 308, 284, 262, 240, 240, 239, 239, 242, 244, 247, 265, 277,
          290, 293, 296, 300, 291, 282, 274, 253, 236, 213, 235, 252, 276,
          595, 342, 349, 355, 357, 346, 326, 309, 303, 297, 291, 290, 297,
          304, 310, 321, 327, 343, 321, 305, 292, 286, 278, 270, 276, 281,
          287, 306, 328, 342, 595, 379, 369, 355, 343, 333, 326, 318, 328,
          340, 349, 366, 373, 379, 595]]).T
    # Path codes: 1=MOVETO, 2=LINETO, 4=CURVE4 (cubic Bezier control/end
    # points), 79=CLOSEPOLY -- matplotlib.path.Path's standard code values.
    olives_codes = np.array([1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 2, 4,
                             4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4,
                             4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
                             4, 4, 79], dtype=np.uint8)

    return matplotlib.path.Path(olives_verts, olives_codes)
def main():
    """Draw the UN flag: a polar azimuthal-equidistant map framed by two
    olive branches on a blue background."""
    blue = '#4b92db'

    # We're drawing a flag with a 3:5 aspect ratio.
    fig = plt.figure(figsize=[7.5, 4.5], facecolor=blue)
    # Put a blue background on the figure.
    blue_background = PathPatch(matplotlib.path.Path.unit_rectangle(),
                                transform=fig.transFigure, color=blue,
                                zorder=-1)
    fig.patches.append(blue_background)

    # Set up the Azimuthal Equidistant and Plate Carree projections
    # for later use.
    az_eq = ccrs.AzimuthalEquidistant(central_latitude=90)
    pc = ccrs.PlateCarree()

    # Pick a suitable location for the map (which is in an Azimuthal
    # Equidistant projection).
    ax = fig.add_axes([0.25, 0.24, 0.5, 0.54], projection=az_eq, frameon=False)

    # The background patch and axes frame are not needed in this example.
    ax.background_patch.set_facecolor('none')

    # We want the map to go down to -60 degrees latitude.
    ax.set_extent([-180, 180, -60, 90], ccrs.PlateCarree())

    # Importantly, we want the axes to be circular at the -60 latitude
    # rather than cartopy's default behaviour of zooming in and becoming
    # square.
    _, patch_radius = az_eq.transform_point(0, -60, pc)
    circular_path = matplotlib.path.Path.circle(0, patch_radius)
    ax.set_boundary(circular_path)

    if filled_land:
        ax.add_feature(
            cfeature.LAND, facecolor='white', edgecolor='none')
    else:
        ax.stock_img()

    gl = ax.gridlines(crs=pc, linewidth=3, color='white', linestyle='-')
    # Meridians every 45 degrees, and 5 parallels.
    gl.xlocator = matplotlib.ticker.FixedLocator(np.arange(0, 361, 45))
    parallels = np.linspace(-60, 70, 5, endpoint=True)
    gl.ylocator = matplotlib.ticker.FixedLocator(parallels)

    # Now add the olive branches around the axes. We do this in normalised
    # figure coordinates
    olive_leaf = olive_path()

    olives_bbox = Bbox.null()
    olives_bbox.update_from_path(olive_leaf)

    # The first olive branch goes from left to right.
    olive1_axes_bbox = Bbox([[0.45, 0.15], [0.725, 0.75]])
    olive1_trans = BboxTransform(olives_bbox, olive1_axes_bbox)

    # The second olive branch goes from right to left (mirroring the first).
    olive2_axes_bbox = Bbox([[0.55, 0.15], [0.275, 0.75]])
    olive2_trans = BboxTransform(olives_bbox, olive2_axes_bbox)

    olive1 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',
                       transform=olive1_trans + fig.transFigure)
    olive2 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',
                       transform=olive2_trans + fig.transFigure)

    fig.patches.append(olive1)
    fig.patches.append(olive2)

    plt.show()
# Standard script entry point.
if __name__ == '__main__':
    main()
|
rotofly/odoo | refs/heads/master | addons/mrp_byproduct/mrp_byproduct.py | 108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv import osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class mrp_subproduct(osv.osv):
    # One byproduct line on a Bill of Material: which product is produced
    # alongside the main BoM product, in what quantity/UoM, and whether the
    # quantity is fixed or scales with the produced quantity.
    _name = 'mrp.subproduct'
    _description = 'Byproduct'
    _columns={
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'subproduct_type': fields.selection([('fixed','Fixed'),('variable','Variable')], 'Quantity Type', required=True, help="Define how the quantity of byproducts will be set on the production orders using this BoM.\
  'Fixed' depicts a situation where the quantity of created byproduct is always equal to the quantity set on the BoM, regardless of how many are created in the production order.\
  By opposition, 'Variable' means that the quantity will be computed as\
    '(quantity of byproduct set on the BoM / quantity of manufactured product set on the BoM * quantity of manufactured product in the production order.)'"),
        'bom_id': fields.many2one('mrp.bom', 'BoM', ondelete='cascade'),
    }
    _defaults={
        'subproduct_type': 'variable',
        'product_qty': lambda *a: 1.0,
    }

    def onchange_product_id(self, cr, uid, ids, product_id, context=None):
        """ Changes UoM if product_id changes.
        @param product_id: Changed product_id
        @return: Dictionary of changed values
        """
        if product_id:
            prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            v = {'product_uom': prod.uom_id.id}
            return {'value': v}
        return {}

    def onchange_uom(self, cr, uid, ids, product_id, product_uom, context=None):
        # Warn, and revert to the product's own UoM, when the chosen UoM
        # belongs to a different category than the product's UoM.
        res = {'value':{}}
        if not product_uom or not product_id:
            return res
        product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
        uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
        if uom.category_id.id != product.uom_id.category_id.id:
            res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
            res['value'].update({'product_uom': product.uom_id.id})
        return res
class mrp_bom(osv.osv):
    # Extends mrp.bom with the list of byproduct lines.
    _name = 'mrp.bom'
    _description = 'Bill of Material'
    _inherit='mrp.bom'
    _columns={
        # Byproducts (mrp.subproduct lines) produced alongside the main
        # BoM product; copied when the BoM is duplicated.
        'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts', copy=True),
    }
class mrp_production(osv.osv):
    """Extends mrp.production to create stock moves for BoM byproducts."""
    _description = 'Production'
    _inherit= 'mrp.production'
    def action_confirm(self, cr, uid, ids, context=None):
        """ Confirms production order and calculates quantity based on subproduct_type.
        @return: Newly generated picking Id.
        """
        move_obj = self.pool.get('stock.move')
        picking_id = super(mrp_production,self).action_confirm(cr, uid, ids, context=context)
        product_uom_obj = self.pool.get('product.uom')
        for production in self.browse(cr, uid, ids):
            source = production.product_id.property_stock_production.id
            if not production.bom_id:
                continue
            for sub_product in production.bom_id.sub_products:
                # Ordered quantity expressed in the BoM's unit of measure.
                product_uom_factor = product_uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, production.bom_id.product_uom.id)
                qty1 = sub_product.product_qty
                qty2 = production.product_uos and production.product_uos_qty or False
                product_uos_factor = 0.0
                if qty2 and production.bom_id.product_uos.id:
                    product_uos_factor = product_uom_obj._compute_qty(cr, uid, production.product_uos.id, production.product_uos_qty, production.bom_id.product_uos.id)
                if sub_product.subproduct_type == 'variable':
                    # 'variable' byproducts scale with the produced quantity;
                    # 'fixed' ones keep the quantity set on the BoM line.
                    if production.product_qty:
                        qty1 *= product_uom_factor / (production.bom_id.product_qty or 1.0)
                    if production.product_uos_qty:
                        qty2 *= product_uos_factor / (production.bom_id.product_uos_qty or 1.0)
                data = {
                    'name': 'PROD:'+production.name,
                    'date': production.date_planned,
                    'product_id': sub_product.product_id.id,
                    'product_uom_qty': qty1,
                    'product_uom': sub_product.product_uom.id,
                    'product_uos_qty': qty2,
                    'product_uos': production.product_uos and production.product_uos.id or False,
                    'location_id': source,
                    'location_dest_id': production.location_dest_id.id,
                    'move_dest_id': production.move_prod_id.id,
                    'production_id': production.id
                }
                # One stock move per byproduct, from the virtual production
                # location to the order's destination.
                move_id = move_obj.create(cr, uid, data, context=context)
                move_obj.action_confirm(cr, uid, [move_id], context=context)
        return picking_id
def _get_subproduct_factor(self, cr, uid, production_id, move_id=None, context=None):
"""Compute the factor to compute the qty of procucts to produce for the given production_id. By default,
it's always equal to the quantity encoded in the production order or the production wizard, but with
the module mrp_byproduct installed it can differ for byproducts having type 'variable'.
:param production_id: ID of the mrp.order
:param move_id: ID of the stock move that needs to be produced. Identify the product to produce.
:return: The factor to apply to the quantity that we should produce for the given production order and stock move.
"""
sub_obj = self.pool.get('mrp.subproduct')
move_obj = self.pool.get('stock.move')
production_obj = self.pool.get('mrp.production')
production_browse = production_obj.browse(cr, uid, production_id, context=context)
move_browse = move_obj.browse(cr, uid, move_id, context=context)
subproduct_factor = 1
sub_id = sub_obj.search(cr, uid,[('product_id', '=', move_browse.product_id.id),('bom_id', '=', production_browse.bom_id.id), ('subproduct_type', '=', 'variable')], context=context)
if sub_id:
subproduct_record = sub_obj.browse(cr ,uid, sub_id[0], context=context)
if subproduct_record.bom_id.product_qty:
subproduct_factor = subproduct_record.product_qty / subproduct_record.bom_id.product_qty
return subproduct_factor
return super(mrp_production, self)._get_subproduct_factor(cr, uid, production_id, move_id, context=context)
class change_production_qty(osv.osv_memory):
    """Extend change.production.qty so byproduct moves are rescaled as well."""
    _inherit = 'change.production.qty'
    def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
        """Update the 'to produce' moves of *prod* for the new quantity *qty*.

        The move of the main product receives *qty* directly. Each byproduct
        move receives either qty * factor ('variable' byproducts) or the fixed
        quantity defined on the BoM line ('fixed' byproducts).
        @param prod: mrp.production browse record being changed
        @param qty: new quantity of the main product
        """
        move_lines_obj = self.pool.get('stock.move')
        prod_obj = self.pool.get('mrp.production')
        for move in prod.move_created_ids:
            if move.product_id.id == prod.product_id.id:
                move_lines_obj.write(cr, uid, [move.id], {'product_uom_qty': qty})
                continue
            for sub_product_line in prod.bom_id.sub_products:
                if sub_product_line.product_id.id != move.product_id.id:
                    continue
                if sub_product_line.subproduct_type == 'variable':
                    factor = prod_obj._get_subproduct_factor(cr, uid, prod.id, move.id, context=context)
                    # Real conditional instead of the old 'cond and a or b'
                    # idiom, which wrongly fell back to the fixed BoM quantity
                    # whenever qty * factor evaluated to 0.
                    subproduct_qty = qty * factor
                else:
                    subproduct_qty = sub_product_line.product_qty
                move_lines_obj.write(cr, uid, [move.id], {'product_uom_qty': subproduct_qty})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
iliamo/sensor-prototype | refs/heads/master | .waf-tools/default-compiler-flags.py | 1 | # -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
from waflib import Logs, Configure
def options(opt):
    """Register the --debug / --with-debug command-line switch."""
    help_text = '''Compile in debugging mode without optimizations (-O0 or -Og)'''
    opt.add_option(
        '--debug', '--with-debug',
        action='store_true',
        default=False,
        dest='debug',
        help=help_text)
def configure(conf):
    """Populate CXXFLAGS/LINKFLAGS, honouring --debug and user-set CXXFLAGS."""
    custom_cxxflags = len(conf.env.CXXFLAGS) > 0
    flags = ['-std=c++0x', '-std=c++11',
             '-stdlib=libc++',  # clang on OSX < 10.9 by default uses gcc's
                                # libstdc++, which is not C++11 compatible
             '-pedantic', '-Wall']
    if conf.options.debug:
        conf.define('_DEBUG', 1)
        flags += ['-O0',
                  '-Og',  # gcc >= 4.8
                  '-g3',
                  '-fcolor-diagnostics',  # clang
                  '-fdiagnostics-color',  # gcc >= 4.9
                  '-Werror',
                  '-Wno-error=deprecated-register',
                  ]
        if custom_cxxflags:
            # The user supplied their own CXXFLAGS: respect them, but report
            # which of the default debug flags are consequently not active.
            missing = [flag for flag in flags if flag not in conf.env.CXXFLAGS]
            if missing:
                Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'"
                          % " ".join(conf.env.CXXFLAGS))
                Logs.warn("Default flags '%s' are not activated" % " ".join(missing))
        else:
            conf.add_supported_cxxflags(flags)
    else:
        flags += ['-O2', '-g']
        if not custom_cxxflags:
            conf.add_supported_cxxflags(flags)
    # clang on OSX < 10.9 by default uses gcc's libstdc++, which is not C++11 compatible
    conf.add_supported_linkflags(['-stdlib=libc++'])
@Configure.conf
def add_supported_cxxflags(self, cxxflags):
    """
    Probe the compiler for each flag in *cxxflags* and prepend the
    accepted ones to env.CXXFLAGS.
    """
    self.start_msg('Checking supported CXXFLAGS')
    # -Werror turns "unknown flag" warnings into hard failures during the probe.
    accepted = [flag for flag in cxxflags
                if self.check_cxx(cxxflags=['-Werror', flag], mandatory=False)]
    self.end_msg(' '.join(accepted))
    self.env.CXXFLAGS = accepted + self.env.CXXFLAGS
@Configure.conf
def add_supported_linkflags(self, linkflags):
    """
    Probe the linker for each flag in *linkflags* and prepend the
    accepted ones to env.LINKFLAGS.
    """
    self.start_msg('Checking supported LINKFLAGS')
    # -Werror turns "unknown flag" warnings into hard failures during the probe.
    accepted = [flag for flag in linkflags
                if self.check_cxx(linkflags=['-Werror', flag], mandatory=False)]
    self.end_msg(' '.join(accepted))
    self.env.LINKFLAGS = accepted + self.env.LINKFLAGS
|
frankvdp/django | refs/heads/master | django/core/management/commands/startapp.py | 108 | from django.core.management.templates import TemplateCommand
class Command(TemplateCommand):
    """Implements ``startapp``: renders the app template for the given name."""
    help = (
        "Creates a Django app directory structure for the given app name in "
        "the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide an application name."
    def handle(self, **options):
        # Extract the two positional options; everything else flows through
        # to the generic template renderer.
        name = options.pop('name')
        directory = options.pop('directory')
        super().handle('app', name, directory, **options)
|
caot/intellij-community | refs/heads/master | python/testData/completion/noUnderscoredBuiltin.after.py | 83 | ___Cl
|
karibou/sosreport | refs/heads/master | sos/policies/ibmkvm.py | 8 | # Copyright (C) IBM Corporation, 2015
#
# Authors: Kamalesh Babulal <kamalesh@linux.vnet.ibm.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
from __future__ import print_function
from sos.plugins import PowerKVMPlugin, ZKVMPlugin, RedHatPlugin
from sos.policies.redhat import RedHatPolicy
import os
class PowerKVMPolicy(RedHatPolicy):
    """sos policy for IBM PowerKVM hosts (a RedHat-family distribution)."""
    distro = "PowerKVM"
    vendor = "IBM"
    vendor_url = "http://www-03.ibm.com/systems/power/software/linux/powerkvm"
    def __init__(self):
        super(PowerKVMPolicy, self).__init__()
        self.valid_subclasses = [PowerKVMPlugin, RedHatPlugin]
    @classmethod
    def check(cls):
        """This method checks to see if we are running on PowerKVM.
        It returns True or False."""
        return os.path.isfile('/etc/ibm_powerkvm-release')
    def dist_version(self):
        """Return the version character parsed from the release file,
        or False when the file is missing or malformed."""
        try:
            with open('/etc/ibm_powerkvm-release', 'r') as fp:
                version_string = fp.read()
            # [2][0] picks the third character of the release file --
            # NOTE(review): presumably the major version digit; confirm
            # against a real /etc/ibm_powerkvm-release.
            # (The original also had an unreachable 'return False' inside
            # the 'with' block and a bare 'except:'; both fixed here.)
            return version_string[2][0]
        except (IOError, IndexError):
            return False
class ZKVMPolicy(RedHatPolicy):
    """sos policy for IBM z Systems KVM hosts (a RedHat-family distribution)."""
    distro = "IBM Hypervisor"
    vendor = "IBM Hypervisor"
    vendor_url = "http://www.ibm.com/systems/z/linux/IBMHypervisor/support/"
    def __init__(self):
        super(ZKVMPolicy, self).__init__()
        self.valid_subclasses = [ZKVMPlugin, RedHatPlugin]
    @classmethod
    def check(cls):
        """This method checks to see if we are running on IBM Z KVM. It
        returns True or False."""
        return os.path.isfile('/etc/base-release')
    def dist_version(self):
        """Return the leading (major) version character parsed from
        /etc/base-release, or False when missing or malformed."""
        try:
            with open('/etc/base-release', 'r') as fp:
                version_string = fp.read()
            # Fourth whitespace-delimited token is the version string; take
            # its first character (the major digit).
            # NOTE(review): file-format assumption -- confirm against a real
            # /etc/base-release.
            # (The original also had an unreachable 'return False' inside
            # the 'with' block and a bare 'except:'; both fixed here.)
            return version_string.split(' ', 4)[3][0]
        except (IOError, IndexError):
            return False
# vim: set ts=4 sw=4
|
Azure/azure-sdk-for-python | refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/aio/operations/_bastion_hosts_operations.py | 1 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
# Generic return type of the optional per-call response hook.
T = TypeVar('T')
# Signature of the optional "cls" keyword argument accepted by every operation:
# receives the pipeline response, the deserialized body and response headers.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BastionHostsOperations:
    """BastionHostsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_08_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    # NOTE: AutoRest-generated operation group (see file header); manual edits
    # will be lost when the client is regenerated.
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # Internal helper: performs the single DELETE request; long-running
    # polling is driven by begin_delete().
    async def _delete_initial(
        self,
        resource_group_name: str,
        bastion_host_name: str,
        **kwargs
    ) -> None:
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        bastion_host_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified Bastion Host.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param bastion_host_name: The name of the Bastion Host.
        :type bastion_host_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                bastion_host_name=bastion_host_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # DELETE LROs complete via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        bastion_host_name: str,
        **kwargs
    ) -> "_models.BastionHost":
        """Gets the specified Bastion Host.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param bastion_host_name: The name of the Bastion Host.
        :type bastion_host_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: BastionHost, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_08_01.models.BastionHost
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BastionHost"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('BastionHost', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}'}  # type: ignore
    # Internal helper: performs the single PUT request; long-running polling
    # is driven by begin_create_or_update().
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        bastion_host_name: str,
        parameters: "_models.BastionHost",
        **kwargs
    ) -> "_models.BastionHost":
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BastionHost"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'BastionHost')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated existing resource, 201 = created new one; the payload
        # shape is the same in both cases.
        if response.status_code == 200:
            deserialized = self._deserialize('BastionHost', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('BastionHost', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        bastion_host_name: str,
        parameters: "_models.BastionHost",
        **kwargs
    ) -> AsyncLROPoller["_models.BastionHost"]:
        """Creates or updates the specified Bastion Host.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param bastion_host_name: The name of the Bastion Host.
        :type bastion_host_name: str
        :param parameters: Parameters supplied to the create or update Bastion Host operation.
        :type parameters: ~azure.mgmt.network.v2019_08_01.models.BastionHost
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either BastionHost or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_08_01.models.BastionHost]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BastionHost"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                bastion_host_name=bastion_host_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('BastionHost', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # PUT LROs complete via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}'}  # type: ignore
    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.BastionHostListResult"]:
        """Lists all Bastion Hosts in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BastionHostListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.BastionHostListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BastionHostListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Builds the GET request for either the first page or a continuation link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries its own query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Unpacks one deserialized page into (next_link, items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('BastionHostListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches and validates a single page.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/bastionHosts'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.BastionHostListResult"]:
        """Lists all Bastion Hosts in a resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BastionHostListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.BastionHostListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BastionHostListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-08-01"
        accept = "application/json"
        # Builds the GET request for either the first page or a continuation link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries its own query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Unpacks one deserialized page into (next_link, items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('BastionHostListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches and validates a single page.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts'}  # type: ignore
|
tempbottle/python-driver | refs/heads/master | cassandra/cqlengine/functions.py | 8 | # Copyright 2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from datetime import datetime
from cassandra.cqlengine import UnicodeMixin, ValidationError
import sys
if sys.version_info >= (2, 7):
    def get_total_seconds(td):
        """Return the timedelta *td* expressed as seconds (a float)."""
        return td.total_seconds()
else:
    def get_total_seconds(td):
        """Return the timedelta *td* expressed as seconds (a float).

        Emulates timedelta.total_seconds(), which does not exist before
        Python 2.7; true division is in effect module-wide, so the result
        keeps its fractional part.
        """
        microseconds = (86400 * td.days + td.seconds) * 10 ** 6 + td.microseconds
        return microseconds / 10 ** 6
class QueryValue(UnicodeMixin):
    """
    Base class for values used to filter queries.

    Instances (and subclass instances) may be passed as the right-hand side
    of .filter() keyword arguments: they render as a named placeholder and
    contribute their value to the statement's parameter context.
    """
    format_string = '%({0})s'
    def __init__(self, value):
        self.value = value
        # Assigned later via set_context_id() when the statement is composed.
        self.context_id = None
    def __unicode__(self):
        # Render as the named placeholder bound to this value's context slot.
        return self.format_string.format(self.context_id)
    def set_context_id(self, ctx_id):
        self.context_id = ctx_id
    def get_context_size(self):
        # A plain value occupies exactly one context slot.
        return 1
    def update_context(self, ctx):
        key = str(self.context_id)
        ctx[key] = self.value
class BaseQueryFunction(QueryValue):
    """
    Base class for filtering functions. Subclasses of these classes can
    be passed into .filter() and will be translated into CQL functions in
    the resulting query
    """
    # Marker base class only -- rendering and context handling are inherited
    # unchanged from QueryValue; subclasses override format_string.
    pass
class TimeUUIDQueryFunction(BaseQueryFunction):
    """Common base for the Min/MaxTimeUUID CQL bounding functions."""
    def __init__(self, value):
        """
        :param value: the time to create bounding time uuid from
        :type value: datetime
        """
        if not isinstance(value, datetime):
            raise ValidationError('datetime instance is required')
        super(TimeUUIDQueryFunction, self).__init__(value)
    def to_database(self, val):
        """Convert *val* to integer milliseconds since the Unix epoch."""
        epoch = datetime(1970, 1, 1, tzinfo=val.tzinfo)
        if epoch.tzinfo:
            offset = get_total_seconds(epoch.tzinfo.utcoffset(epoch))
        else:
            offset = 0
        seconds = get_total_seconds(val - epoch) - offset
        return int(seconds * 1000)
    def update_context(self, ctx):
        # Bind the millisecond timestamp rather than the raw datetime.
        ctx[str(self.context_id)] = self.to_database(self.value)
class MinTimeUUID(TimeUUIDQueryFunction):
    """
    return a fake timeuuid corresponding to the smallest possible timeuuid for the given timestamp
    http://cassandra.apache.org/doc/cql3/CQL.html#timeuuidFun
    """
    # Rendered as the CQL MinTimeUUID() function over this value's placeholder.
    format_string = 'MinTimeUUID(%({0})s)'
class MaxTimeUUID(TimeUUIDQueryFunction):
    """
    return a fake timeuuid corresponding to the largest possible timeuuid for the given timestamp
    http://cassandra.apache.org/doc/cql3/CQL.html#timeuuidFun
    """
    # Rendered as the CQL MaxTimeUUID() function over this value's placeholder.
    format_string = 'MaxTimeUUID(%({0})s)'
class Token(BaseQueryFunction):
    """
    Compute the token for a given partition key.

    http://cassandra.apache.org/doc/cql3/CQL.html#tokenFun
    """
    def __init__(self, *values):
        # A single list/tuple argument is unpacked so Token([a, b]) and
        # Token(a, b) behave identically.
        if len(values) == 1 and isinstance(values[0], (list, tuple)):
            values = values[0]
        super(Token, self).__init__(values)
        self._columns = None
    def set_columns(self, columns):
        self._columns = columns
    def get_context_size(self):
        # One context slot per partition-key component.
        return len(self.value)
    def __unicode__(self):
        placeholders = ['%({0})s'.format(self.context_id + i)
                        for i in range(self.get_context_size())]
        return "token({0})".format(', '.join(placeholders))
    def update_context(self, ctx):
        base = self.context_id
        for i, (column, value) in enumerate(zip(self._columns, self.value)):
            ctx[str(base + i)] = column.to_database(value)
|
pli3/enigma2-git | refs/heads/master | lib/python/Plugins/Extensions/HbbTV/plugin.py | 2 | from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.InfoBar import InfoBar
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.InfoBarGenerics import InfoBarNotifications
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Screens.HelpMenu import HelpableScreen
from Screens.ChannelSelection import service_types_tv
from Components.Language import language
from Components.PluginComponent import plugins
from Components.Button import Button
from Components.Sources.StaticText import StaticText
from Components.ActionMap import NumberActionMap, ActionMap, HelpableActionMap
from Components.ServiceEventTracker import ServiceEventTracker
from Components.MenuList import MenuList
from Components.Label import Label, MultiColorLabel
from Components.ConfigList import ConfigListScreen
from Components.VolumeControl import VolumeControl
from Components.Pixmap import Pixmap
from Components.config import config, ConfigSubsection, ConfigPosition, getConfigListEntry, ConfigBoolean, ConfigInteger, ConfigText, ConfigSelection, configfile
from enigma import eTimer, eConsoleAppContainer, getDesktop, eServiceReference, iPlayableService, iServiceInformation, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_HALIGN_CENTER, RT_VALIGN_CENTER, getPrevAsciiCode, eRCInput, fbClass, eServiceCenter
from bookmark import BookmarkManager, BookmarkData, CategoryData
import os, struct, threading, stat, select, time, socket, select
from __init__ import _
# True for None or zero-length strings; used throughout for packet payloads.
strIsEmpty = lambda x: x is None or len(x) == 0

# Install prefix of the external Opera/HbbTV browser application.
HBBTVAPP_PATH = "/usr/local/hbb-browser"
# FIFO the browser reads commands from (see BrowserCommandUtil).
COMMAND_PATH = '/tmp/.sock.hbbtv.cmd'

# Singleton HbbTVHelper instance, set by the session-start plugin hook.
_g_helper = None
class GlobalValues:
    """Process-wide mutable state shared between the plugin screens,
    the command server and the browser command pipe."""
    command_util = None        # BrowserCommandUtil instance
    command_server = None      # StreamServer listening on the unix socket
    before_service = None      # service to restore when the browser closes
    channel_info_sid = None    # current service id
    channel_info_onid = None   # current original network id
    channel_info_tsid = None   # current transport stream id
    channel_info_name = None   # current service name
    channel_info_orgid = None  # AIT organisation id of the first application
    hbbtv_handelr = None       # HandlerHbbTV instance (sic: "handelr")
    packet_m = 0xBBADBEE      # magic marker of the command packets
    packet_h = '!IIII'        # header layout: magic, opcode, length, reserved
    packet_hl = struct.calcsize(packet_h)  # header size in bytes
    need_restart = False       # set when the browser reports OOM
    plugin_browser = None      # plugin-browser screen, if open

# Single shared instance; all module-level accessors below operate on it.
__gval__ = GlobalValues()
# --- Accessors around the __gval__ singleton -------------------------------

def setPluginBrowser(browser=None):
    # Remember (or clear) the plugin-browser screen for the menu handler.
    global __gval__
    __gval__.plugin_browser = browser
def getPluginBrowser():
    global __gval__
    return __gval__.plugin_browser
def getPacketHeaders():
    # Returns (magic, struct format, header length) of the command packets.
    global __gval__
    return (__gval__.packet_m, __gval__.packet_h, __gval__.packet_hl)
def setChannelInfo(sid, onid, tsid, name, orgid):
    # Normalize missing values: 0 / "" stand for "unknown".
    if sid is None: sid = 0;
    if onid is None: onid = 0;
    if tsid is None: tsid = 0;
    if name is None: name = "";
    if orgid is None: orgid = 0;
    global __gval__
    __gval__.channel_info_sid = sid
    __gval__.channel_info_onid = onid
    __gval__.channel_info_tsid = tsid
    __gval__.channel_info_name = name
    __gval__.channel_info_orgid = orgid
    print "Set Channel Info >> sid : %X, onid : %X, tsid : %X, name : %s, orgid : %d " % (sid, onid, tsid, name, orgid)
def getChannelInfos():
    # Returns the last values stored by setChannelInfo() as a 5-tuple.
    global __gval__
    print "Get Channel Info >> sid : %X, onid : %X, tsid : %X, name : %s, orgid : %d " % (__gval__.channel_info_sid,
        __gval__.channel_info_onid, __gval__.channel_info_tsid, __gval__.channel_info_name, __gval__.channel_info_orgid)
    return (__gval__.channel_info_sid,
            __gval__.channel_info_onid,
            __gval__.channel_info_tsid,
            __gval__.channel_info_name,
            __gval__.channel_info_orgid)
def isNeedRestart():
    # True after the browser reported out-of-memory (OP_SYSTEM_OUT_OF_MEMORY).
    global __gval__
    print "Need Restart(GET) : ", __gval__.need_restart
    return __gval__.need_restart
def setNeedRestart(n):
    global __gval__
    __gval__.need_restart = n
    print "Need Restart(SET) : ", __gval__.need_restart
def getCommandUtil():
    global __gval__
    return __gval__.command_util
def getCommandServer():
    global __gval__
    return __gval__.command_server
def setBeforeService(s):
    # Service reference to restore after HbbTV/VOD playback ends.
    global __gval__
    __gval__.before_service = s
def getBeforeService():
    global __gval__
    return __gval__.before_service
def _unpack(packed_data):
    """Decode one command packet.

    Returns (opcode, payload, status) on success, or None for an empty
    buffer or a wrong magic marker.  NOTE: a zero-length payload yields the
    int 0 (not ''); some handlers explicitly test for that sentinel.
    """
    magic, fmt, header_len = getPacketHeaders()
    if strIsEmpty(packed_data):
        return None
    marker, opcode, length, status = struct.unpack(fmt, packed_data[:header_len])
    if marker != magic:
        return None
    payload = packed_data[header_len:header_len + length] if length > 0 else 0
    return (opcode, payload, status)
def _pack(opcode, params=None, reserved=0):
    """Encode one command packet: header (magic, opcode, length, reserved)
    followed by the raw params payload (empty string when params is unset)."""
    magic, fmt, _header_len = getPacketHeaders()
    payload = '' if strIsEmpty(params) else params
    header = struct.pack(fmt, magic, opcode, len(payload), reserved)
    return header + payload
class MMSStreamURL:
    """Resolve the real URL of an MMS/WMS stream.

    Issues a plain HTTP GET with an NSPlayer User-Agent (what Windows Media
    Player would send) and returns the ``Location:`` redirect target.
    """

    # Request template; the two %s placeholders (path, host) are filled
    # in request().
    headers = [
        'GET %s HTTP/1.0'
        ,'Accept: */* '
        ,'User-Agent: NSPlayer/7.10.0.3059 '
        ,'Host: %s '
        ,'Connection: Close '
    ]

    def __init__(self):
        self.sendmsg = ''
        for m in self.headers:
            self.sendmsg += m + '\n'
        self.sendmsg += '\n\n'

    def request(self, host, port=80, location='/'):
        """Send the canned GET to host:port and return the raw response."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((host, port))
        sock.send(self.sendmsg % (location, host))
        print("Request.")
        print(self.sendmsg % (location, host))
        fullydata = ''
        while 1:
            res = sock.recv(1024)
            if res == '': break
            fullydata += res
        sock.close()
        return fullydata

    def parse(self, data):
        """Return the value of the first ``Location:`` header, else None.

        Fix: the original sliced ``d[9:]`` which kept the separating space
        ('Location: ' is 10 characters long), so every caller had to
        strip() the result.
        """
        for d in data.splitlines():
            if d.startswith('Location: '):
                return d[len('Location: '):]
        return None

    def getLocationData(self, url):
        """Fetch *url* (scheme://host/path) and return its redirect target,
        or None when the URL cannot be split into host + path."""
        url_list, host, location = None, None, None
        try:
            # Drop "scheme://", then split host from the remaining path.
            url = url[url.find(':')+3:]
            url_list = url.split('/')
            host = url_list[0]
            location = url[len(url_list[0]):]
        except Exception as err_msg:
            print(err_msg)
            return None
        html = self.request(host=host, location=location)
        return self.parse(html)
class OpCodeSet:
    """Two-way mapping between wire opcodes and their symbolic names.

    get() maps name -> code, what() maps code -> name; both fall back to
    OP_UNKNOWN (0x0000) for unrecognized input instead of raising.
    """

    def __init__(self):
        self._opcode_ = {
            "OP_UNKNOWN"                    : 0x0000
            ,"OP_HBBTV_EXIT"                : 0x0001
            ,"OP_HBBTV_OPEN_URL"            : 0x0002
            ,"OP_HBBTV_LOAD_AIT"            : 0x0003
            ,"OP_HBBTV_UNLOAD_AIT"          : 0x0004
            ,"OP_HBBTV_FULLSCREEN"          : 0x0005
            ,"OP_HBBTV_TITLE"               : 0x0006
            ,"OP_HBBTV_RETRY_OPEN_URL"      : 0x0009
            ,"OP_HBBTV_CHANGE_CHANNEL"      : 0x000A
            ,"OP_OIPF_GET_CHANNEL_INFO_URL" : 0x0101
            ,"OP_OIPF_GET_CHANNEL_INFO_AIT" : 0x0102
            ,"OP_OIPF_GET_CHANNEL_INFO_LIST": 0x0103
            ,"OP_VOD_URI"                   : 0x0201
            ,"OP_VOD_PLAY"                  : 0x0202
            ,"OP_VOD_STOP"                  : 0x0203
            ,"OP_VOD_PAUSE"                 : 0x0204
            ,"OP_VOD_STATUS"                : 0x0205
            ,"OP_VOD_FORBIDDEN"             : 0x0206
            ,"OP_VOD_STOPED"                : 0x0207
            ,"OP_VOD_SPEED_CTRL"            : 0x0208
            ,"OP_VOD_SEEK_CTRL"             : 0x0209
            ,"OP_BROWSER_OPEN_URL"          : 0x0301
            ,"OP_BROWSER_VKBD_REQ"          : 0x0309
            ,"OP_BROWSER_VKBD_RES"          : 0x030A
            ,"OP_BROWSER_VKBD_PASTE_REQ"    : 0x030B
            ,"OP_BROWSER_VKBD_PASTE_KEY"    : 0x030C
            ,"OP_BROWSER_VKBD_PASTE_MOUSE"  : 0x030D
            ,"OP_BROWSER_MENU_REQ"          : 0x030E
            ,"OP_BROWSER_MENU_RES"          : 0x030F
            ,"OP_DVBAPP_VOL_UP"             : 0x0401
            ,"OP_DVBAPP_VOL_DOWN"           : 0x0402
            ,"OP_SYSTEM_OUT_OF_MEMORY"      : 0x0501
            ,"OP_SYSTEM_NOTIFY_MY_PID"      : 0x0502
        }
        # Reverse mapping derived from _opcode_ so the two tables can
        # never drift apart (the original kept a second hand-written dict).
        self._opstr_ = dict((code, name) for (name, code) in self._opcode_.items())

    def get(self, opstr):
        """Return the numeric opcode for *opstr*, OP_UNKNOWN if unknown."""
        try:
            return self._opcode_[opstr]
        except KeyError:
            return self._opcode_["OP_UNKNOWN"]

    def what(self, opcode):
        """Return the symbolic name for *opcode*, "OP_UNKNOWN" if unknown.

        Fix: the original fallback did self._opstr_["0x0000"] — the keys of
        _opstr_ are ints, so an unknown opcode raised KeyError instead of
        degrading gracefully.
        """
        try:
            return self._opstr_[opcode]
        except KeyError:
            return self._opstr_[0x0000]
class SocketParams:
    """Parameter bundle describing how a StreamServer should be built.

    Callers construct one, overwrite the fields they need, and hand it to
    StreamServer(); every field maps 1:1 onto a server attribute.
    """

    def __init__(self):
        # Socket configuration (e.g. AF_UNIX / SOCK_STREAM, bind address).
        self.protocol = None
        self.type = None
        self.addr = None
        # Request processing: handler object and per-read buffer size.
        self.handler = None
        self.buf_size = 4096
        # accept()/select() timeout in seconds, and an optional cleanup
        # callable invoked with the address on teardown.
        self.timeout = 5
        self.destroy = None
class StreamServer:
    """Single-threaded accept loop serving one request per connection.

    Reads one buffer from each client, passes it to the handler's
    doHandle(), sends the reply and closes the connection.  The listen
    loop runs on a daemon-less worker thread until stop() is called.
    """
    def __init__(self, params):
        # All configuration comes from a SocketParams instance.
        self._protocol = params.protocol
        self._type = params.type
        self._addr = params.addr
        self._buf_size = params.buf_size
        self._handler = params.handler
        self._timeout = params.timeout
        self._destroy = params.destroy
        self._terminated = False
        self._server_thread = None
        # Callback lists shared with the handler (HbbTV close / page title).
        self.onHbbTVCloseCB = []
        self.onSetPageTitleCB = []
    def __del__(self):
        # Best-effort cleanup of the bind address (e.g. unix socket file).
        if self._destroy is not None:
            self._destroy(self._addr)
    def stop(self):
        # Signal the listen loop and wait for the worker thread to exit;
        # the select() timeout bounds how long the join can take.
        self._terminated = True
        if self._server_thread is not None:
            self._server_thread.join()
            self._server_thread = None
    def start(self):
        # Bind, listen and spawn the worker thread running _listen().
        self._socket = socket.socket(self._protocol, self._type)
        self._socket.settimeout(self._timeout)
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.bind(self._addr)
        self._socket.listen(True)
        self._server_thread = threading.Thread(target=self._listen)
        self._server_thread.start()
    def _listen(self):
        # select() with a timeout so _terminated is re-checked periodically.
        select_list = [self._socket]
        def _accept():
            try:
                conn, addr = self._socket.accept()
                self._client(conn, addr)
            except Exception, ErrMsg:
                print "ServerSocket Error >>", ErrMsg
                pass
        while not self._terminated:
            readable, writable, errored = select.select(select_list, [], [], self._timeout)
            for s in readable:
                if s is self._socket:
                    _accept()
    def _client(self, conn, addr):
        # One request/response exchange per connection.
        try:
            send_data = ''
            received_data = conn.recv(self._buf_size)
            if self._handler is not None and not strIsEmpty(received_data):
                send_data = self._handler.doHandle(received_data, self.onHbbTVCloseCB, self.onSetPageTitleCB)
            self._send(conn, send_data)
        except Exception, ErrMsg:
            # On any failure just drop the connection and report the error.
            try: conn.close()
            except:pass
            if self._handler is not None:
                self._handler.printError(ErrMsg)
    def _send(self, conn, data) :
        conn.send(data)
        conn.close()
class ServerFactory:
    """Creates and starts StreamServer instances.

    Only the unix-domain TCP variant is implemented; the other factory
    methods are placeholders.
    """
    def doListenUnixTCP(self, name, handler):
        """Start a StreamServer on unix socket *name* with *handler*."""
        def destroy(name):
            # Remove a stale socket file (also used as the server's
            # destroy callback on teardown).
            import os
            try:
                if os.path.exists(name):
                    os.unlink(name)
                    print "Removed ", name
            except: pass
        destroy(name)
        params = SocketParams()
        params.protocol = socket.AF_UNIX
        params.type = socket.SOCK_STREAM
        params.addr = name
        params.handler = handler
        params.destroy = destroy
        streamServer = StreamServer(params)
        streamServer.start()
        return streamServer
    def doListenInetTCP(self, ip, port, handler):
        # Placeholder — returns None.
        print "Not implemented yet!!"
    def doListenUnixDGRAM(self, name, handler):
        # Placeholder — returns None.
        print "Not implemented yet!!"
    def doListenInetDGRAM(self, ip, port, handler):
        # Placeholder — returns None.
        print "Not implemented yet!!"
class Handler:
    """Base protocol handler used by StreamServer.

    NOTE(review): the base doHandle() takes (data, onCloseCB) while
    HandlerHbbTV overrides it with an extra onSetPageTitleCB parameter —
    only the subclass signature matches what StreamServer._client() calls.
    """
    def doUnpack(self, data):
        # Decode a packet into (opcode, payload, status); None on error.
        return _unpack(data)
    def doPack(self, opcode, params, reserved=0):
        # Encode a reply packet.
        return _pack(opcode, params, reserved)
    def doHandle(self, data, onCloseCB):
        # Default implementation: reject everything.
        opcode, params = 0x0, 'Invalid Request!!'
        return _pack(opcode, params)
    def printError(self, reason):
        print reason
class BrowserCommandUtil(OpCodeSet):
    """Writes packed commands into the browser's command FIFO.

    Connection is opened lazily per sendCommand() call and closed again
    afterwards; the FIFO path is the module-level COMMAND_PATH.
    """
    def __init__(self):
        self._fd = None  # os-level fd of the FIFO while connected
        OpCodeSet.__init__(self)
    def isConnected(self):
        if self._fd is None:
            return False
        return True
    def doConnect(self, filename):
        """Open *filename* write-only/non-blocking; False on failure."""
        if not os.path.exists(filename):
            print "File not exists :", filename
            return False
        try:
            self._fd = os.open(filename, os.O_WRONLY|os.O_NONBLOCK)
            if self._fd is None:
                print "Fail to open file :", filename
                return False
        except Exception, ErrMsg:
            print ErrMsg
            self._fd = None
            return False
        return True
    def doDisconnect(self):
        if self._fd is None:
            return
        os.close(self._fd)
        self._fd = None
    def doSend(self, command, params=None, reserved=0):
        """Pack *command* (symbolic opcode name) and write it to the FIFO."""
        if self._fd is None:
            print "No found pipe!!"
            return False
        data = ''
        try:
            data = _pack(self.get(command), params, reserved)
            if data is None:
                return False
            os.write(self._fd, data)
            print "Send OK!! :", command
        # NOTE(review): bare except hides the actual write error.
        except: return False
        return True
    def sendCommand(self, command, params=None, reserved=0):
        """Connect, send one command, disconnect; returns doSend's result."""
        if not self.isConnected():
            global COMMAND_PATH
            self.doConnect(COMMAND_PATH)
        result = self.doSend(command, params, reserved)
        self.doDisconnect()
        return result
class HandlerHbbTV(Handler):
_vod_service = None
def __init__(self, session):
self._session = session
self.opcode = OpCodeSet()
self.handle_map = {
0x0001 : self._cb_handleCloseHbbTVBrowser
,0x0006 : self._cb_handleSetPageTitle
,0x0009 : self._cb_handleHbbTVRetryOpen
,0x000A : self._cb_handleHbbTVChangeChannel
,0x0101 : self._cb_handleGetChannelInfoForUrl
,0x0102 : self._cb_handleGetChannelInfoForAIT
,0x0103 : self._cb_handleGetChannelInfoList
,0x0201 : self._cb_handleVODPlayerURI
,0x0202 : self._cb_handleVODPlayerPlay
,0x0203 : self._cb_handleVODPlayerStop
,0x0204 : self._cb_handleVODPlayerPlayPause
,0x0401 : self._cb_handleDVBAppVolUp
,0x0402 : self._cb_handleDVBAppVolDown
,0x0208 : self._cb_handleVODSpeedCtrl
,0x0209 : self._cb_handleVODSeekCtrl
,0x0501 : self._cb_handleSystemOutOfMemory
,0x0502 : self._cb_handleSystemNotufyMyPID
,0x0309 : self._cb_handleShowVirtualKeyboard
,0x030B : self._cb_handlePasteVirtualKeyboard
,0x030E : self._cb_handleBrowserMenuReq
}
self._on_close_cb = None
self._on_set_title_cb = None
self._vod_uri = None
self._retry_open_url = None
self._timer_retry_open = eTimer()
self._timer_paste_vkbd = eTimer()
self._curren_title = None
def _handle_dump(self, handle, opcode, data=None):
if True: return
print str(handle)
try:
print " - opcode : ", self.opcode.what(opcode)
except: pass
print " - data : ", data
def doHandle(self, data, onCloseCB, onSetPageTitleCB):
opcode, params, reserved = None, None, 0
self._on_close_cb = onCloseCB
self._on_set_title_cb = onSetPageTitleCB
try:
datas = self.doUnpack(data)
except Exception, ErrMsg:
print "Unpacking packet ERR :", ErrMsg
params = 'fail to unpack packet!!'
opcode = self.opcode.get("OP_UNKNOWN")
return self.doPack(opcode, params)
else:
opcode = datas[0]
params = datas[1]
self.opcode.what(opcode)
try:
#print self.handle_map[opcode]
(reserved, params) = self.handle_map[opcode](opcode, params)
except Exception, ErrMsg:
print "Handling packet ERR :", ErrMsg
params = 'fail to handle packet!!'
opcode = self.opcode.get("OP_UNKNOWN")
return self.doPack(opcode, params)
self._on_close_cb = None
self._on_set_title_cb = None
return self.doPack(opcode, params, reserved)
def _cb_handleHbbTVChangeChannel(self, opcode, data):
self._handle_dump(self._cb_handleHbbTVChangeChannel, opcode, data)
global _g_helper
if _g_helper is None:
return (0, "NOK")
dataItems = data.split(":")
sid = dataItems[0]
tsid = dataItems[1]
if not _g_helper.doChangeChannel(sid, tsid):
return (0, "NOK")
return (0, "OK")
def _cb_handleBrowserMenuReq(self, opcode, data):
self._handle_dump(self._cb_handleBrowserMenuReq, opcode, data)
fbClass.getInstance().unlock()
eRCInput.getInstance().unlock()
browser = getPluginBrowser()
if browser is not None:
browser.setCurrentPageUrl(data, self._curren_title)
return (0, "OK")
def _cb_handlePasteVirtualKeyboard(self, opcode, data):
self._handle_dump(self._cb_handlePasteVirtualKeyboard, opcode, data)
def _cb_PasteRefocusVirtualKeyboard():
self._timer_paste_vkbd.stop()
command_util = getCommandUtil()
command_util.sendCommand('OP_BROWSER_VKBD_PASTE_MOUSE')
try:
self._timer_paste_vkbd.callback.remove(_cb_PasteMouseVirtualKeyboard)
except: pass
def _cb_PasteKeyVirtualKeyboard():
self._timer_paste_vkbd.stop()
command_util = getCommandUtil()
command_util.sendCommand('OP_BROWSER_VKBD_PASTE_KEY')
try:
self._timer_paste_vkbd.callback.remove(_cb_PasteKeyVirtualKeyboard)
except: pass
self._timer_paste_vkbd.callback.append(_cb_PasteRefocusVirtualKeyboard)
self._timer_paste_vkbd.start(100)
def _cb_PasteMouseVirtualKeyboard():
self._timer_paste_vkbd.stop()
command_util = getCommandUtil()
command_util.sendCommand('OP_BROWSER_VKBD_PASTE_MOUSE')
#time.sleep(1)
#command_util.sendCommand('OP_BROWSER_VKBD_PASTE_MOUSE')
try:
self._timer_paste_vkbd.callback.remove(_cb_PasteMouseVirtualKeyboard)
except: pass
#self._timer_paste_vkbd.callback.append(_cb_PasteKeyVirtualKeyboard)
#self._timer_paste_vkbd.start(1000)
self._timer_paste_vkbd.callback.append(_cb_PasteMouseVirtualKeyboard)
self._timer_paste_vkbd.start(50)
return (0, "OK")
def _cb_virtualKeyboardClosed(self, data=None):
fbClass.getInstance().lock()
eRCInput.getInstance().lock()
command_util = getCommandUtil()
command_util.sendCommand('OP_BROWSER_VKBD_RES', data)
def _cb_handleShowVirtualKeyboard(self, opcode, data):
self._handle_dump(self._cb_handleShowVirtualKeyboard, opcode, data)
fbClass.getInstance().unlock()
eRCInput.getInstance().unlock()
if data == 0 or strIsEmpty(data):
data = ""
self._session.openWithCallback(self._cb_virtualKeyboardClosed, VirtualKeyBoard, title=("Please enter URL here"), text=data)
return (0, "OK")
def _cb_handleVODSeekCtrl(self, opcode, data):
self._handle_dump(self._cb_handleVODSeekCtrl, opcode, data)
headLen = struct.calcsize('!I')
unpackedData = struct.unpack('!I', data[:headLen])
seekTime = unpackedData[0]
service = self._session.nav.getCurrentService()
seekable = service.seek()
if seekable is None or not seekable.isCurrentlySeekable():
raise Exception("This stream is not support manual seek.")
pts = seekTime
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
return (0, "OK")
def _cb_handleHbbTVRetryOpen(self, opcode, data):
def _cb_HbbTVRetryOpenURL():
self._timer_retry_open.stop()
if self._retry_open_url is not None:
command_util = getCommandUtil()
command_util.sendCommand('OP_HBBTV_RETRY_OPEN_URL', params=self._retry_open_url)
self._retry_open_url = None
try:
self._timer_retry_open.callback.remove(_cb_HbbTVRetryOpenURL)
except: pass
self._handle_dump(self._cb_handleHbbTVRetryOpen, opcode, data)
headLen = struct.calcsize('!I')
unpackedData = struct.unpack('!I', data[:headLen])
delayTime = unpackedData[0]
restartUrl = data[headLen:]
self._retry_open_url = restartUrl.strip()
self._timer_retry_open.callback.append(_cb_HbbTVRetryOpenURL)
self._timer_retry_open.start(delayTime*1000)
return (0, "OK")
def _cb_handleSystemNotufyMyPID(self, opcode, data):
self._handle_dump(self._cb_handleSystemNotufyMyPID, opcode, data)
return (0, "OK")
def _cb_handleSystemOutOfMemory(self, opcode, data):
self._handle_dump(self._cb_handleSystemOutOfMemory, opcode, data)
setNeedRestart(True)
return (0, "OK")
def _cb_handleVODSpeedCtrl(self, opcode, data):
self._handle_dump(self._cb_handleVODSpeedCtrl, opcode, data)
headLen = struct.calcsize('!I')
unpackedData = struct.unpack('!I', data[:headLen])
playSpeed = unpackedData[0]
service = self._session.nav.getCurrentService()
pauseable = service.pause()
if playSpeed > 2:
playSpeed = 2
if pauseable.setFastForward(playSpeed) == -1:
pauseable.setFastForward(1)
raise Exception("This stream is not support trick play.")
return (0, "OK")
def _cb_handleDVBAppVolUp(self, opcode, data):
self._handle_dump(self._cb_handleDVBAppVolUp, opcode, data)
vcm = VolumeControl.instance
vcm.volUp()
return (0, "OK")
def _cb_handleDVBAppVolDown(self, opcode, data):
self._handle_dump(self._cb_handleDVBAppVolDown, opcode, data)
vcm = VolumeControl.instance
vcm.volDown()
return (0, "OK")
def _cb_handleGetChannelInfoForUrl(self, opcode, data):
self._handle_dump(self._cb_handleGetChannelInfoForUrl, opcode, data)
(sid, onid, tsid, name, orgid) = getChannelInfos()
namelen = len(name)
return (0, struct.pack('!IIII', sid, onid, tsid, namelen) + name)
def _cb_handleGetChannelInfoForAIT(self, opcode, data):
self._handle_dump(self._cb_handleGetChannelInfoForAIT, opcode, data)
(sid, onid, tsid, name, orgid) = getChannelInfos()
namelen = len(name)
return (0, struct.pack('!IIIII', orgid, sid, onid, tsid, namelen) + name)
def _cb_handleGetChannelInfoList(self, opcode, data):
self._handle_dump(self._cb_handleGetChannelInfoList, opcode, data)
(sid, onid, tsid, name, orgid) = getChannelInfos()
namelen = len(name)
channel_list_size = 1
return (channel_list_size, struct.pack('!IIII', sid, onid, tsid, namelen) + name)
def _cb_handleSetPageTitle(self, opcode, data):
self._handle_dump(self._cb_handleCloseHbbTVBrowser, opcode, data)
if data.startswith('file://') or data.startswith('http://'):
return "OK"
if self._on_set_title_cb is not None:
for x in self._on_set_title_cb:
try:
x(data)
self._curren_title = data
except Exception, ErrMsg:
if x in self._on_set_title_cb:
self._on_set_title_cb.remove(x)
return (0, "OK")
def _cb_handleCloseHbbTVBrowser(self, opcode, data):
self._timer_retry_open.stop()
try:
self._timer_retry_open.callback.remove(_cb_HbbTVRetryOpenURL)
except: pass
self._handle_dump(self._cb_handleCloseHbbTVBrowser, opcode, data)
if self._on_close_cb:
for x in self._on_close_cb:
try:
x()
except Exception, ErrMsg:
if x in self._on_close_cb:
self._on_close_cb.remove(x)
command_util = getCommandUtil()
command_util.sendCommand('OP_HBBTV_FULLSCREEN', None)
before_service = getBeforeService()
if before_service is not None:
self._session.nav.playService(before_service)
self._vod_uri = None
# iq - [
self._restore_before_service = True
# ]
return (0, "OK")
def _cb_handleVODPlayerURI(self, opcode, data):
self._vod_uri = None
hl = struct.calcsize('!II')
datas = struct.unpack('!II', data[:hl])
uriLength = datas[1]
vodUri = data[hl:hl+uriLength]
self._handle_dump(self._cb_handleVODPlayerURI, opcode, vodUri)
self._vod_uri = vodUri
return (0, "OK")
def doStop(self, restoreBeforeService=True, needStop=True):
if needStop == True:
# iq - [
time.sleep(2)
# ]
self._session.nav.stopService()
if self._vod_service is not None and restoreBeforeService:
before_service = getBeforeService()
self._session.nav.playService(before_service)
self._vod_uri = None
self._vod_service = None
def getUrl(self):
return self._vod_uri
def doRetryOpen(self, url):
if url is None:
return False
for ii in range(5):
self._vod_service = None
try:
print "Try to open vod [%d] : %s" % (ii, url)
self._vod_service = eServiceReference(4097, 0, url)
self._session.nav.playService(self._vod_service)
if self._vod_service is not None:
return True
except Exception, ErrMsg:
print "OpenVOD ERR :", ErrMsg
time.sleep(1)
return False
def _cb_handleVODPlayerPlay(self, opcode, data):
self._handle_dump(self._cb_handleVODPlayerPlay, opcode, data)
self.doStop(restoreBeforeService=False)
if self.doRetryOpen(url=self._vod_uri) == False:
self.doStop()
return (0, "OK")
def _cb_handleVODPlayerStop(self, opcode, data):
self._handle_dump(self._cb_handleVODPlayerStop, opcode, data)
# iq - [
# self.doStop()
self.doStop(self._restore_before_service)
# ]
return (0, "OK")
def _cb_handleVODPlayerPlayPause(self, opcode, data):
self._handle_dump(self._cb_handleVODPlayerPlayPause, opcode, data)
service = self._session.nav.getCurrentService()
try:
pauseFlag = data[0]
servicePause = service.pause()
if pauseFlag == 'U':
servicePause.unpause()
elif pauseFlag == 'P':
servicePause.pause()
except Exception, ErrMsg:
print "onPause ERR :", ErrMsg
return (0, "OK")
from libshm import SimpleSharedMemory
# Lazily-connected SimpleSharedMemory singleton (created in
# HbbTVHelper.__init__, used by HbbTVWindow to publish VOD play status).
_g_ssm_ = None
class HbbTVWindow(Screen, InfoBarNotifications):
    """Full-screen placeholder shown while the external HbbTV browser runs.

    On layout finish it sends the AIT/URL to the browser; while a VOD
    service plays it publishes position/length via shared memory once per
    second.  Closes itself when the browser reports OP_HBBTV_EXIT.
    """
    skin = """
        <screen name="HbbTVWindow" position="0,0" size="1280,720" backgroundColor="transparent" flags="wfNoBorder" title="HbbTV Plugin">
        </screen>
        """
    def __init__(self, session, url=None, cbf=None, useAIT=False, profile=0):
        # Lock framebuffer and RC input: the external browser owns them now.
        self._session = session
        fbClass.getInstance().lock()
        eRCInput.getInstance().lock()
        Screen.__init__(self, session)
        InfoBarNotifications.__init__(self)
        self.__event_tracker = ServiceEventTracker(screen = self, eventmap = {
                iPlayableService.evUser+20: self._serviceForbiden,
                iPlayableService.evStart: self._serviceStarted,
                iPlayableService.evEOF: self._serviceEOF,
            })
        self._url = url                   # URL to open (when not AIT-driven)
        self._use_ait = useAIT            # True: autostart from broadcast AIT
        self._profile = profile           # HbbTV profile from the AIT
        self._cb_closed_func = cbf        # notified when this window closes
        self.onLayoutFinish.append(self._layoutFinished)
        command_server = getCommandServer()
        if self._cb_set_page_title not in command_server.onSetPageTitleCB:
            command_server.onSetPageTitleCB.append(self._cb_set_page_title)
        if self._cb_close_window not in command_server.onHbbTVCloseCB:
            command_server.onHbbTVCloseCB.append(self._cb_close_window)
        self._closeTimer = eTimer()
        self._closeTimer.callback.append(self._do_close)
        self._currentServicePositionTimer = eTimer()
        self._currentServicePositionTimer.callback.append(self._cb_currentServicePosition)
        self._vodLength = 0
        global _g_ssm_
        self._ssm = _g_ssm_
        self._vod_length = 0
    def getVodPlayTime(self):
        """Return (position, length) in pts, (90000, 90000) while the seek
        interface reports errors, or (-1, -1) when unavailable."""
        try:
            service = self._session.nav.getCurrentService()
            seek = service and service.seek()
            l = seek.getLength()
            p = seek.getPlayPosition()
            # l[0]/p[0] are error flags; 0 means the value in [1] is valid.
            if(not l[0] and not p[0]):
                return (p[1], l[1])
            return (90000,90000)
        except: pass
        return (-1,-1)
    def _cb_currentServicePosition(self):
        # Once-per-second tick: publish play status into shared memory.
        def getTimeString(t):
            t = time.localtime(t/90000)
            return "%2d:%02d:%02d" % (t.tm_hour, t.tm_min, t.tm_sec)
        position,length = 0,0
        try:
            (position,length) = self.getVodPlayTime()
            self._vod_length = length
            if position == -1 and length == -1:
                raise Exception("Can't get play status")
            self._ssm.setStatus(position, length, 1)
        except Exception, ErrMsg:
            print ErrMsg
            self._serviceEOF()
    def _serviceStarted(self):
        try:
            # Reset shared status and start the 1s position poller.
            self._ssm.setStatus(0, 0, 0)
            self._currentServicePositionTimer.start(1000)
        except Exception, ErrMsg:
            print ErrMsg
    def _serviceEOF(self):
        self._currentServicePositionTimer.stop()
    def _layoutFinished(self):
        # Window is up: push the AIT (autostart) or an explicit URL to the
        # browser.  The sleep gives the browser time to process the unload.
        self.setTitle(_('HbbTV Plugin'))
        command_util = getCommandUtil()
        profile = self._profile
        (sid, onid, tsid, name, orgid) = getChannelInfos()
        params = struct.pack('!IIIIII', orgid, profile, sid, onid, tsid, len(name)) + name
        if self._use_ait:
            command_util.sendCommand('OP_HBBTV_UNLOAD_AIT')
            time.sleep(1)
            command_util.sendCommand('OP_HBBTV_LOAD_AIT', params, 1)
            return
        command_util.sendCommand('OP_HBBTV_LOAD_AIT', params)
        time.sleep(1)
        command_util.sendCommand('OP_HBBTV_OPEN_URL', self._url)
    def _cb_close_window(self):
        # Called from the command-server thread; defer to the main loop.
        self._closeTimer.start(1000)
    def _do_close(self):
        # Unhook from the command server, release input/fb, and close.
        self._closeTimer.stop()
        command_server = getCommandServer()
        try:
            if self._cb_set_page_title in command_server.onSetPageTitleCB:
                command_server.onSetPageTitleCB.remove(self._cb_set_page_title)
        except Exception, ErrMsg: pass
        try:
            if self._cb_close_window in command_server.onHbbTVCloseCB:
                command_server.onHbbTVCloseCB.remove(self._cb_close_window)
        except Exception, ErrMsg: pass
        try:
            if self._cb_closed_func is not None:
                self._cb_closed_func()
        except: pass
        fbClass.getInstance().unlock()
        eRCInput.getInstance().unlock()
        self.close()
    def _serviceForbiden(self):
        # Stream refused (evUser+20): resolve the real MMS location and retry.
        global __gval__
        real_url = MMSStreamURL().getLocationData(__gval__.hbbtv_handelr.getUrl())
        print "Received URI :\n", real_url
        if real_url is not None:
            __gval__.hbbtv_handelr.doRetryOpen(real_url.strip())
    def _cb_set_page_title(self, title=None):
        print "page title :",title
        if title is None:
            return
        self.setTitle(title)
class HbbTVHelper(Screen, InfoBarNotifications):
skin = """<screen name="HbbTVHelper" position="0,0" size="0,0" backgroundColor="transparent" flags="wfNoBorder" title=" "></screen>"""
    def __init__(self, session):
        """Create the handler + command server pair, (re)start the external
        browser and hook into the InfoBar once it exists."""
        global __gval__
        __gval__.hbbtv_handelr = HandlerHbbTV(session)
        __gval__.command_server = ServerFactory().doListenUnixTCP('/tmp/.sock.hbbtv.url', __gval__.hbbtv_handelr)
        self._restart_opera()
        Screen.__init__(self, session)
        InfoBarNotifications.__init__(self)
        self._session = session
        # InfoBar.instance may not exist yet; poll for it every second.
        self._timer_infobar = eTimer()
        self._timer_infobar.callback.append(self._cb_registrate_infobar)
        self._timer_infobar.start(1000)
        self._excuted_browser = False
        self._profile = 0
        __gval__.command_util = BrowserCommandUtil()
        global _g_ssm_
        if _g_ssm_ is None:
            _g_ssm_ = SimpleSharedMemory()
            _g_ssm_.doConnect()
        # self.__et = ServiceEventTracker(screen=self, eventmap={
        # 		iPlayableService.evHBBTVInfo: self._cb_detectedAIT,
        # 		iPlayableService.evUpdatedInfo: self._cb_updateInfo
        # 	})
        self._applicationList = None
        # Vu+ boxes expose gHbbTvApplication for showing the app name.
        self.mVuplusBox = False
        issue = open("/etc/issue").read()
        if(issue.startswith("Vuplus")):
            self.mVuplusBox = True
    def _cb_detectedAIT(self):
        # An AIT was detected on the current service: show its app name
        # in the infobar (Vu+ boxes only).
        name = self._cb_ready_for_ait()
        if name is not None and self.mVuplusBox:
            from Screens.InfoBarGenerics import gHbbTvApplication
            gHbbTvApplication.setApplicationName(str(name))
    def _cb_updateInfo(self):
        # Service changed: drop the browser's loaded AIT unless the
        # browser is currently showing an application.
        if not self._excuted_browser:
            command_util = getCommandUtil()
            command_util.sendCommand('OP_HBBTV_UNLOAD_AIT')
        if self.mVuplusBox:
            from Screens.InfoBarGenerics import gHbbTvApplication
            gHbbTvApplication.setApplicationName("")
        #self._applicationList = None
    def _cb_registrate_infobar(self):
        # Stop polling once the InfoBar exists and hook its HbbTV event.
        if InfoBar.instance:
            self._timer_infobar.stop()
            if self._cb_hbbtv_activated not in InfoBar.instance.onHBBTVActivation:
                InfoBar.instance.onHBBTVActivation.append(self._cb_hbbtv_activated)
    def _cb_ready_for_ait(self):
        """Parse the AIT of the current service, cache the application
        list + channel info, and return the first application's name
        (None when there is no usable AIT)."""
        setChannelInfo(None, None, None, None, None)
        service = self._session.nav.getCurrentService()
        info = service and service.info()
        if info is not None:
            sid = info.getInfo(iServiceInformation.sSID)
            onid = info.getInfo(iServiceInformation.sONID)
            tsid = info.getInfo(iServiceInformation.sTSID)
            name = info.getName()
            if name is None:
                name = ""
            pmtid = info.getInfo(iServiceInformation.sPMTPID)
            demux = info.getInfoString(iServiceInformation.sLiveStreamDemuxId)
            # iq - [
            if pmtid == -1:
                return None
            # ]
            from aitreader import eAITSectionReader
            reader = eAITSectionReader(demux, pmtid, sid)
            if reader.doOpen():
                reader.doParseApplications()
                reader.doDump()
            else: print "no data!!"
            try:
                self._applicationList = reader.getApplicationList()
                if len(self._applicationList) > 0:
                    # orgid of the first app is what the browser needs.
                    orgid = int(self._applicationList[0]["orgid"])
                    setChannelInfo(sid, onid, tsid, name, orgid)
                    return self._applicationList[0]["name"]
            except: pass
        return None
    def _cb_hbbtv_activated(self, title=None, url=None):
        # InfoBar red-button hook: remember the current service and start.
        if not self._is_browser_running():
            message = _("HbbTV Browser was not running.\nPlease running browser before start HbbTV Application.")
            self.session.open(MessageBox, message, MessageBox.TYPE_INFO)
            return
        service = self._session.nav.getCurrentlyPlayingServiceReference()
        setBeforeService(service)
        self._start_hbbtv_application(title, url)
    def _start_hbbtv_application(self, title, url):
        """Open the HbbTV window for *url* (or the AIT autostart URL)."""
        use_ait = False
        tmp_url = self.getStartHbbTVUrl()
        if url is None:
            url = tmp_url
        if strIsEmpty(url):
            print "can't get url of hbbtv!!"
            return
        print "success to get url of hbbtv!! >>", url
        if self._excuted_browser:
            print "already excuted opera browser!!"
            return
        if isNeedRestart():
            # Browser reported OOM earlier: restart it before launching.
            self._restart_opera()
            time.sleep(2)
            setNeedRestart(False)
        # NOTE(review): _applicationList may still be None here when an
        # explicit url was passed and no AIT was parsed — the loop below
        # would then raise; presumably callers guarantee a prior AIT scan.
        for x in self._applicationList:
            control_code = int(x["control"])
            tmp_url = x["url"]
            # control == 1 means AUTOSTART: drive the browser via the AIT.
            if tmp_url == url and control_code == 1:
                use_ait = True
        self._excuted_browser = True
        self._session.open(HbbTVWindow, url, self._cb_closed_browser, use_ait, self._profile)
    def _cb_closed_browser(self):
        # HbbTVWindow closed: allow launching again.
        self._excuted_browser = False
    def _start_opera(self):
        # Launch the external browser unless it is already running.
        if not self._is_browser_running():
            global HBBTVAPP_PATH
            start_command = '%s/launcher start'%(HBBTVAPP_PATH)
            os.system(start_command)
    def _stop_opera(self):
        global HBBTVAPP_PATH
        try: os.system('%s/launcher stop'%(HBBTVAPP_PATH))
        except: pass
    def _restart_opera(self):
        global HBBTVAPP_PATH
        try: os.system('%s/launcher restart'%(HBBTVAPP_PATH))
        except: pass
    def getStartHbbTVUrl(self):
        """Return the start URL of the first AIT application on the current
        service (falling back to sHBBTVUrl), or None.  Also refreshes
        self._applicationList / self._profile and the global channel info."""
        url, self._applicationList, self._profile = None, None, 0
        setChannelInfo(None, None, None, None, None)
        service = self._session.nav.getCurrentService()
        info = service and service.info()
        if info is not None:
            sid = info.getInfo(iServiceInformation.sSID)
            onid = info.getInfo(iServiceInformation.sONID)
            tsid = info.getInfo(iServiceInformation.sTSID)
            name = info.getName()
            if name is None:
                name = ""
            pmtid = info.getInfo(iServiceInformation.sPMTPID)
            demux = info.getInfoString(iServiceInformation.sLiveStreamDemuxId)
            if pmtid != -1:
                try:
                    from aitreader import eAITSectionReader
                    reader = eAITSectionReader(demux, pmtid, sid)
                    if reader.doOpen():
                        reader.doParseApplications()
                        reader.doDump()
                    else: print "no data!!"
                    self._applicationList = reader.getApplicationList()
                    if len(self._applicationList) > 0:
                        orgid = int(self._applicationList[0]["orgid"])
                        setChannelInfo(sid, onid, tsid, name, orgid)
                except Exception, ErrMsg:
                    print ErrMsg
                    return None
            else:
                return None
        if self._applicationList is not None:
            # First application wins: its profile and start URL are used.
            self._profile = self._applicationList[0]["profile"]
            url = self._applicationList[0]["url"]
        if url is None:
            # Fall back to the URL the service itself advertises.
            url = info.getInfoString(iServiceInformation.sHBBTVUrl)
        return url
def showApplicationSelectionBox(self):
applications = []
if self.getStartHbbTVUrl():
for x in self._applicationList:
applications.append((x["name"], x))
else:
applications.append((_("No detected HbbTV applications."), None))
self._session.openWithCallback(self._application_selected, ChoiceBox, title=_("Please choose an HbbTV application."), list=applications)
	def _application_selected(self, selected):
		# ChoiceBox callback: `selected` is a (label, payload) tuple; payload is
		# None for the "no applications" placeholder entry.
		print selected
		try:
			if selected[1] is None: return
			self._cb_hbbtv_activated(selected[1]["name"], selected[1]["url"])
			# iq - [
			# Manually chosen app: do not restore the previous service on close.
			__gval__.hbbtv_handelr._restore_before_service = False
			# ]
		except Exception, ErrMsg: print ErrMsg
def showBrowserConfigBox(self):
start_stop_mode = []
if self._is_browser_running():
start_stop_mode.append((_('Stop'),'Stop'))
else: start_stop_mode.append((_('Start'),'Start'))
self._session.openWithCallback(self._browser_config_selected, ChoiceBox, title=_("Please choose one."), list=start_stop_mode)
	def _browser_config_selected(self, selected):
		# ChoiceBox callback for showBrowserConfigBox: starts or stops the
		# launcher according to the chosen mode tag ('Start'/'Stop').
		if selected is None:
			return
		try:
			mode = selected[1]
			if mode == 'Start':
				# Guard again: the state may have changed while the box was open.
				if not self._is_browser_running():
					self._start_opera()
			elif mode == 'Stop':
				self._stop_opera()
		except Exception, ErrMsg: print "Config ERR :", ErrMsg
	def _is_browser_running(self):
		# Ask the launcher script; it prints "0" when no browser is running.
		# Returns False on any error (treat unknown state as "not running").
		try:
			global HBBTVAPP_PATH
			ret = os.popen('%s/launcher check'%(HBBTVAPP_PATH)).read()
			return ret.strip() != "0"
		except Exception, ErrMsg:
			print "Check Browser Running ERR :", ErrMsg
			return False
	def doChangeChannel(self, _sid, _tsid):
		# Tune to the first TV service whose SID/TSID (string-compared against
		# the service-reference fields) match. Returns True when a matching
		# service was found and played, False otherwise.
		root = eServiceReference(service_types_tv)
		if root is None:
			return False
		serviceList = eServiceCenter.getInstance().list(root)
		if serviceList is None:
			return False
		while True:
			service = serviceList.getNext()
			if service is None or not service.valid():
				break
			#1:0:19:2840:3FB:1:C00000:0:0:0:
			serviceRef = service.toString()
			if strIsEmpty(serviceRef):
				continue
			# Fields 3 and 4 of the colon-separated reference are SID and TSID.
			serviceRefItems = serviceRef.split(":")
			if len(serviceRefItems) < 5:
				continue
			sid = serviceRefItems[3]
			tsid = serviceRefItems[4]
			if sid == _sid and tsid == _tsid:
				self._session.nav.playService(eServiceReference(serviceRef))
				# Remember the service we switched to, so it can be restored later.
				service = self._session.nav.getCurrentlyPlayingServiceReference()
				setBeforeService(service)
				return True
		return False
class OperaBrowserSetting:
	"""Reader/writer for the browser start-page setting file.

	File format: a line ``start=<url> [<type>]`` where <type> is an int
	(0 = normal URL, 1 = local manual page).  Reading a missing file raises
	IOError, which callers catch to fall back to a default start page.
	"""
	def __init__(self):
		self._settingFileName = '/usr/local/hbb-browser/home/setting.ini'
		self._start = None
		self._type = None
		self._read()
	def _read(self):
		"""Parse the 'start=' line into self._start / self._type."""
		f = open(self._settingFileName)
		try:
			for line in f.readlines():
				if line.startswith('start='):
					# Strip the 'start=' prefix and trailing newline, then split
					# into url [type].
					tmp = line[6:len(line)-1].split()
					self._start = tmp[0]
					if len(tmp) > 1:
						self._type = int(tmp[1])
					else:
						self._type = 0
		finally:
			# Close the handle even if parsing raises (e.g. a bad int()):
			# the original leaked it in that case.
			f.close()
	def _write(self):
		"""Persist the current values back to the setting file."""
		tmpstr = []
		tmpstr.append('start=%s %d\n' % (self._start, self._type))
		f = open(self._settingFileName, 'w')
		try:
			f.writelines(tmpstr)
		finally:
			f.close()
	def setData(self, start, types=0):
		"""Set a new start page URL (and type) and write it to disk."""
		self._start = start
		self._type = types
		self._write()
	def getData(self):
		"""Return the current setting as {'start': url, 'type': int}."""
		return {
			'start':self._start,
			'type':self._type,
		}
class OperaBrowserPreferenceWindow(ConfigListScreen, Screen):
	"""Preference dialog that lets the user pick the browser start page
	(saved start page, the current page, or a directly entered URL)."""
	skin= """
		<screen position="center,center" size="600,350" title="Preference">
			<widget name="url" position="5,0" size="590,100" valign="center" font="Regular;20" />
			<widget name="config" position="0,100" size="600,200" scrollbarMode="showOnDemand" />
			<ePixmap pixmap="skin_default/buttons/red.png" position="310,310" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="150,310" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="310,310" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_green" render="Label" position="150,310" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" foregroundColor="#ffffff" transparent="1" />
		</screen>
		"""
	def __init__(self, session, currentUrl):
		"""*currentUrl* is the page currently shown in the browser ('' if none)."""
		self.session = session
		Screen.__init__(self, session)
		self.menulist = []
		ConfigListScreen.__init__(self, self.menulist)
		self["actions"] = ActionMap(["OkCancelActions", "ShortcutActions", "WizardActions", "ColorActions", "SetupActions", ], {
			"red" : self.keyRed,
			"green" : self.keyGreen,
			"ok" : self.keyOK,
			"cancel" : self.keyRed
		}, -2)
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Save"))
		self["url"] = Label()
		self._currentPageUrl = currentUrl
		if self._currentPageUrl is None:
			self._currentPageUrl = ''
		self._startPageUrl = None
		self.makeMenuEntry()
		self.onLayoutFinish.append(self.layoutFinished)
	def layoutFinished(self):
		"""Load the saved start page (with a fallback) and show it."""
		self.setTitle(_('Preference'))
		try:
			d = OperaBrowserSetting().getData()
			self._startPageUrl = d['start']
			#d['type']
		except: self._startPageUrl = 'http://vuplus.com'
		self.updateStartPageUrl()
	def updateStartPageUrl(self):
		"""Refresh the URL label according to the selected source option."""
		if self.menuItemStartpage.value == "startpage":
			self["url"].setText(self._startPageUrl)
		elif self.menuItemStartpage.value == "current":
			self["url"].setText(self._currentPageUrl)
		elif self.menuItemStartpage.value == "direct":
			self["url"].setText('')
	def keyGreen(self):
		"""Validate and save the displayed URL as the new start page."""
		url = self["url"].getText()
		if strIsEmpty(url):
			self.session.open(MessageBox, _('Invalid URL!!(Empty)\nPlease, Input to the URL.'), type = MessageBox.TYPE_INFO)
			return
		mode = 0
		# Local manual pages are stored as type 1.  BUG FIX: str.find()
		# returns 0 when the path is at the start of the URL (the normal
		# case, e.g. '/usr/local/manual/ru_RU/main.html'), so the original
		# '> 0' comparison never matched; '>= 0' is the correct test.
		if url.find('/usr/local/manual') >= 0:
			mode = 1
		OperaBrowserSetting().setData(url, mode)
		self.close()
	def keyRed(self):
		"""Cancel without saving."""
		self.close()
	def keyOK(self):
		"""In 'direct input' mode, open the virtual keyboard for the URL."""
		def _cb_directInputUrl(data):
			if strIsEmpty(data):
				return
			self["url"].setText(data)
		if self.menuItemStartpage.value == "direct":
			self.session.openWithCallback(_cb_directInputUrl, VirtualKeyBoard, title=(_("Please enter URL here")), text='http://')
	def keyLeft(self):
		ConfigListScreen.keyLeft(self)
		self.updateStartPageUrl()
	def keyRight(self):
		ConfigListScreen.keyRight(self)
		self.updateStartPageUrl()
	def makeMenuEntry(self):
		"""Build the start-page source selector; 'current' is offered only
		when a current page URL exists."""
		l = []
		l.append(("startpage", _("Start Page")))
		if not strIsEmpty(self._currentPageUrl):
			l.append(("current", _("Current Page")))
		l.append(("direct", _("Direct Input")))
		self.menuItemStartpage = ConfigSelection(default="startpage", choices = l)
		self.menuEntryStartpage = getConfigListEntry(_("Startpage"), self.menuItemStartpage)
		self.resetMenuList()
	def resetMenuList(self):
		self.menulist = []
		self.menulist.append(self.menuEntryStartpage)
		self["config"].list = self.menulist
		self["config"].l.setList(self.menulist)
class BookmarkEditWindow(ConfigListScreen, Screen):
	"""Add/Edit dialog for a single bookmark category or bookmark entry.

	Opened with a mode label (_('Add') or _('Edit')), a type (CATEGORY or
	BOOKMARK), the data object being edited, and the BookmarkManager.
	Closes with True when data was saved, False on cancel.
	"""
	# Type discriminator for what is being edited.
	CATEGORY,BOOKMARK = 0,1
	skin= """
		<screen position="center,center" size="600,140" title="Bookmark Edit">
			<widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
			<ePixmap pixmap="skin_default/buttons/red.png" position="310,100" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="150,100" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="310,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_green" render="Label" position="150,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" foregroundColor="#ffffff" transparent="1" />
			<widget name="VKeyIcon" pixmap="skin_default/buttons/key_text.png" position="0,100" zPosition="10" size="35,25" transparent="1" alphatest="on" />
		</screen>
		"""
	def __init__(self, session, _mode, _type, _data, _bm):
		# _mode: translated 'Add'/'Edit' label; _type: CATEGORY|BOOKMARK;
		# _data: CategoryData/BookmarkData instance; _bm: BookmarkManager.
		self.mMode = _mode
		self.mType = _type
		self.mData = _data
		self.mSession = session
		self.mBookmarkManager = _bm
		if _data is not None:
			print _data.mId
		Screen.__init__(self, session)
		self.menulist = []
		ConfigListScreen.__init__(self, self.menulist)
		self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
			"ok" : self.keyGreen,
			"green" : self.keyGreen,
			"red" : self.keyRed,
			"cancel" : self.keyRed,
		}, -2)
		self["VKeyIcon"] = Pixmap()
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Save"))
		self.menuItemTitle = None
		self.menuItemUrl = None
		self.menuItemName = None
		self.menuEntryName = None
		self.menuEntryTitle = None
		self.menuEntryUrl = None
		self.makeConfigList()
		self.onLayoutFinish.append(self.layoutFinished)
	def layoutFinished(self):
		self.setTitle(_('Bookmark') + ' ' + self.mMode)
	def selectedItem(self):
		# Returns (label, ConfigText) for the currently highlighted row,
		# or None when the row is not an editable field.
		currentPosition = self["config"].getCurrent()
		if self.mType == BookmarkEditWindow.CATEGORY:
			return (_("Name"), self.menuItemName)
		else:
			if currentPosition == self.menuEntryTitle:
				return (_("Title"), self.menuItemTitle)
			elif currentPosition == self.menuEntryUrl:
				return (_("Url"), self.menuItemUrl)
		return None
	def showMessageBox(self, text):
		# Warn about an empty field, then reopen the virtual keyboard.
		# Always returns False so callers can 'return self.showMessageBox(...)'.
		msg = _("Invalid ") + text + _("!!(Empty)\nPlease, Input to the ") + text + "."
		self.mSession.openWithCallback(self.showVKeyWindow, MessageBox, msg, MessageBox.TYPE_INFO)
		return False
	def showVKeyWindow(self, data=None):
		# Open the virtual keyboard pre-filled with the selected field's value.
		itemTitle = ""
		itemValue = ""
		selected = self.selectedItem()
		if selected is not None:
			itemValue = selected[1].value
			if strIsEmpty(itemValue):
				itemValue = ""
			itemTitle = selected[0]
		self.session.openWithCallback(self.cbVKeyWindow, VirtualKeyBoard, title=itemTitle, text=itemValue)
	def cbVKeyWindow(self, data=None):
		# Virtual keyboard result: write it back into the selected field.
		if data is not None:
			selected = self.selectedItem()
			if selected is not None:
				selected[1].setValue(data)
	def saveData(self):
		# Validate and persist. Returns True on success, False (after showing
		# a message box) when a required field is empty.
		if self.mType == BookmarkEditWindow.CATEGORY:
			if self.mMode == _('Add'):
				categoryName = self.menuItemName.value
				if strIsEmpty(categoryName):
					return self.showMessageBox(_("Category Name"))
				self.mBookmarkManager.addCategory(categoryName)
			else:
				if strIsEmpty(self.menuItemName.value):
					return self.showMessageBox(_("Category Name"))
				self.mData.mName = self.menuItemName.value
				self.mBookmarkManager.updateCategory(self.mData)
		else:
			if self.mMode == _('Add'):
				bookmarkTitle = self.menuItemTitle.value
				bookmarkUrl = self.menuItemUrl.value
				if strIsEmpty(bookmarkTitle):
					self["config"].setCurrentIndex(0)
					return self.showMessageBox(_("Bookmark Title"))
				if strIsEmpty(bookmarkUrl):
					self["config"].setCurrentIndex(1)
					return self.showMessageBox(_("Bookmark URL"))
				self.mBookmarkManager.addBookmark(bookmarkTitle, bookmarkUrl, self.mData.mParent, 0)
			else:
				if strIsEmpty(self.menuItemTitle.value):
					self["config"].setCurrentIndex(0)
					return self.showMessageBox(_("Bookmark Title"))
				if strIsEmpty(self.menuItemUrl.value):
					self["config"].setCurrentIndex(1)
					return self.showMessageBox(_("Bookmark URL"))
				self.mData.mTitle = self.menuItemTitle.value
				self.mData.mUrl = self.menuItemUrl.value
				self.mBookmarkManager.updateBookmark(self.mData)
		return True
	def keyGreen(self):
		if not self.saveData():
			return
		self.close(True)
	def keyRed(self):
		self.close(False)
	def keyLeft(self):
		ConfigListScreen.keyLeft(self)
	def keyRight(self):
		ConfigListScreen.keyRight(self)
	def makeConfigList(self):
		# Category edit shows a single Name field; bookmark edit shows
		# Title and Url fields.
		self.menulist = []
		if self.mType == BookmarkEditWindow.CATEGORY:
			self.menuItemName = ConfigText(default=self.mData.mName, visible_width=65, fixed_size=False)
			self.menuEntryName = getConfigListEntry(_("Name"), self.menuItemName)
			self.menulist.append(self.menuEntryName)
		else:
			self.menuItemTitle = ConfigText(default=self.mData.mTitle, visible_width=65, fixed_size=False)
			self.menuItemUrl = ConfigText(default=self.mData.mUrl, visible_width=65, fixed_size=False)
			self.menuEntryTitle = getConfigListEntry(_("Title"), self.menuItemTitle)
			self.menuEntryUrl = getConfigListEntry(_("Url"), self.menuItemUrl)
			self.menulist.append(self.menuEntryTitle)
			self.menulist.append(self.menuEntryUrl)
		self["config"].list = self.menulist
		self["config"].l.setList(self.menulist)
class OperaBrowserBookmarkWindow(Screen):
	"""Bookmark manager screen: lists categories ('# name') and their
	bookmarks (' - title'), supports add/edit/delete and setting the
	start page.  Closes with (url, mode) when a bookmark is opened.
	"""
	skin = """
		<screen name="HbbTVBrowserBookmarkWindow" position="center,center" size="600,400" title="Bookmark" >
			<widget name="bookmarklist" position="0,0" size="600,200" zPosition="10" scrollbarMode="showOnDemand" />
			<ePixmap pixmap="skin_default/buttons/key_0.png" position="556,330" size="35,30" alphatest="on" />
			<widget source="key_0" render="Label" position="258,330" zPosition="1" size="300,30" font="Regular;20" halign="right" valign="center"/>
			<ePixmap pixmap="skin_default/buttons/red.png" position="5,360" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="155,360" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="305,360" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="450,360" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="5,360" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_green" render="Label" position="155,360" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_yellow" render="Label" position="305,360" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_blue" render="Label" position="450,360" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" foregroundColor="#ffffff" transparent="1" />
		</screen>
		"""
	def __init__(self, _session, _url=None, _title=None):
		# _url/_title: the page currently open in the browser, offered as a
		# "current page" bookmark candidate by keyGreen.
		self.mUrl = _url
		self.mTitle = _title
		self.mBookmarkManager = BookmarkManager.getInstance()
		self.mSession = _session
		Screen.__init__(self, _session)
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions","ColorActions", "NumberActions"], {
			"ok" : self.keyOK,
			"cancel": self.keyCancel,
			"red" : self.keyRed,
			"green" : self.keyGreen,
			"yellow": self.keyYellow,
			"blue" : self.keyBlue,
			"0" : self.keyNumber,
		},-2)
		self["key_red"] = StaticText(_("Exit"))
		self["key_green"] = StaticText(_("Add"))
		self["key_yellow"] = StaticText(_("Edit"))
		self["key_blue"] = StaticText(_("Delete"))
		self["key_0"] = StaticText(_("Set as Startpage"))
		self.mBookmarkList = self.setBookmarkList()
		self["bookmarklist"] = MenuList(self.mBookmarkList)
		self.onLayoutFinish.append(self.layoutFinished)
	def layoutFinished(self):
		self.setTitle(_('Bookmark'))
	def setBookmarkList(self):
		# Flatten the category tree into MenuList entries:
		# '# <category>' rows followed by ' - <bookmark>' rows.
		l = []
		#self.mBookmarkManager.dump()
		cd = self.mBookmarkManager.getBookmarkRoot()
		for ck in cd.iterkeys():
			l.append(('# ' + cd[ck].mName, cd[ck]))
			bd = cd[ck].mBookmarks
			for bk in bd.iterkeys():
				l.append((' - ' + bd[bk].mTitle, bd[bk]))
		return l
	def updateBookmarkList(self):
		self.mBookmarkList = self.setBookmarkList()
		self["bookmarklist"].setList(self.mBookmarkList)
	def cbEditWindow(self, ret=False):
		# BookmarkEditWindow close callback: refresh only when data changed.
		if not ret:
			return
		self.updateBookmarkList()
	def getParentCategory(self):
		# Walk upward from the selection to the nearest '#' category row.
		idx = self["bookmarklist"].getSelectedIndex()
		try:
			while idx >= 0:
				data = self.mBookmarkList[idx][0].strip()
				if data[0] == '#':
					return self.mBookmarkList[idx][1]
				idx -= 1
		except: pass
		return None
	def isCategoryItem(self):
		# A category row's label starts with '#'.
		try:
			head = self["bookmarklist"].getCurrent()[0].strip()
			if head[0] == '#':
				return True
		except: pass
		return False
	def keyNumber(self):
		# Key 0: set the selected bookmark as the browser start page.
		data = self["bookmarklist"].getCurrent()[1]
		if strIsEmpty(data.mUrl):
			msg = _("Invalid URL. Please check again!!")
			self.mSession.open(MessageBox, msg, MessageBox.TYPE_INFO)
			return
		def cbSetStartpage(ret=None):
			if ret is None: return
			if ret:
				data = self["bookmarklist"].getCurrent()[1]
				OperaBrowserSetting().setData(data.mUrl, data.mType)
		msg = _("Do you want to set selected url to the Startpage?")
		self.mSession.openWithCallback(cbSetStartpage, MessageBox, msg, MessageBox.TYPE_YESNO, default=True)
	def keyGreen(self):
		# Add: offers current page / direct bookmark / direct category,
		# depending on whether a current page URL is available.
		def cbGreen(data):
			if data is None:
				return
			if data[1] == 1:
				# Bookmark the page currently open in the browser.
				parent = self.getParentCategory()
				if parent is None:
					return
				if strIsEmpty(self.mTitle):
					return
				retAdd = self.mBookmarkManager.addBookmark(self.mTitle, self.mUrl, parent.mId, 0)
				if not retAdd:
					msg = _("Current page is already exist.")
					self.mSession.open(MessageBox, msg, MessageBox.TYPE_INFO)
				self.cbEditWindow(True)
			elif data[1] == 2:
				# Manually entered bookmark under the selected category.
				parent = self.getParentCategory()
				if parent is None:
					return
				b = BookmarkData(0, '', '', parent.mId, 0)
				self.mSession.openWithCallback(self.cbEditWindow, BookmarkEditWindow, _('Add'), BookmarkEditWindow.BOOKMARK, b, self.mBookmarkManager)
			elif data[1] == 3:
				# New empty category.
				c = CategoryData(0, '')
				self.mSession.openWithCallback(self.cbEditWindow, BookmarkEditWindow, _('Add'), BookmarkEditWindow.CATEGORY, c, self.mBookmarkManager)
		if strIsEmpty(self.mUrl):
			l = [(_('Direct Input(Bookmark)'),2,), (_('Direct Input(Category)'),3,)]
		else: l = [(_('Currentpage(Bookmark)'),1,), (_('Direct Input(Bookmark)'),2,), (_('Direct Input(Category)'),3,)]
		self.mSession.openWithCallback(cbGreen, ChoiceBox, title=_("Please choose."), list=l)
	def keyYellow(self):
		# Edit the selected category or bookmark in place.
		data = self["bookmarklist"].getCurrent()[1]
		if self.isCategoryItem():
			self.mSession.openWithCallback(self.cbEditWindow, BookmarkEditWindow, _('Edit'), BookmarkEditWindow.CATEGORY, data, self.mBookmarkManager)
		else: self.mSession.openWithCallback(self.cbEditWindow, BookmarkEditWindow, _('Edit'), BookmarkEditWindow.BOOKMARK, data, self.mBookmarkManager)
	def keyBlue(self):
		# Delete after confirmation; deleting a category removes its bookmarks.
		def cbBlue(ret=None):
			if not ret: return
			data = self["bookmarklist"].getCurrent()[1]
			if self.isCategoryItem():
				self.mBookmarkManager.deleteCategory(data.mId)
			else: self.mBookmarkManager.deleteBookmark(data.mId)
			self.updateBookmarkList()
		if self.isCategoryItem():
			msg = _("Do you want to delete the category and the bookmarks?")
		else: msg = _("Do you want to delete the bookmark?")
		self.mSession.openWithCallback(cbBlue, MessageBox, msg, MessageBox.TYPE_YESNO, default=True)
	def keyOK(self):
		# Open the selected bookmark: closes this screen with (url, mode).
		if self.isCategoryItem(): return
		data = self["bookmarklist"].getCurrent()[1]
		url = data.mUrl.strip()
		if len(url) == 0:
			self.session.open(MessageBox, _("Can't open selected bookmark.\n - URL data is empty!!"), type = MessageBox.TYPE_INFO)
			return
		mode = data.mType
		if mode:
			# Manual bookmarks (type != 0): prefer a localized manual when
			# the language-specific directory exists.
			lang = language.getLanguage()
			if lang == 'ru_RU' and os.path.exists('/usr/local/manual/ru_RU'):
				url = '/usr/local/manual/ru_RU/main.html'
			elif lang == 'de_DE' and os.path.exists('/usr/local/manual/de_DE'):
				url = '/usr/local/manual/de_DE/main.html'
		self.close((url, mode))
	def keyRed(self):
		self.keyCancel()
	def keyCancel(self):
		self.close()
class BrowserHelpWindow(Screen, HelpableScreen):
	"""Help screen describing the remote-control key mapping of the browser,
	in three pages: global keys, keyboard mode, and mouse mode."""
	# Help page selectors used by setHelpModeActions().
	MODE_GLOBAL,MODE_KEYBOARD,MODE_MOUSE = 1,2,3
	skin = """
		<screen name="BrowserHelpWindow" position="center,center" size="600,40" title="Browser Help" >
			<ePixmap pixmap="skin_default/buttons/red.png" position="5,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="155,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="305,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="450,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="5,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_green" render="Label" position="155,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_yellow" render="Label" position="305,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" foregroundColor="#ffffff" transparent="1" />
			<widget source="key_blue" render="Label" position="450,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" foregroundColor="#ffffff" transparent="1" />
		</screen>
		"""
	def __init__(self, session):
		Screen.__init__(self, session)
		HelpableScreen.__init__(self)
		self["key_red"] = StaticText(_("Exit"))
		self["key_green"] = StaticText(_("Global"))
		self["key_yellow"] = StaticText(_("Mouse"))
		self["key_blue"] = StaticText(_("Keyboard"))
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions","ColorActions"], {
			"ok" : self.keyRed,
			"cancel": self.keyRed,
			"red" : self.keyRed,
			"green" : self.keyGreen,
			"yellow": self.keyYellow,
			"blue" : self.keyBlue,
		},-2)
		# Defer the first help page until after the screen is fully set up.
		self.showHelpTimer = eTimer()
		self.showHelpTimer.callback.append(self.cbShowHelpTimerClosed)
		self.showHelpTimer.start(500)
		self.onLayoutFinish.append(self.layoutFinished)
	def layoutFinished(self):
		self.setTitle(_('Browser Help'))
	def cbShowHelpTimerClosed(self):
		# One-shot timer: open the global help page on entry.
		self.showHelpTimer.stop()
		self.setHelpModeActions(self.MODE_GLOBAL)
	def setHelpModeActions(self, _mode=0):
		# Rebuild the helpable action maps for the chosen page, then show
		# the standard help list (HelpableScreen.showHelp).
		self.helpList = []
		if _mode == self.MODE_GLOBAL:
			self["OkCancelActions"] = HelpableActionMap(self, "OkCancelActions", {
				"cancel" : (self.keyPass, _("Exit the Opera browser.")),
			})
			self["MenuActions"] = HelpableActionMap(self, "MenuActions", {
				"menu" : (self.keyPass, _("Show the Menu window.")),
			})
			self["ColorActions"] = HelpableActionMap(self, "ColorActions", {
				"green" : (self.keyPass, _("Enter Key")),
				"yellow" : (self.keyPass, _("Show the Virtual keyboard window.")),
				"blue" : (self.keyPass, _("Backspace Key")),
			})
			self["EPGSelectActions"] = HelpableActionMap(self, "EPGSelectActions", {
				"info" : (self.keyPass, _("Switch to keyboard/mouse mode.")),
			})
		elif _mode == self.MODE_MOUSE:
			self["DirectionActions"] = HelpableActionMap(self, "DirectionActions", {
				"up" : (self.keyPass, _("It will move the mouse pointer up.")),
				"down" : (self.keyPass, _("It will move the mouse pointer down.")),
				"left" : (self.keyPass, _("It will move the mouse pointer left.")),
				"right" : (self.keyPass, _("It will move the mouse pointer right.")),
			})
			self["OkCancelActions"] = HelpableActionMap(self, "OkCancelActions", {
				"ok" : (self.keyPass, _("Left Mouse Button")),
			})
			self["EPGSelectActions"] = HelpableActionMap(self, "EPGSelectActions", {
				"nextBouquet" : (self.keyPass, _("Right Mouse Button")),
				"nextService" : (self.keyPass, _("Left Key")),
				"prevService" : (self.keyPass, _("Right Key")),
			})
		elif _mode == self.MODE_KEYBOARD:
			self["DirectionActions"] = HelpableActionMap(self, "DirectionActions", {
				"up" : (self.keyPass, _("Up Key")),
				"down" : (self.keyPass, _("Down Key")),
				"left" : (self.keyPass, _("Left Key")),
				"right" : (self.keyPass, _("Right Key")),
			})
			self["OkCancelActions"] = HelpableActionMap(self, "OkCancelActions", {
				"ok" : (self.keyPass, _("Enter Key")),
			})
			self["EPGSelectActions"] = HelpableActionMap(self, "EPGSelectActions", {
				"nextBouquet" : (self.keyPass, _("PageUp Key")),
				"prevBouquet" : (self.keyPass, _("PageDown Key")),
				"nextService" : (self.keyPass, _("Go to previous page.")),
				"prevService" : (self.keyPass, _("Go to next page.")),
			})
		if _mode > 0:
			self.showHelp()
	def keyPass(self):
		# Placeholder action: entries exist only to supply help text.
		pass
	def keyRed(self):
		self.close()
	def keyGreen(self):
		self.setHelpModeActions(self.MODE_GLOBAL)
	def keyYellow(self):
		self.setHelpModeActions(self.MODE_MOUSE)
	def keyBlue(self):
		self.setHelpModeActions(self.MODE_KEYBOARD)
class OperaBrowser(Screen):
	"""Top-level menu screen of the Opera browser plugin: a File/Tools/Help
	menubar with drop-down submenus, drawn over the running browser."""
	# Geometry constants; they are baked into the skin template below and
	# also used at runtime by keyDown()/keyRight() for move/resize.
	MENUBAR_ITEM_WIDTH = 150
	MENUBAR_ITEM_HEIGHT = 30
	SUBMENULIST_WIDTH = 200
	SUBMENULIST_HEIGHT = 25
	SUBMENULIST_NEXT = 2
	skin = """
		<screen name="Opera Browser" position="0,0" size="1280,720" backgroundColor="transparent" flags="wfNoBorder" title="Opera Browser">
			<widget name="topArea" zPosition="-1" position="0,0" size="1280,60" font="Regular;20" valign="center" halign="center" backgroundColor="#000000" />
			<widget name="menuitemFile" position="30,20" size="150,30" font="Regular;20" valign="center" halign="center" backgroundColor="#000000" foregroundColors="#9f1313,#a08500" />
			<widget name="menuitemTool" position="180,20" size="150,30" font="Regular;20" valign="center" halign="center" backgroundColor="#000000" foregroundColors="#9f1313,#a08500" />
			<widget name="menuitemHelp" position="330,20" size="150,30" font="Regular;20" valign="center" halign="center" backgroundColor="#000000" foregroundColors="#9f1313,#a08500" />
			<widget name="menulist" position="50,%d" size="%d,150" backgroundColor="#000000" zPosition="10" scrollbarMode="showOnDemand" />
			<widget name="submenulist" position="%d,%d" size="%d,150" backgroundColor="#000000" zPosition="10" scrollbarMode="showOnDemand" />
			<widget name="bottomArea" position="0,640" size="1280,80" font="Regular;20" valign="center" halign="center" backgroundColor="#000000" />
		</screen>
		""" % (MENUBAR_ITEM_HEIGHT+30, SUBMENULIST_WIDTH, SUBMENULIST_WIDTH+50+SUBMENULIST_NEXT, MENUBAR_ITEM_HEIGHT+30, SUBMENULIST_WIDTH)# modify menu
	# One entry list per menubar item (File, Tools, Help); the second tuple
	# member is an optional submenu payload (None = leaf command).
	MENUITEMS_LIST =[[(_('Open Startpage'), None), (_('Open URL'), None), (_('Start/Stop'),None), (_('Exit'), None)],
			[(_('Bookmark'), None), (_('Preference'), None)],
			[(_('About'), None), (_('Help'), None)]]
	def __init__(self, session, url=None):
		"""Build the menu screen; if *url* is given it is opened right after
		layout finishes (see layoutFinished)."""
		Screen.__init__(self, session)
		self["actions"] = ActionMap(["DirectionActions", "MenuActions", "OkCancelActions"], {
			"cancel" : self.keyCancel
			,"ok" : self.keyOK
			,"left" : self.keyLeft
			,"right" : self.keyRight
			,"up" : self.keyUp
			,"down" : self.keyDown
			,"menu" : self.keyMenu
		}, -2)
		# Browser/session state flags.
		self._terminatedBrowser = True
		self._enableKeyEvent = True
		self._currentPageUrl = None
		self._currentPageTitle = None
		# Menubar/menulist state.
		self.menubarCurrentIndex = 0
		self.lvMenuItems = []
		self.lvSubMenuItems = []
		self["topArea"] = Label()
		self["bottomArea"] = Label()
		self["menuitemFile"] = MultiColorLabel()# modify menu
		self["menuitemTool"] = MultiColorLabel()
		self["menuitemHelp"] = MultiColorLabel()
		self["menulist"] = MenuList(self.setListOnView())
		self["submenulist"] = MenuList(self.setSubListOnView())
		# Visibility flags for the whole screen / menu list / submenu list.
		self.toggleMainScreenFlag = True
		self.toggleListViewFlag = False
		self.toggleSubListViewFlag = False
		self.currentListView = self["menulist"]
		self.onLayoutFinish.append(self.layoutFinished)
		# Timer used to defer close handling out of the command-server callback.
		self._onCloseTimer = eTimer()
		self._onCloseTimer.callback.append(self._cb_onClose)
		self.paramUrl = url
def enableRCMouse(self, mode): #mode=[0|1]|[False|True]
rcmouse_path = "/proc/stb/fp/mouse"
if os.path.exists(rcmouse_path):
os.system("echo %d > %s" % (mode, rcmouse_path))
	def layoutFinished(self):
		"""Post-layout setup: set label texts, hide the menus, and auto-open
		the constructor-supplied URL if one was given."""
		self["menuitemFile"].setText(_("File"))# modify menu
		self["menuitemTool"].setText(_("Tools"))
		self["menuitemHelp"].setText(_("Help"))
		self["menulist"].hide()
		self["submenulist"].hide()
		self["bottomArea"].setText(_("Opera Web Browser Plugin v1.0"))
		self.setTitle(_("BrowserMain"))
		self.selectMenuitem()
		if self.paramUrl is not None:
			# Direct start: hide this menu and open the URL immediately (mode 1).
			self.keyMenu()
			self.cbUrlText(self.paramUrl, 1)
def selectMenuitem(self):
tmp = [self["menuitemFile"], self["menuitemTool"], self["menuitemHelp"]]# modify menu
self["menuitemFile"].setForegroundColorNum(0)
self["menuitemTool"].setForegroundColorNum(0)
self["menuitemHelp"].setForegroundColorNum(0)
tmp[self.menubarCurrentIndex].setForegroundColorNum(1)
def popupCloseAll(self):
self.keyLeft()
self.keyLeft()
self.keyUp()
self.keyCancel()
def setListOnView(self):
l = self.MENUITEMS_LIST[self.menubarCurrentIndex]
if not self._terminatedBrowser and self.menubarCurrentIndex == 0: # running
l = [(_('Return'), None)]
self.lvMenuItems = l #self.MENUITEMS_LIST[self.menubarCurrentIndex]
return self.lvMenuItems
def setSubListOnView(self):
self.lvSubMenuItems = []
xl = self["menulist"].getCurrent()[1]
if xl is None: return []
for x in xl:
self.lvSubMenuItems.append((x,None))
return self.lvSubMenuItems
def toggleMainScreen(self):
if not self.toggleMainScreenFlag:
self.show()
else: self.hide()
self.toggleMainScreenFlag = not self.toggleMainScreenFlag
def toggleListView(self):
if not self.toggleListViewFlag:
self["menulist"].show()
else: self["menulist"].hide()
self.toggleListViewFlag = not self.toggleListViewFlag
def toggleSubListView(self):
if not self.toggleSubListViewFlag:
self["submenulist"].show()
else: self["submenulist"].hide()
self.toggleSubListViewFlag = not self.toggleSubListViewFlag
def setCurrentListView(self, listViewIdx):
if listViewIdx == 0:
self.currentListView = None
elif listViewIdx == 1:
self.currentListView = self["menulist"]
elif listViewIdx == 2:
self.currentListView = self["submenulist"]
	def _cb_onClose(self):
		# Deferred (timer-driven) cleanup after the browser window closed:
		# detach command-server callbacks, restore title/RC-mouse, unlock
		# framebuffer and remote input, and re-show the menu.
		self._onCloseTimer.stop()
		command_server = getCommandServer()
		try:
			if self._on_close_window in command_server.onHbbTVCloseCB:
				command_server.onHbbTVCloseCB.remove(self._on_close_window)
		except Exception, ErrMsg: pass
		try:
			if self._on_setPageTitle in command_server.onSetPageTitleCB:
				command_server.onSetPageTitleCB.remove(self._on_setPageTitle)
		except Exception, ErrMsg: pass
		self._on_setPageTitle(_('Opera Browser'))
		self.enableRCMouse(False)
		self.toggleMainScreen()
		fbClass.getInstance().unlock()
		eRCInput.getInstance().unlock()
		self._terminatedBrowser = True
		self._enableKeyEvent = True
		#if not self.toggleListViewFlag:
		#	self.keyDown()
		self._currentPageUrl = ''
		if self.paramUrl is not None:
			# Screen was started directly with a URL: leave entirely.
			self.keyCancel()
		else:
			# Refresh the File menu (now showing full entries again).
			self.keyRight()
			self.keyLeft()
	def _on_setPageTitle(self, title=None):
		# Command-server callback: mirror the browser page title into the
		# window title; ignores None.
		print "Title :",title
		if title is None:
			return
		self.setTitle(title)
	def cbUrlText(self, data=None, mode=0):
		# Open *data* (a URL) in the browser. mode is forwarded to the
		# OP_BROWSER_OPEN_URL command (observed values: 0 = normal URL,
		# 1 = manual/startpage — confirm against the command server).
		print "Inputed Url :", data, mode
		if strIsEmpty(data):
			return
		#self.hideSubmenu()
		# Register for title updates and close notification before handing
		# input/framebuffer control over to the browser process.
		command_server = getCommandServer()
		if self._on_setPageTitle not in command_server.onSetPageTitleCB:
			command_server.onSetPageTitleCB.append(self._on_setPageTitle)
		if self._on_close_window not in command_server.onHbbTVCloseCB:
			command_server.onHbbTVCloseCB.append(self._on_close_window)
		self.toggleMainScreen()
		self.enableRCMouse(True)
		fbClass.getInstance().lock()
		eRCInput.getInstance().lock()
		command_util = getCommandUtil()
		command_util.sendCommand('OP_BROWSER_OPEN_URL', data, mode)
		self._terminatedBrowser = False
		self._enableKeyEvent = False
def _on_close_window(self):
self._onCloseTimer.start(1000)
def _cb_bookmarkWindowClosed(self, data=None):
if data is None:
return
(url, mode) = data
self.cbUrlText(url, mode)
def _cmd_on_OpenUrl(self):
global _g_helper
if not _g_helper._is_browser_running():
message = _("Opera Browser was not running.\nPlease running browser using [File]>[Start/Stop] menu.")
self.session.open(MessageBox, message, MessageBox.TYPE_INFO)
return
self.session.openWithCallback(self.cbUrlText, VirtualKeyBoard, title=(_("Please enter URL here")), text='http://')
def _cmd_on_About(self):
self.session.open(MessageBox, _('Opera Web Browser Plugin v1.0'), type = MessageBox.TYPE_INFO)
def _cmd_on_Exit(self):
self.close()
def _cmd_on_StartStop(self):
global _g_helper
if _g_helper is None:
return
_g_helper.showBrowserConfigBox()
def _cmd_on_Bookmark(self):
url = self._currentPageUrl
if url is None:
url = ''
title = self._currentPageTitle
if title is None:
title = ''
self.session.openWithCallback(self._cb_bookmarkWindowClosed, OperaBrowserBookmarkWindow, url, title)
def _cmd_on_Preference(self):
url = self._currentPageUrl
if url is None:
url = ''
self.session.open(OperaBrowserPreferenceWindow, url)
def _cmd_on_OpenStartpage(self):
global _g_helper
if not _g_helper._is_browser_running():
message = _("Opera Browser was not running.\nPlease running browser using [File]>[Start/Stop] menu.")
self.session.open(MessageBox, message, MessageBox.TYPE_INFO)
return
mode = 0
start = 'http://vuplus.com'
try:
d = OperaBrowserSetting().getData()
start = d['start']
mode = d['type']
except: pass
self.cbUrlText(start, mode)
def _cmd_on_ReturnToBrowser(self):
self.keyCancel()
def _cmd_on_Help(self):
self.session.open(BrowserHelpWindow)
	def doCommand(self, command):
		# Dispatch a menu label (already translated) to its handler; unknown
		# labels fall into the except and are only printed.
		# modify menu
		cmd_map = {}
		cmd_map[_('Exit')] = self._cmd_on_Exit
		cmd_map[_('Help')] = self._cmd_on_Help
		cmd_map[_('About')] = self._cmd_on_About
		cmd_map[_('Open URL')] = self._cmd_on_OpenUrl
		cmd_map[_('Start/Stop')] = self._cmd_on_StartStop
		cmd_map[_('Bookmark')] = self._cmd_on_Bookmark
		cmd_map[_('Preference')] = self._cmd_on_Preference
		cmd_map[_('Return')] = self._cmd_on_ReturnToBrowser
		cmd_map[_('Open Startpage')] = self._cmd_on_OpenStartpage
		try:
			cmd_map[command]()
		except Exception, ErrMsg: print ErrMsg
	def keyOK(self):
		"""OK: open the menu list if closed; run a leaf command; otherwise
		descend into the submenu (same as pressing right)."""
		if not self.toggleListViewFlag:
			self.keyDown()
			return
		if self.currentListView.getCurrent()[1] is None:
			# Leaf entry (no submenu payload): execute it by label.
			self.doCommand(self.currentListView.getCurrent()[0])
			#self.session.open(MessageBox, _(self.currentListView.getCurrent()[0]), type = MessageBox.TYPE_INFO)
			return
		self.keyRight()
def updateSelectedMenuitem(self, status):
if self.menubarCurrentIndex == 0 and status < 0:
self.menubarCurrentIndex = 2 # modify menu
elif self.menubarCurrentIndex == 2 and status > 0: # modify menu
self.menubarCurrentIndex = 0
else: self.menubarCurrentIndex += status
self.selectMenuitem()
def keyLeft(self):
    """Handle left: previous menubar item, leave a submenu, or jump one menu left.

    When a list is open, the final four calls collapse the current list and
    reopen the list one menubar position to the left (keyLeft recurses once
    with the list closed).
    """
    if not self.toggleMainScreenFlag:
        return
    if not self.toggleListViewFlag:
        self.updateSelectedMenuitem(-1)
        return
    if self.toggleSubListViewFlag:
        # A submenu is open: step back to its parent list.
        self.setCurrentListView(1)
        self.toggleSubListView()
        return
    #if self.currentListView.getSelectedIndex():
    self.currentListView.pageUp()
    self.keyUp()
    self.keyLeft()
    self.keyDown()
def keyRight(self):
    """Handle right: next menubar item, open a submenu, or jump one menu right.

    If the highlighted entry carries a submenu, the submenu list is filled,
    resized to fit its items and positioned next to the parent row;
    otherwise the current list is collapsed and reopened one menubar
    position to the right (keyRight recurses once with the list closed).
    """
    if not self.toggleMainScreenFlag:
        return
    if not self.toggleListViewFlag:
        self.updateSelectedMenuitem(1)
        return
    if self.currentListView is None:
        return
    if self.currentListView.getCurrent()[1] is not None:
        parentSelectedIndex = self.currentListView.getSelectedIndex()
        self.setCurrentListView(2)
        self.currentListView.setList(self.setSubListOnView())
        # +5 px slack so the last row is not clipped.
        self.currentListView.resize(self.SUBMENULIST_WIDTH, self.SUBMENULIST_HEIGHT*len(self.lvSubMenuItems)+5)
        # Place the submenu to the right of the parent list, vertically
        # aligned with the selected parent row.
        self.currentListView.move(self.MENUBAR_ITEM_WIDTH*self.menubarCurrentIndex + self.SUBMENULIST_WIDTH+self.SUBMENULIST_NEXT + 50,self.MENUBAR_ITEM_HEIGHT+30+(parentSelectedIndex*self.SUBMENULIST_HEIGHT))
        self.toggleSubListView()
        return
    self.currentListView.pageUp()
    self.keyUp()
    self.keyRight()
    self.keyDown()
def keyDown(self):
    """Handle down: open the menu list under the current menubar item, or move down.

    On first press the dropdown list is populated, sized to its items and
    placed under the selected menubar entry; subsequent presses move the
    selection down within the open list.
    """
    if not self.toggleMainScreenFlag:
        return
    if self.currentListView is None:
        return
    if not self.toggleListViewFlag:
        self.currentListView.setList(self.setListOnView())
        # +5 px slack so the last row is not clipped.
        self.currentListView.resize(self.SUBMENULIST_WIDTH, self.SUBMENULIST_HEIGHT*len(self.lvMenuItems)+5)
        self.currentListView.move(self.MENUBAR_ITEM_WIDTH*self.menubarCurrentIndex+1+ 50,self.MENUBAR_ITEM_HEIGHT+30)
        self.toggleListView()
        return
    self.currentListView.down()
def keyUp(self):
    """Handle up: move the list selection up, closing the list at its top row."""
    if not self.toggleMainScreenFlag:
        return
    if self.currentListView is None:
        return
    if self.currentListView == self["menulist"]:
        if self.currentListView.getSelectedIndex() == 0:
            # Already at the first entry: collapse the dropdown instead.
            self.toggleListView()
            return
    self.currentListView.up()
def keyCancel(self):
    """Handle cancel/exit.

    While the browser is still running, input is handed back to it (the
    framebuffer and RC input are re-locked for the browser process and the
    menu overlay is hidden); only after the browser has terminated does
    cancel actually close this screen.
    """
    if not self._terminatedBrowser:
        #self._session.openWithCallback(self._cb_virtualKeyboardClosed, VirtualKeyBoard, title=("Please enter URL here"), text="")
        fbClass.getInstance().lock()
        eRCInput.getInstance().lock()
        if self.toggleListViewFlag:
            self.toggleMainScreen()
        self._currentPageUrl = None
        self._currentPageTitle = None
        command_util = getCommandUtil()
        # Tell the browser process that the menu session is over.
        command_util.sendCommand('OP_BROWSER_MENU_RES')
        return
    self.close()
def keyMenu(self):
    """Toggle visibility of the browser menu overlay."""
    self.toggleMainScreen()
def setCurrentPageUrl(self, url, title=None):
    """Remember the page currently shown in the browser and show the menu.

    url: address of the current page.
    title: optional page title; when omitted, the first (up to) 10
    characters of the URL are used as a stand-in title.
    """
    self._currentPageUrl = url
    if title is None:
        idx = len(url)
        if idx > 10: idx = 10
        title = url[:idx]
    self._currentPageTitle = title
    print self._currentPageUrl
    self.toggleMainScreen()
    self.hideSubmenu()
    self.keyDown()
def hideSubmenu(self):
    """Collapse any open list back to its top so only the menubar remains."""
    self.currentListView.pageUp()
    self.keyUp()
def auto_start_main(reason, **kwargs):
    """Enigma2 autostart hook: stop the plugin's command server on shutdown.

    reason: non-zero on shutdown (Enigma2 plugin autostart convention);
    nothing is started here on boot.
    """
    if reason:
        command_server = getCommandServer()
        command_server.stop()
from Screens.HelpMenu import HelpableScreen
def session_start_main(session, reason, **kwargs):
    """Enigma2 session-start hook: unlock I/O and install the HbbTV helper.

    Also monkey-patches HelpableScreen so every help-capable screen gains
    the manual-viewer action (see HelpableScreen__init__).
    """
    fbClass.getInstance().unlock()
    eRCInput.getInstance().unlock()
    global _g_helper
    _g_helper = session.open(HbbTVHelper)
    # Replace HelpableScreen.__init__ globally; the saved session is used
    # by showManual() to open the browser.
    HelpableScreen.__init__ = HelpableScreen__init__
    HelpableScreen.session = session
def HelpableScreen__init__(self):
    """Replacement HelpableScreen.__init__ installed by session_start_main.

    Binds the help buttons: short help key shows the normal help screen,
    long press opens the offline HTML manual via showManual().
    """
    if isinstance(self, HelpableScreen):
        HelpableScreen.showManual = showManual
        self["helpActions"] = ActionMap(["HelpbuttonActions"], {
            "help_b" : self.showHelp,
            "help_l" : self.showManual,
        }, -2)
_g_clearBrowserDataTimer = eTimer()
def showManual(self):
    """Open the locally installed HTML manual in the Opera browser.

    Does nothing when no manual is installed. Prefers a localized manual
    (Russian or German) when present, otherwise falls back to the default
    /usr/local/manual/main.html.
    """
    if not os.path.exists('/usr/local/manual'):
        return
    url = 'file:///usr/local/manual/main.html'
    lang = language.getLanguage()
    if lang == 'ru_RU' and os.path.exists('/usr/local/manual/ru_RU'):
        url = 'file:///usr/local/manual/ru_RU/main.html'
    elif lang == 'de_DE' and os.path.exists('/usr/local/manual/de_DE'):
        url = 'file:///usr/local/manual/de_DE/main.html'
    def _do_clean():
        # One-shot timer callback: detach itself, then drop the browser ref.
        _g_clearBrowserDataTimer.stop()
        try: _g_clearBrowserDataTimer.callback.remove(_do_clean)
        except: pass
        setPluginBrowser(None)
    def clearBrowserData():
        # Defer the cleanup slightly so it runs after the screen has closed.
        _g_clearBrowserDataTimer.callback.append(_do_clean)
        _g_clearBrowserDataTimer.start(50)
    setPluginBrowser(self.session.openWithCallback(clearBrowserData, OperaBrowser, url))
def plugin_start_main(session, **kwargs):
    """Plugin-menu entry point: open the Opera browser without a start URL."""
    #session.open(OperaBrowser)
    def _do_clean():
        # One-shot timer callback: detach itself, then drop the browser ref.
        _g_clearBrowserDataTimer.stop()
        try: _g_clearBrowserDataTimer.callback.remove(_do_clean)
        except: pass
        setPluginBrowser(None)
    def clearBrowserData():
        # Defer the cleanup slightly so it runs after the screen has closed.
        _g_clearBrowserDataTimer.callback.append(_do_clean)
        _g_clearBrowserDataTimer.start(50)
    setPluginBrowser(session.openWithCallback(clearBrowserData, OperaBrowser))
def plugin_extension_start_application(session, **kwargs):
    """Extensions-menu entry: show the HbbTV application selection box.

    No-op when the helper screen was never created (session hook not run).
    """
    global _g_helper
    if _g_helper is None:
        return
    _g_helper.showApplicationSelectionBox()
def plugin_extension_browser_config(session, **kwargs):
    """Extensions-menu entry: show the browser start/stop config box.

    No-op when the helper screen was never created (session hook not run).
    """
    global _g_helper
    if _g_helper is None:
        return
    _g_helper.showBrowserConfigBox()
def Plugins(path, **kwargs):
    """Enigma2 plugin registry: declare all entry points of this plugin.

    Registers the autostart/session hooks plus the extensions-menu and
    plugin-menu items defined above.
    """
    l = []
    l.append(PluginDescriptor(where=PluginDescriptor.WHERE_AUTOSTART, fnc=auto_start_main))
    l.append(PluginDescriptor(where=PluginDescriptor.WHERE_SESSIONSTART, needsRestart=True, fnc=session_start_main, weight=-10))
    l.append(PluginDescriptor(name=_("HbbTV Applications"), where=PluginDescriptor.WHERE_EXTENSIONSMENU, needsRestart=True, fnc=plugin_extension_start_application))
    l.append(PluginDescriptor(name=_("Browser Start/Stop"), where=PluginDescriptor.WHERE_EXTENSIONSMENU, needsRestart=True, fnc=plugin_extension_browser_config))
    l.append(PluginDescriptor(name=_("Opera Web Browser"), description=_("start opera web browser"), where=PluginDescriptor.WHERE_PLUGINMENU, needsRestart=True, fnc=plugin_start_main))
    return l
|
ThatRfernand/or-tools | refs/heads/master | examples/python/blending.py | 5 | # Copyright 2011 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Blending problem in Google or-tools.
From the OPL model blending.mod.
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function
import sys
from ortools.linear_solver import pywraplp
def main(sol='CBC'):
    """Build and solve the metal blending MIP with or-tools.

    sol: solver backend, 'CBC' (default) or 'GLPK'.
    Decision variables: p (pure metal bought), r (raw material), s (scrap),
    ii (ingots, integer), metal (resulting composition), z (total cost,
    minimized). Prints the solution to stdout.
    """
    # Create the solver.
    print('Solver: ', sol)
    # using GLPK
    if sol == 'GLPK':
        solver = pywraplp.Solver('CoinsGridGLPK',
                                 pywraplp.Solver.GLPK_MIXED_INTEGER_PROGRAMMING)
    else:
        # Using CBC
        solver = pywraplp.Solver('CoinsGridCBC',
                                 pywraplp.Solver.CBC_MIXED_INTEGER_PROGRAMMING)
    #
    # data
    #
    NbMetals = 3
    NbRaw = 2
    NbScrap = 2
    NbIngo = 1
    Metals = list(range(NbMetals))
    Raws = list(range(NbRaw))
    Scraps = list(range(NbScrap))
    Ingos = list(range(NbIngo))
    # Unit costs per source of material.
    CostMetal = [22, 10, 13]
    CostRaw = [6, 5]
    CostScrap = [7, 8]
    CostIngo = [9]
    # Allowed fraction of each metal in the final alloy.
    Low = [0.05, 0.30, 0.60]
    Up = [0.10, 0.40, 0.80]
    # Metal content (fraction) of each raw material / scrap / ingot.
    PercRaw = [[0.20, 0.01], [0.05, 0], [0.05, 0.30]]
    PercScrap = [[0, 0.01], [0.60, 0], [0.40, 0.70]]
    PercIngo = [[0.10], [0.45], [0.45]]
    Alloy = 71  # total amount of alloy to produce
    #
    # variables
    #
    p = [solver.NumVar(0, solver.Infinity(), 'p[%i]' % i) for i in Metals]
    r = [solver.NumVar(0, solver.Infinity(), 'r[%i]' % i) for i in Raws]
    s = [solver.NumVar(0, solver.Infinity(), 's[%i]' % i) for i in Scraps]
    ii = [solver.IntVar(0, solver.Infinity(), 'ii[%i]' % i) for i in Ingos]
    # Composition bounds are encoded directly in the variable domains.
    metal = [solver.NumVar(Low[j] * Alloy, Up[j] * Alloy, 'metal[%i]' % j)
             for j in Metals]
    z = solver.NumVar(0, solver.Infinity(), 'z')
    #
    # constraints
    #
    # z = total purchasing cost over all sources.
    solver.Add(z ==
               solver.Sum([CostMetal[i] * p[i] for i in Metals]) +
               solver.Sum([CostRaw[i] * r[i] for i in Raws]) +
               solver.Sum([CostScrap[i] * s[i] for i in Scraps]) +
               solver.Sum([CostIngo[i] * ii[i] for i in Ingos]))
    # Each metal's amount is the sum of its content over all sources.
    for j in Metals:
        solver.Add(
            metal[j] == p[j] +
            solver.Sum([PercRaw[j][k] * r[k] for k in Raws]) +
            solver.Sum([PercScrap[j][k] * s[k] for k in Scraps]) +
            solver.Sum([PercIngo[j][k] * ii[k] for k in Ingos]))
    solver.Add(solver.Sum(metal) == Alloy)
    # NOTE: Minimize() registers the objective on the solver; its return
    # value is not used.
    objective = solver.Minimize(z)
    #
    # solution and search
    #
    solver.Solve()
    print()
    print('z = ', solver.Objective().Value())
    print('Metals')
    for i in Metals:
        print(p[i].SolutionValue(), end=' ')
    print()
    print('Raws')
    for i in Raws:
        print(r[i].SolutionValue(), end=' ')
    print()
    print('Scraps')
    for i in Scraps:
        print(s[i].SolutionValue(), end=' ')
    print()
    print('Ingos')
    for i in Ingos:
        print(ii[i].SolutionValue(), end=' ')
    print()
    print('Metals')
    for i in Metals:
        print(metal[i].SolutionValue(), end=' ')
    print()
    print()
    print('walltime :', solver.WallTime(), 'ms')
    if sol == 'CBC':
        print('iterations:', solver.Iterations())
if __name__ == '__main__':
    # Default to CBC unless a solver name is given on the command line.
    chosen = sys.argv[1] if len(sys.argv) > 1 else 'CBC'
    if chosen not in ('GLPK', 'CBC'):
        print('Solver must be either GLPK or CBC')
        sys.exit(1)
    main(chosen)
|
jstoxrocky/statsmodels | refs/heads/master | statsmodels/examples/ex_kernel_singleindex_dgp.py | 34 | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 06 09:50:54 2013
Author: Josef Perktold
"""
from __future__ import print_function
if __name__ == '__main__':
    import numpy as np
    import matplotlib.pyplot as plt
    #from statsmodels.nonparametric.api import KernelReg
    import statsmodels.sandbox.nonparametric.kernel_extras as smke
    import statsmodels.sandbox.nonparametric.dgp_examples as dgp
    class UnivariateFunc1a(dgp.UnivariateFunc1):
        # Variant of UnivariateFunc1 with constant (homoskedastic) noise scale.
        def het_scale(self, x):
            return 0.5
    seed = np.random.randint(999999)
    #seed = 430973
    #seed = 47829
    seed = 648456 #good seed for het_scale = 0.5
    print(seed)
    np.random.seed(seed)
    # Simulate a single-index model: y depends on x only through xb = x.beta.
    nobs, k_vars = 300, 3
    x = np.random.uniform(-2, 2, size=(nobs, k_vars))
    xb = x.sum(1) / 3 #beta = [1,1,1]
    funcs = [#dgp.UnivariateFanGijbels1(),
             #dgp.UnivariateFanGijbels2(),
             #dgp.UnivariateFanGijbels1EU(),
             #dgp.UnivariateFanGijbels2(distr_x=stats.uniform(-2, 4))
             UnivariateFunc1a(x=xb)
             ]
    res = []
    fig = plt.figure()
    for i,func in enumerate(funcs):
        #f = func()
        f = func
#        mod0 = smke.SingleIndexModel(endog=[f.y], exog=[xb], #reg_type='ll',
#                                     var_type='c')#, bw='cv_ls')
#        mean0, mfx0 = mod0.fit()
        # Fit the single-index model on the full 3-column exog; the index
        # coefficients b are estimated jointly with the link function.
        model = smke.SingleIndexModel(endog=[f.y], exog=x, #reg_type='ll',
                                      var_type='ccc')#, bw='cv_ls')
        mean, mfx = model.fit()
        ax = fig.add_subplot(1, 1, i+1)
        f.plot(ax=ax)
        # Sort by the estimated index so the fitted mean plots as a curve.
        xb_est = np.dot(model.exog, model.b)
        sortidx = np.argsort(xb_est) #f.x)
        ax.plot(f.x[sortidx], mean[sortidx], 'o', color='r', lw=2, label='est. mean')
#        ax.plot(f.x, mean0, color='g', lw=2, label='est. mean')
        ax.legend(loc='upper left')
        res.append((model, mean, mfx))
    fig.suptitle('Kernel Regression')
    fig.show()
    alpha = 0.7
    # Scatter of observed data, true mean and estimated mean vs. true index.
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(f.x[sortidx], f.y[sortidx], 'o', color='b', lw=2, alpha=alpha, label='observed')
    ax.plot(f.x[sortidx], f.y_true[sortidx], 'o', color='g', lw=2, alpha=alpha, label='dgp. mean')
    ax.plot(f.x[sortidx], mean[sortidx], 'o', color='r', lw=2, alpha=alpha, label='est. mean')
    ax.legend(loc='upper left')
    # Side-by-side comparison: sorting by true xb vs. by estimated xb.
    fig = plt.figure()
#    ax = fig.add_subplot(1, 2, 1)
#    ax.plot(f.x, f.y, 'o', color='b', lw=2, alpha=alpha, label='observed')
#    ax.plot(f.x, f.y_true, 'o', color='g', lw=2, alpha=alpha, label='dgp. mean')
#    ax.plot(f.x, mean, 'o', color='r', lw=2, alpha=alpha, label='est. mean')
#    ax.legend(loc='upper left')
    sortidx0 = np.argsort(xb)
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(f.y[sortidx0], 'o', color='b', lw=2, alpha=alpha, label='observed')
    ax.plot(f.y_true[sortidx0], 'o', color='g', lw=2, alpha=alpha, label='dgp. mean')
    ax.plot(mean[sortidx0], 'o', color='r', lw=2, alpha=alpha, label='est. mean')
    ax.legend(loc='upper left')
    ax.set_title('Single Index Model (sorted by true xb)')
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(f.y[sortidx], 'o', color='b', lw=2, alpha=alpha, label='observed')
    ax.plot(f.y_true[sortidx], 'o', color='g', lw=2, alpha=alpha, label='dgp. mean')
    ax.plot(mean[sortidx], 'o', color='r', lw=2, alpha=alpha, label='est. mean')
    ax.legend(loc='upper left')
    ax.set_title('Single Index Model (sorted by estimated xb)')
    plt.show()
|
namboy94/messengerbot | refs/heads/master | kudubot/db/config/impl/MySqlConfig.py | 2 | """LICENSE
Copyright 2015 Hermann Krumrey <hermann@krumreyh.com>
This file is part of kudubot.
kudubot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
kudubot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with kudubot. If not, see <http://www.gnu.org/licenses/>.
LICENSE"""
from kudubot.db.config.DbConfig import DbConfig
class MySqlConfig(DbConfig):
    """MySQL-specific database configuration.

    Holds the connection parameters and renders them as a connection URI
    understood by SQLAlchemy.
    """

    def __init__(
            self,
            address: str,
            port: str,
            name: str,
            user: str,
            password: str,
    ):
        """Store the MySQL connection parameters.

        :param address: The database address
        :param port: The database port
        :param name: The database name
        :param user: The database user
        :param password: The database password
        """
        self.address = address
        self.port = port
        self.name = name
        self.user = user
        self.password = password

    def to_uri(self) -> str:
        """Render this configuration as an SQLAlchemy connection URI.

        :return: The URI
        """
        template = "mysql://{user}:{password}@{address}:{port}/{name}"
        return template.format(
            user=self.user,
            password=self.password,
            address=self.address,
            port=self.port,
            name=self.name,
        )
|
JioCloud/oslo.db | refs/heads/master | oslo/db/concurrency.py | 3 | # Copyright 2014 Mirantis.inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import logging
import threading
from oslo.config import cfg
from oslo.db._i18n import _LE
from oslo.db import api
LOG = logging.getLogger(__name__)
# Config options controlling whether DB API calls are proxied through
# eventlet's thread pool; registered under the [database] group by
# TpoolDbapiWrapper below.
tpool_opts = [
    cfg.BoolOpt('use_tpool',
                default=False,
                deprecated_name='dbapi_use_tpool',
                deprecated_group='DEFAULT',
                help='Enable the experimental use of thread pooling for '
                     'all DB API calls'),
]
class TpoolDbapiWrapper(object):
    """DB API wrapper class.

    This wraps the oslo DB API with an option to be able to use eventlet's
    thread pooling. Since the CONF variable may not be loaded at the time
    this class is instantiated, we must look at it on the first DB API call.
    """

    def __init__(self, conf, backend_mapping):
        """Register the tpool options; defer backend creation to first use.

        conf: an oslo.config ConfigOpts instance.
        backend_mapping: mapping passed through to api.DBAPI.from_config.
        """
        self._db_api = None
        self._backend_mapping = backend_mapping
        self._conf = conf
        self._conf.register_opts(tpool_opts, 'database')
        self._lock = threading.Lock()

    @property
    def _api(self):
        # Lazily create the backend on first access, using double-checked
        # locking so concurrent first calls build it only once.
        if not self._db_api:
            with self._lock:
                if not self._db_api:
                    db_api = api.DBAPI.from_config(
                        conf=self._conf, backend_mapping=self._backend_mapping)
                    if self._conf.database.use_tpool:
                        try:
                            # eventlet is an optional dependency, imported
                            # only when tpool proxying is actually enabled.
                            from eventlet import tpool
                        except ImportError:
                            LOG.exception(_LE("'eventlet' is required for "
                                              "TpoolDbapiWrapper."))
                            raise
                        self._db_api = tpool.Proxy(db_api)
                    else:
                        self._db_api = db_api
        return self._db_api

    def __getattr__(self, key):
        # Transparently forward every attribute access to the real backend.
        return getattr(self._api, key)
def list_opts():
    """Enumerate the oslo.config options available in this module.

    :returns: a list of ``(group_name, options)`` tuples
    """
    # Deep-copy so callers cannot mutate the module-level option objects.
    options = copy.deepcopy(tpool_opts)
    return [('database', options)]
|
xq262144/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/django/core/management/commands/compilemessages.py | 98 | from __future__ import unicode_literals
import codecs
import os
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import find_command, popen_wrapper
from django.utils._os import npath
def has_bom(fn):
    """Return True when the file at *fn* starts with a UTF-8 or UTF-16 BOM."""
    with open(fn, 'rb') as f:
        head = f.read(4)
    is_utf8_bom = head[:3] == b'\xef\xbb\xbf'
    is_utf16_bom = (head.startswith(codecs.BOM_UTF16_LE) or
                    head.startswith(codecs.BOM_UTF16_BE))
    return is_utf8_bom or is_utf16_bom
def compile_messages(stdout, locale=None):
    """Compile .po translation files into binary .mo files using msgfmt.

    stdout: stream used for per-file progress messages.
    locale: optional list of locale codes (e.g. ['de_AT']); None compiles
    every locale found.
    Raises CommandError when msgfmt is unavailable, no locale directory
    exists, a .po file carries a BOM, or msgfmt exits with an error.
    """
    program = 'msgfmt'
    if find_command(program) is None:
        raise CommandError("Can't find %s. Make sure you have GNU gettext tools 0.15 or newer installed." % program)
    basedirs = [os.path.join('conf', 'locale'), 'locale']
    if os.environ.get('DJANGO_SETTINGS_MODULE'):
        # Settings are only importable when DJANGO_SETTINGS_MODULE is set.
        from django.conf import settings
        basedirs.extend(settings.LOCALE_PATHS)
    # Gather existing directories.
    basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
    if not basedirs:
        raise CommandError("This script should be run from the Django Git checkout or your project or app tree, or with the settings module specified.")
    for basedir in basedirs:
        if locale:
            dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locale]
        else:
            dirs = [basedir]
        for ldir in dirs:
            for dirpath, dirnames, filenames in os.walk(ldir):
                for f in filenames:
                    if not f.endswith('.po'):
                        continue
                    stdout.write('processing file %s in %s\n' % (f, dirpath))
                    fn = os.path.join(dirpath, f)
                    # msgfmt chokes on BOMs; fail early with a clear message.
                    if has_bom(fn):
                        raise CommandError("The %s file has a BOM (Byte Order Mark). Django only supports .po files encoded in UTF-8 and without any BOM." % fn)
                    pf = os.path.splitext(fn)[0]
                    # --check-format validates printf-style placeholders.
                    args = [program, '--check-format', '-o', npath(pf + '.mo'), npath(pf + '.po')]
                    output, errors, status = popen_wrapper(args)
                    if status:
                        if errors:
                            msg = "Execution of %s failed: %s" % (program, errors)
                        else:
                            msg = "Execution of %s failed" % program
                        raise CommandError(msg)
class Command(BaseCommand):
    """Management command wrapper around compile_messages."""

    # Extend the base options with a repeatable --locale/-l flag.
    option_list = BaseCommand.option_list + (
        make_option('--locale', '-l', dest='locale', action='append',
                    help='locale(s) to process (e.g. de_AT). Default is to process all. Can be used multiple times.'),
    )
    help = 'Compiles .po files to .mo files for use with builtin gettext support.'
    # No model access happens here, and the active locale must be preserved.
    requires_model_validation = False
    leave_locale_alone = True

    def handle(self, **options):
        """Entry point: delegate to compile_messages for the requested locales."""
        locale = options.get('locale')
        compile_messages(self.stdout, locale=locale)
|
tetsuo13/OpenDKIM-Rotate-Keys | refs/heads/master | odkim_rotate/dns/linode_provider.py | 1 | import os
import requests
from odkim_rotate.dns.provider import *
class LinodeDnsProvider(DnsProvider):
    """DNS provider for Linode.

    Linode requires a domain ID when adding any DNS records. Rather than
    enumerating all domains on every request to create a TXT record in order
    to find the ID of the domain in use, all domains for the Linode account
    are cached in memory so that subsequent requests are faster. This opens
    the possibility of errors if the domain on Linode is deleted after its
    information has been cached.

    Full documentation on Linode API:
    https://www.linode.com/api/
    """

    api_key = ''
    api_url = 'https://api.linode.com/'
    domains = {}

    def __init__(self):
        """Read the API key from the LINODE_API_KEY environment variable.

        Raises KeyError when the variable is not set.
        """
        if 'LINODE_API_KEY' not in os.environ:
            raise KeyError('LINODE_API_KEY environment variable not set')
        self.api_key = os.environ.get('LINODE_API_KEY')
        self.domains = {}

    def create_txt_record(self, domain, selector, value):
        """Create the TXT record ``<selector>._domainkey`` under *domain*.

        Raises KeyError when *domain* does not exist in the Linode account.
        """
        if not self.domains:
            self.enumerate_domains()
        if domain not in self.domains:
            raise KeyError('Domain {} not found in Linode'.format(domain))
        data = {
            'api_action': 'domain.resource.create',
            'DomainID': self.domains[domain],
            'Type': 'TXT',
            'Name': selector + '._domainkey',
            'Target': value
        }
        # The response body carries nothing we need; API errors are raised
        # inside send_request().
        self.send_request(data)

    def enumerate_domains(self):
        """Cache all domains available along with their domain ID.

        Raises RuntimeError when the account has no domains at all.
        """
        # BUG FIX: the original built this dict and then ignored it, passing
        # a duplicate literal to send_request(); use the single dict.
        data = {
            'api_action': 'domain.list'
        }
        response = self.send_request(data)
        for domain in response['DATA']:
            self.domains[domain['DOMAIN']] = domain['DOMAINID']
        if not self.domains:
            raise RuntimeError('No domains found on Linode')

    def send_request(self, data):
        """POST *data* (plus the API key) to Linode; return the decoded JSON.

        Raises RuntimeError listing every error the API reported.
        """
        headers = {
            'User-Agent': 'OpenDKIMRotateKeys/1.0.0'
        }
        data['api_key'] = self.api_key
        r = requests.post(self.api_url, data=data, headers=headers).json()
        if len(r['ERRORARRAY']) > 0:
            messages = []
            for error in r['ERRORARRAY']:
                msg = '{} (code {})'.format(error['ERRORMESSAGE'],
                                            error['ERRORCODE'])
                messages.append(msg)
            raise RuntimeError('Errors from Linode: {}'.format(', '.join(messages)))
        return r
|
danalec/dotfiles | refs/heads/master | sublime/.config/sublime-text-3/Packages/anaconda_php/plugin/handlers_php/commands/phpcpd.py | 3 |
# Copyright (C) 2014 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
import os
import sys
import logging
import traceback
import subprocess
from commands.base import Command
from process import spawn
PIPE = subprocess.PIPE
class PHPCPD(Command):
    """Run phpcs linter and return back results
    """

    def __init__(self, callback, uid, vid, filename, settings, lint=False):
        """Store the request parameters and register with the base Command.

        callback: called with a result dict when the run finishes.
        uid/vid: request and view identifiers echoed back to the callback.
        filename: file or directory handed to phpcpd.
        settings: plugin settings object (``get(key, default)`` interface).
        lint: when True, output is converted to linter-error format.
        """
        self.vid = vid
        self.lint = lint
        self.settings = settings
        self.filename = filename
        super(PHPCPD, self).__init__(callback, uid)

    def run(self):
        """Run the command
        """
        try:
            output = self.php_copy_and_paste_detector()
            callback_data = {
                'success': True,
                'uid': self.uid,
                'vid': self.vid
            }
            # Key differs by mode: raw text under 'output', parsed
            # violations under 'errors'.
            callback_data['output' if not self.lint else 'errors'] = output
            self.callback(callback_data)
        except Exception as error:
            # Report the failure through the callback instead of raising.
            logging.error(error)
            logging.debug(traceback.format_exc())
            print(traceback.format_exc())
            self.callback({
                'success': False,
                'error': error,
                'uid': self.uid,
                'vid': self.vid
            })

    def php_copy_and_paste_detector(self):
        """Run the phpcpd command in a file or directory
        """
        # Bundled phpcpd phar, executed with the system PHP; -n skips
        # php.ini and memory_limit=-1 lifts the memory cap.
        phpcpd = os.path.join(
            os.path.dirname(__file__), '../linting/phpcpd/phpcpd.phar')
        args = ['php', '-n', '-d', 'memory_limit=-1', phpcpd, self.filename]
        configured_verbosity = self.settings.get('phpcpd_verbosity_level', 0)
        if configured_verbosity > 0:
            verbosity_lvl = '-{}'.format('v' * configured_verbosity)
            args.append(verbosity_lvl)
        args += [
            '--min-lines', str(self.settings.get('phpcpd_min_lines', 5)),
            '--min-tokens', str(self.settings.get('phpcpd_min_tokens', 70))
        ] + self.settings.get('phpcpd_additional_arguments', [])
        proc = spawn(args, stdout=PIPE, stderr=PIPE, cwd=os.getcwd())
        output, error = proc.communicate()
        if sys.version_info >= (3, 0):
            output = output.decode('utf8')
        return output if not self.lint else self.to_lint_fmt(output)

    def to_lint_fmt(self, data):
        """Prepare the output to linter format
        """
        # treated as violations
        errors = {'E': [], 'W': [], 'V': []}
        splited_data = data.splitlines()
        for i in range(len(splited_data)):
            error_line = splited_data[i]
            if ' -' in error_line:
                # phpcpd reports each clone as two consecutive lines of
                # "path.php:FROM-TO"; the next line (i + 1) holds the
                # duplicate location. Assumes phpcpd's output format —
                # TODO confirm against the bundled phar's version.
                from_line, to_line = self._from_line_to_line(error_line)
                from_cp_line, to_cp_line = self._from_line_to_line(
                    splited_data[i + 1])
                message = (
                    'copy & paste code block detected: code from lines {0} '
                    'to {1} is duplicated in {2} to {3}'.format(
                        from_line, to_line, from_cp_line, to_cp_line
                    )
                )
                line = from_line
                errors['V'].append({
                    'line': line,
                    'offset': 0,
                    'code': 0,
                    'message': '[V] phpcpd: {0}'.format(message)
                })
        return errors

    def _from_line_to_line(self, error_line):
        """Parse the error line and give back the lines range in a tuple
        """
        return error_line.rsplit('.php:', 1)[1].split('-')
|
wakatime/sketch-wakatime | refs/heads/master | WakaTime.sketchplugin/Contents/Resources/wakatime/packages/pygments/lexers/other.py | 31 | # -*- coding: utf-8 -*-
"""
pygments.lexers.other
~~~~~~~~~~~~~~~~~~~~~
Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
TcshLexer
from pygments.lexers.robotframework import RobotFrameworkLexer
from pygments.lexers.testing import GherkinLexer
from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
from pygments.lexers.prolog import LogtalkLexer
from pygments.lexers.snobol import SnobolLexer
from pygments.lexers.rebol import RebolLexer
from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
from pygments.lexers.modeling import ModelicaLexer
from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
HybrisLexer
from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
AsymptoteLexer, PovrayLexer
from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
GoodDataCLLexer, MaqlLexer
from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
MscgenLexer, VGLLexer
from pygments.lexers.basic import CbmBasicV2Lexer
from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
from pygments.lexers.ecl import ECLLexer
from pygments.lexers.urbi import UrbiscriptLexer
from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
from pygments.lexers.installers import NSISLexer, RPMSpecLexer
from pygments.lexers.textedit import AwkLexer
from pygments.lexers.smv import NuSMVLexer
__all__ = []
|
dancerj/monthly-report | refs/heads/master | 2012/image201212/memberanalysis/unique_names.py | 3 | #!/usr/bin/python
# dump unique names count.
# use:
# ./unique_names.py < memberls.csv
import csv
import sys
# Count how often each member name appears across all CSV rows on stdin.
# Python 2 script: bytes are decoded explicitly and dict.viewitems is used.
csv_reader = csv.reader(sys.stdin)
usernames = {}
for row in csv_reader:
    # The first two columns are not names; the rest are usernames.
    for column in row[2:]:
        # each column is a username.
        name = column.decode('utf-8')
        usernames[name] = usernames.get(name, 0) + 1
#print usernames.encode('utf-8')
# Print names sorted by descending count.
for user, count in sorted(usernames.viewitems(),
                          key=lambda k: -k[1]):
    print user, count
|
kpespinosa/BuildingMachineLearningSystemsWithPython | refs/heads/master | ch03/noise_analysis.py | 24 | # This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
import sklearn.datasets
# Analyse why a short "noise" post still matches: fetch six comp/sci
# newsgroups, vectorize with a stemmed TF-IDF, and inspect which terms of a
# nearly content-free post survive vectorization and with what IDF weights.
groups = [
    'comp.graphics', 'comp.os.ms-windows.misc', 'comp.sys.ibm.pc.hardware',
    'comp.sys.mac.hardware', 'comp.windows.x', 'sci.space']
train_data = sklearn.datasets.fetch_20newsgroups(subset="train",
                                                 categories=groups)
labels = train_data.target
num_clusters = 50  # sp.unique(labels).shape[0]
import nltk.stem
english_stemmer = nltk.stem.SnowballStemmer('english')
from sklearn.feature_extraction.text import TfidfVectorizer
class StemmedTfidfVectorizer(TfidfVectorizer):
    # TF-IDF vectorizer that stems every token with the Snowball stemmer.
    def build_analyzer(self):
        # NOTE(review): super(TfidfVectorizer, self) skips TfidfVectorizer
        # in the MRO; it works because TfidfVectorizer does not override
        # build_analyzer, but super(StemmedTfidfVectorizer, self) would be
        # the conventional spelling — confirm intent.
        analyzer = super(TfidfVectorizer, self).build_analyzer()
        return lambda doc: (english_stemmer.stem(w) for w in analyzer(doc))
vectorizer = StemmedTfidfVectorizer(min_df=10, max_df=0.5,
                                    stop_words='english', decode_error='ignore'
                                    )
vectorized = vectorizer.fit_transform(train_data.data)
post_group = zip(train_data.data, train_data.target)
# Create a list of tuples that can be sorted by
# the length of the posts
all = [(len(post[0]), post[0], train_data.target_names[post[1]])
       for post in post_group]
graphics = sorted([post for post in all if post[2] == 'comp.graphics'])
print(graphics[5])
# (245, 'From: SITUNAYA@IBM3090.BHAM.AC.UK\nSubject: test....(sorry)\nOrganization:
# The University of Birmingham, United Kingdom\nLines: 1\nNNTP-Posting-Host: ibm3090.bham.ac.uk
# \n\n==============================================================================\n',
# 'comp.graphics')
noise_post = graphics[5][1]
analyzer = vectorizer.build_analyzer()
print(list(analyzer(noise_post)))
# Terms of the noise post that made it into the fitted vocabulary.
useful = set(analyzer(noise_post)).intersection(vectorizer.get_feature_names())
print(sorted(useful))
# ['ac', 'birmingham', 'host', 'kingdom', 'nntp', 'sorri', 'test', 'uk', 'unit', 'univers']
# Show the IDF weight of each surviving term (low IDF = common, low signal).
for term in sorted(useful):
    print('IDF(%s)=%.2f' % (term,
                            vectorizer._tfidf.idf_[vectorizer.vocabulary_[term]]))
# IDF(ac)=3.51
# IDF(birmingham)=6.77
# IDF(host)=1.74
# IDF(kingdom)=6.68
# IDF(nntp)=1.77
# IDF(sorri)=4.14
# IDF(test)=3.83
# IDF(uk)=3.70
# IDF(unit)=4.42
# IDF(univers)=1.91
|
aborrero/pkg-rpmlint | refs/heads/master | TagsCheck.py | 1 | # -*- coding: utf-8 -*-
#############################################################################
# File : TagsCheck.py
# Package : rpmlint
# Author : Frederic Lepied
# Created on : Tue Sep 28 00:03:24 1999
# Purpose : Check a package to see if some rpm tags are present
#############################################################################
import calendar
import os
import re
import time
try:
from urlparse import urlparse
except ImportError: # Python 3
from urllib.parse import urlparse
import rpm
import AbstractCheck
import Config
import FilesCheck
from Filter import addDetails, printError, printInfo, printWarning
import Pkg
# Optional spell-checking support via the "enchant" library.
# UseEnchant semantics: None (the default) means "use it if importable",
# a true value requests it, a false value disables it entirely.
_use_enchant = Config.getOption("UseEnchant", None)
if _use_enchant or _use_enchant is None:
    try:
        import enchant
        import enchant.checker
    except ImportError:
        # Library not installed: fall back to the built-in BAD_WORDS list.
        enchant = None
else:
    # Explicitly disabled by configuration.
    enchant = None
del _use_enchant
# Fallback list of acceptable License tag values, used when the
# "ValidLicenses" option is not set in the rpmlint configuration.
DEFAULT_VALID_LICENSES = (
    # OSI approved licenses, http://www.opensource.org/licenses/ (unversioned,
    # trailing "license" dropped based on fuzzy logic, and in well-known cases,
    # the abbreviation used instead of the full name, but list kept sorted by
    # the full name). Updated 2010-02-01.
    'Academic Free License',
    'Adaptive Public License',
    'AGPLv3', # Affero GNU Public License
    'AGPLv3+', # Affero GNU Public License
    'Apache License',
    'Apache Software License',
    'Apple Public Source License',
    'Artistic',
    'Attribution Assurance License',
    'BSD',
    'Boost Software License',
    'Computer Associates Trusted Open Source License',
    'CDDL', # Common Development and Distribution License
    'Common Public Attribution License',
    'CUA Office Public License',
    'EU DataGrid Software License',
    'Eclipse Public License',
    'Educational Community License',
    'Eiffel Forum License',
    'Entessa Public License',
    'European Union Public License',
    'Fair License',
    'Frameworx License',
    'GPLv1',
    'GPLv1+',
    'GPLv2',
    'GPLv2+',
    'GPLv3',
    'GPLv3+',
    'LGPLv2',
    'LGPLv2+',
    'LGPLv3',
    'LGPLv3+',
    'Historical Permission Notice and Disclaimer',
    'IBM Public License',
    'IPA Font License',
    'ISC License',
    'Lucent Public License',
    'Microsoft Public License',
    'Microsoft Reciprocal License',
    'MirOS License',
    'MIT',
    'Motosoto License',
    'MPL', # Mozilla Public License
    'Multics License',
    'NASA Open Source Agreement',
    'Naumen Public License',
    'Nethack General Public License',
    'Nokia Open Source License',
    'Non-profit Open Software License',
    'NTP License',
    'OCLC Research Public License',
    'OFL', # Open Font License
    'Open Group Test Suite License',
    'Open Software License',
    'PHP License',
    'Python license', # CNRI Python License
    'Python Software Foundation License',
    'QPL', # Qt Public License
    'RealNetworks Public Source License',
    'Reciprocal Public License',
    'Ricoh Source Code Public License',
    'Simple Public License',
    'Sleepycat License',
    'Sun Public License',
    'Sybase Open Watcom Public License',
    'University of Illinois/NCSA Open Source License',
    'Vovida Software License',
    'W3C License',
    'wxWindows Library License',
    'X.Net License',
    'Zope Public License',
    'zlib/libpng License',
    # Creative commons licenses, http://creativecommons.org/licenses/:
    'Creative Commons Attribution',
    'Creative Commons Attribution-NoDerivs',
    'Creative Commons Attribution-NonCommercial-NoDerivs',
    'Creative Commons Attribution-NonCommercial',
    'Creative Commons Attribution-NonCommercial-ShareAlike',
    'Creative Commons Attribution-ShareAlike',
    # Others:
    'Design Public License', # ???
    'GFDL', # GNU Free Documentation License
    'LaTeX Project Public License',
    'OpenContent License',
    'Open Publication License',
    'Public Domain',
    'Ruby License',
    'SIL Open Font License',
    # Non open source licences:
    'Charityware',
    'Commercial',
    'Distributable',
    'Freeware',
    'Non-distributable',
    'Proprietary',
    'Shareware',
    )
# Common misspellings mapped to their corrections; used by the built-in
# fallback spell checker in spell_check() when the enchant library (or
# a dictionary for the requested language) is unavailable.
BAD_WORDS = {
    'alot': 'a lot',
    'accesnt': 'accent',
    'accelleration': 'acceleration',
    'accessable': 'accessible',
    'accomodate': 'accommodate',
    'acess': 'access',
    'acording': 'according',
    'additionaly': 'additionally',
    'adress': 'address',
    'adresses': 'addresses',
    'adviced': 'advised',
    'albumns': 'albums',
    'alegorical': 'allegorical',
    'algorith': 'algorithm',
    'allpication': 'application',
    'altough': 'although',
    'alows': 'allows',
    'amoung': 'among',
    'amout': 'amount',
    'analysator': 'analyzer',
    'ang': 'and',
    'appropiate': 'appropriate',
    'arraival': 'arrival',
    'artifical': 'artificial',
    'artillary': 'artillery',
    'attemps': 'attempts',
    'automatize': 'automate',
    'automatized': 'automated',
    'automatizes': 'automates',
    'auxilliary': 'auxiliary',
    'availavility': 'availability',
    'availble': 'available',
    'avaliable': 'available',
    'availiable': 'available',
    'backgroud': 'background',
    'baloons': 'balloons',
    'becomming': 'becoming',
    'becuase': 'because',
    'cariage': 'carriage',
    'challanges': 'challenges',
    'changable': 'changeable',
    'charachters': 'characters',
    'charcter': 'character',
    'choosen': 'chosen',
    'colorfull': 'colorful',
    'comand': 'command',
    'commerical': 'commercial',
    'comminucation': 'communication',
    'commoditiy': 'commodity',
    'compability': 'compatibility',
    'compatability': 'compatibility',
    'compatable': 'compatible',
    'compatibiliy': 'compatibility',
    'compatibilty': 'compatibility',
    'compleatly': 'completely',
    'complient': 'compliant',
    'compres': 'compress',
    'containes': 'contains',
    'containts': 'contains',
    'contence': 'contents',
    'continous': 'continuous',
    'contraints': 'constraints',
    'convertor': 'converter',
    'convinient': 'convenient',
    'cryptocraphic': 'cryptographic',
    'deamon': 'daemon',
    'debians': 'Debian\'s',
    'decompres': 'decompress',
    'definate': 'definite',
    'definately': 'definitely',
    'dependancies': 'dependencies',
    'dependancy': 'dependency',
    'dependant': 'dependent',
    'developement': 'development',
    'developped': 'developed',
    'deveolpment': 'development',
    'devided': 'divided',
    'dictionnary': 'dictionary',
    'diplay': 'display',
    'disapeared': 'disappeared',
    'dissapears': 'disappears',
    'documentaion': 'documentation',
    'docuentation': 'documentation',
    'documantation': 'documentation',
    'dont': 'don\'t',
    'easilly': 'easily',
    'ecspecially': 'especially',
    'edditable': 'editable',
    'editting': 'editing',
    'eletronic': 'electronic',
    'enchanced': 'enhanced',
    'encorporating': 'incorporating',
    'enlightnment': 'enlightenment',
    'enterily': 'entirely',
    'enviroiment': 'environment',
    'environement': 'environment',
    'excellant': 'excellent',
    'exlcude': 'exclude',
    'exprimental': 'experimental',
    'extention': 'extension',
    'failuer': 'failure',
    'familar': 'familiar',
    'fatser': 'faster',
    'fetaures': 'features',
    'forse': 'force',
    'fortan': 'fortran',
    'framwork': 'framework',
    'fuction': 'function',
    'fuctions': 'functions',
    'functionnality': 'functionality',
    'functonality': 'functionality',
    'functionaly': 'functionally',
    'futhermore': 'furthermore',
    'generiously': 'generously',
    'grahical': 'graphical',
    'grahpical': 'graphical',
    'grapic': 'graphic',
    'guage': 'gauge',
    'halfs': 'halves',
    'heirarchically': 'hierarchically',
    'helpfull': 'helpful',
    'hierachy': 'hierarchy',
    'hierarchie': 'hierarchy',
    'howver': 'however',
    'implemantation': 'implementation',
    'incomming': 'incoming',
    'incompatabilities': 'incompatibilities',
    'indended': 'intended',
    'indendation': 'indentation',
    'independant': 'independent',
    'informatiom': 'information',
    'initalize': 'initialize',
    'inofficial': 'unofficial',
    'integreated': 'integrated',
    'integrety': 'integrity',
    'integrey': 'integrity',
    'intendet': 'intended',
    'interchangable': 'interchangeable',
    'intermittant': 'intermittent',
    'jave': 'java',
    'langage': 'language',
    'langauage': 'language',
    'langugage': 'language',
    'lauch': 'launch',
    'lesstiff': 'lesstif',
    'libaries': 'libraries',
    'licenceing': 'licencing',
    'loggin': 'login',
    'logile': 'logfile',
    'loggging': 'logging',
    'mandrivalinux': 'Mandriva Linux',
    'maintainance': 'maintenance',
    'maintainence': 'maintenance',
    'makeing': 'making',
    'managable': 'manageable',
    'manoeuvering': 'maneuvering',
    'ment': 'meant',
    'modulues': 'modules',
    'monochromo': 'monochrome',
    'multidimensionnal': 'multidimensional',
    'navagating': 'navigating',
    'nead': 'need',
    'neccesary': 'necessary',
    'neccessary': 'necessary',
    'necesary': 'necessary',
    'nescessary': 'necessary',
    'noticable': 'noticeable',
    'optionnal': 'optional',
    'orientied': 'oriented',
    'pacakge': 'package',
    'pachage': 'package',
    'packacge': 'package',
    'packege': 'package',
    'packge': 'package',
    'pakage': 'package',
    'particularily': 'particularly',
    'persistant': 'persistent',
    'plattform': 'platform',
    'ploting': 'plotting',
    'posible': 'possible',
    'powerfull': 'powerful',
    'prefered': 'preferred',
    'prefferably': 'preferably',
    'prepaired': 'prepared',
    'princliple': 'principle',
    'priorty': 'priority',
    'proccesors': 'processors',
    'proces': 'process',
    'processsing': 'processing',
    'processessing': 'processing',
    'progams': 'programs',
    'programers': 'programmers',
    'programm': 'program',
    'programms': 'programs',
    'promps': 'prompts',
    'pronnounced': 'pronounced',
    'prononciation': 'pronunciation',
    'pronouce': 'pronounce',
    'protcol': 'protocol',
    'protocoll': 'protocol',
    'recieve': 'receive',
    'recieved': 'received',
    'redircet': 'redirect',
    'regulamentations': 'regulations',
    'remoote': 'remote',
    'repectively': 'respectively',
    'replacments': 'replacements',
    'requiere': 'require',
    'runnning': 'running',
    'safly': 'safely',
    'savable': 'saveable',
    'searchs': 'searches',
    'separatly': 'separately',
    'seperate': 'separate',
    'seperately': 'separately',
    'seperatly': 'separately',
    'serveral': 'several',
    'setts': 'sets',
    'similiar': 'similar',
    'simliar': 'similar',
    'speach': 'speech',
    'standart': 'standard',
    'staically': 'statically',
    'staticly': 'statically',
    'succesful': 'successful',
    'succesfully': 'successfully',
    'suplied': 'supplied',
    'suport': 'support',
    'suppport': 'support',
    'supportin': 'supporting',
    'synchonized': 'synchronized',
    'syncronize': 'synchronize',
    'syncronizing': 'synchronizing',
    'syncronus': 'synchronous',
    'syste': 'system',
    'sythesis': 'synthesis',
    'taht': 'that',
    'throught': 'through',
    'useable': 'usable',
    'usefull': 'useful',
    'usera': 'users',
    'usetnet': 'Usenet',
    'utilites': 'utilities',
    'utillities': 'utilities',
    'utilties': 'utilities',
    'utiltity': 'utility',
    'utitlty': 'utility',
    'variantions': 'variations',
    'varient': 'variant',
    'verson': 'version',
    'vicefersa': 'vice-versa',
    'yur': 'your',
    'wheter': 'whether',
    'wierd': 'weird',
    'xwindows': 'X'
    }
# Patterns matching dependency names that can never be legitimate; they
# typically come from a broken build ("is not owned by any package"
# error text leaking into Requires, or a libsafe leftover).
DEFAULT_INVALID_REQUIRES = (r'^is$', r'^not$', r'^owned$', r'^by$', r'^any$',
                            r'^package$', r'^libsafe\.so\.')

VALID_GROUPS = Config.getOption('ValidGroups', None)
if VALID_GROUPS is None:  # get defaults from rpm package only if it's not set
    VALID_GROUPS = Pkg.get_default_valid_rpmgroups()
VALID_LICENSES = Config.getOption('ValidLicenses', DEFAULT_VALID_LICENSES)
# Materialize as a list: on Python 3 a bare map() object is a one-shot
# iterator that would be exhausted after the first package checked,
# silently disabling the invalid-dependency check for all later ones.
INVALID_REQUIRES = [re.compile(pattern) for pattern in
                    Config.getOption('InvalidRequires',
                                     DEFAULT_INVALID_REQUIRES)]

packager_regex = re.compile(Config.getOption('Packager'))
# Version at the end of a changelog header line, e.g. "... <foo> 1.2-3"
changelog_version_regex = re.compile(r'[^>]([^ >]+)\s*$')
# Or as the first thing on the first changelog text line: "- [e:]v-r ..."
changelog_text_version_regex = re.compile(r'^\s*-\s*((\d+:)?[\w\.]+-[\w\.]+)')
release_ext = Config.getOption('ReleaseExtension')
extension_regex = release_ext and re.compile(release_ext)
use_version_in_changelog = Config.getOption('UseVersionInChangelog', True)
devel_number_regex = re.compile(r'(.*?)([0-9.]+)(_[0-9.]+)?-devel')
lib_devel_number_regex = re.compile(r'^lib(.*?)([0-9.]+)(_[0-9.]+)?-devel')
invalid_url_regex = re.compile(Config.getOption('InvalidURL'), re.IGNORECASE)
lib_package_regex = re.compile(r'(?:^(?:compat-)?lib.*?(\.so.*)?|libs?[\d-]*)$', re.IGNORECASE)
leading_space_regex = re.compile(r'^\s+')
# Splits a License tag into individual licenses: parenthesized groups
# and "and"/"or" connectors.
license_regex = re.compile(r'\(([^)]+)\)|\s(?:and|or)\s')
invalid_version_regex = re.compile(r'([0-9](?:rc|alpha|beta|pre).*)', re.IGNORECASE)
# () are here for grouping purpose in the regexp
forbidden_words_regex = re.compile('(' + Config.getOption('ForbiddenWords') + ')', re.IGNORECASE)
valid_buildhost_regex = re.compile(Config.getOption('ValidBuildHost'))
use_epoch = Config.getOption('UseEpoch', False)
use_utf8 = Config.getOption('UseUTF8', Config.USEUTF8_DEFAULT)
max_line_len = Config.getOption('MaxLineLength', 79)
# Matches anything that looks like a spec-file tag at line start; used to
# detect tags accidentally swallowed into %description text.
tag_regex = re.compile(r'^((?:Auto(?:Req|Prov|ReqProv)|Build(?:Arch(?:itectures)?|Root)|(?:Build)?Conflicts|(?:Build)?(?:Pre)?Requires|Copyright|(?:CVS|SVN)Id|Dist(?:ribution|Tag|URL)|DocDir|(?:Build)?Enhances|Epoch|Exclu(?:de|sive)(?:Arch|OS)|Group|Icon|License|Name|No(?:Patch|Source)|Obsoletes|Packager|Patch\d*|Prefix(?:es)?|Provides|(?:Build)?Recommends|Release|RHNPlatform|Serial|Source\d*|(?:Build)?Suggests|Summary|(?:Build)?Supplements|(?:Bug)?URL|Vendor|Version)(?:\([^)]+\))?:)\s*\S', re.IGNORECASE)
punct = '.,:;!?'
sentence_break_regex = re.compile(r'(^|[.:;!?])\s*$')
# NOTE(review): the "A-z" range also covers the characters between 'Z'
# and 'a' ("[\]^_`") and looks like a typo for "A-Z"; kept as-is to
# avoid changing which soname provides are flagged — confirm intent.
so_dep_regex = re.compile(r'\.so(\.[0-9a-zA-z]+)*(\([^)]*\))*$')

# we assume that no rpm packages existed before rpm itself existed...
oldest_changelog_timestamp = calendar.timegm(time.strptime("1995-01-01", "%Y-%m-%d"))

# Expanded per-language "private" module directories from which shared
# object provides should not be exported; both lib and lib64 variants
# are added so the check works regardless of the build host's libdir.
private_so_paths = set()
for path in ('%perl_archlib', '%perl_vendorarch', '%perl_sitearch',
             '%python_sitearch', '%python2_sitearch', '%python3_sitearch',
             '%ruby_sitearch', '%php_extdir'):
    epath = rpm.expandMacro(path)
    if epath and epath != path:  # macro was defined and actually expanded
        private_so_paths.add(epath)
        private_so_paths.add(re.sub(r'/lib64(?=/|$)', '/lib', epath))
        private_so_paths.add(re.sub(r'/lib(?=/|$)', '/lib64', epath))

# Cache of enchant SpellChecker instances keyed by language code; None is
# cached for languages whose dictionary could not be found.
_enchant_checkers = {}
def spell_check(pkg, str, fmt, lang, ignored):
    """Spell check tag value "str" of pkg for language lang.

    Warnings are reported with the context string "fmt % lang"; words in
    the "ignored" collection are skipped.  Uses the enchant library when
    available and a dictionary for lang exists, otherwise falls back to
    the built-in BAD_WORDS list.
    (Note: the "str" parameter shadows the builtin of the same name;
    kept for signature compatibility.)
    """
    dict_found = True
    warned = set()
    if enchant:
        if lang == 'C':
            # "C" locale has no dictionary; treat it as US English.
            lang = 'en_US'
        checker = _enchant_checkers.get(lang)
        if not checker and lang not in _enchant_checkers:
            try:
                checker = enchant.checker.SpellChecker(
                    lang, filters=[enchant.tokenize.EmailFilter,
                                   enchant.tokenize.URLFilter,
                                   enchant.tokenize.WikiWordFilter])
            except enchant.DictNotFoundError:
                printInfo(pkg, 'enchant-dictionary-not-found', lang)
                pass
            # Cache the checker (or None on failure) for later calls.
            _enchant_checkers[lang] = checker
        if checker:
            # squeeze whitespace to ease leading context check
            checker.set_text(re.sub(r'\s+', ' ', str))
            if use_utf8:
                uppername = Pkg.to_unicode(pkg.header[rpm.RPMTAG_NAME]).upper()
            else:
                uppername = pkg.name.upper()
            upperparts = uppername.split('-')
            if lang.startswith('en'):
                # Also accept possessive forms of name components.
                ups = [x + "'S" for x in upperparts]
                upperparts.extend(ups)
            for err in checker:
                # Skip already warned and ignored words
                if err.word in warned or err.word in ignored:
                    continue
                # Skip all capitalized words that do not start a sentence
                if err.word[0].isupper() and not \
                        sentence_break_regex.search(checker.leading_context(3)):
                    continue
                upperword = err.word.upper()
                # Skip all uppercase words
                if err.word == upperword:
                    continue
                # Skip errors containing package name or equal to a
                # "component" of it, case insensitively
                if uppername in upperword or upperword in upperparts:
                    continue
                # Work around enchant's digit tokenizing behavior:
                # http://github.com/rfk/pyenchant/issues/issue/3
                if checker.leading_context(1).isdigit() or \
                        checker.trailing_context(1).isdigit():
                    continue
                # Warn and suggest
                sug = ', '.join(checker.suggest()[:3])
                if sug:
                    sug = '-> %s' % sug
                printWarning(pkg, 'spelling-error', fmt % lang, err.word, sug)
                warned.add(err.word)
        else:
            # No dictionary available for lang: use fallback below.
            dict_found = False
    if not enchant or not dict_found:
        # Fallback: look each lowercased word up in the BAD_WORDS table.
        for seq in str.split():
            for word in re.split('[^a-z]+', seq.lower()):
                if len(word) == 0:
                    continue
                correct = BAD_WORDS.get(word)
                if not correct:
                    continue
                if word[0] == '\'':
                    word = word[1:]
                if word[-1] == '\'':
                    word = word[:-1]
                if word in warned or word in ignored:
                    continue
                printWarning(pkg, 'spelling-error', fmt % lang, word, '->',
                             correct)
                warned.add(word)
class TagsCheck(AbstractCheck.AbstractCheck):
    """Check sanity of rpm header tags: Name, Version, Release, Epoch,
    License, Group, Summary, %description, %changelog, URL tags,
    dependencies/provides/obsoletes, and the package file name."""

    def __init__(self):
        AbstractCheck.AbstractCheck.__init__(self, 'TagsCheck')

    def _unexpanded_macros(self, pkg, tagname, value, is_url=False):
        """Warn about %-macros that survived unexpanded in tag value(s)."""
        if not value:
            return
        if not isinstance(value, (list, tuple)):
            value = [value]
        for val in value:
            for match in AbstractCheck.macro_regex.findall(val):
                # Do not warn about %XX URL escapes
                if is_url and re.match('^%[0-9A-F][0-9A-F]$', match, re.I):
                    continue
                printWarning(pkg, 'unexpanded-macro', tagname, match)

    def check(self, pkg):
        """Run all tag checks on pkg, reporting through the Filter module."""

        packager = pkg[rpm.RPMTAG_PACKAGER]
        if packager:
            self._unexpanded_macros(pkg, 'Packager', packager)
            if Config.getOption('Packager') and \
               not packager_regex.search(packager):
                printWarning(pkg, 'invalid-packager', packager)
        else:
            printError(pkg, 'no-packager-tag')

        version = pkg[rpm.RPMTAG_VERSION]
        if version:
            self._unexpanded_macros(pkg, 'Version', version)
            res = invalid_version_regex.search(version)
            if res:
                printError(pkg, 'invalid-version', version)
        else:
            printError(pkg, 'no-version-tag')

        release = pkg[rpm.RPMTAG_RELEASE]
        if release:
            self._unexpanded_macros(pkg, 'Release', release)
            if release_ext and not extension_regex.search(release):
                printWarning(pkg, 'not-standard-release-extension', release)
        else:
            printError(pkg, 'no-release-tag')

        epoch = pkg[rpm.RPMTAG_EPOCH]
        if epoch is None:
            if use_epoch:
                printError(pkg, 'no-epoch-tag')
        else:
            if epoch > 99:
                printWarning(pkg, 'unreasonable-epoch', epoch)
            # Stringify for the version comparisons done further below.
            epoch = str(epoch)

        if use_epoch:
            # Every versioned entry in these tags must carry an epoch.
            for tag in ("obsoletes", "conflicts", "provides", "recommends",
                        "suggests", "enhances", "supplements"):
                for x in (x for x in getattr(pkg, tag)()
                          if x[1] and x[2][0] is None):
                    printWarning(pkg, 'no-epoch-in-%s' % tag,
                                 Pkg.formatRequire(*x))

        name = pkg.name
        deps = pkg.requires() + pkg.prereq()
        devel_depend = False
        is_devel = FilesCheck.devel_regex.search(name)
        is_source = pkg.isSource()
        for d in deps:
            value = Pkg.formatRequire(*d)
            if use_epoch and d[1] and d[2][0] is None and \
                    not d[0].startswith('rpmlib('):
                printWarning(pkg, 'no-epoch-in-dependency', value)
            for r in INVALID_REQUIRES:
                if r.search(d[0]):
                    printError(pkg, 'invalid-dependency', d[0])

            if d[0].startswith('/usr/local/'):
                printError(pkg, 'invalid-dependency', d[0])

            if is_source:
                if lib_devel_number_regex.search(d[0]):
                    printError(pkg, 'invalid-build-requires', d[0])
            elif not is_devel:
                # Non-devel packages should not depend on devel packages
                # (warned only once) nor on bare library package names.
                if not devel_depend and FilesCheck.devel_regex.search(d[0]):
                    printError(pkg, 'devel-dependency', d[0])
                    devel_depend = True
                if not d[1]:
                    res = lib_package_regex.search(d[0])
                    if res and not res.group(1):
                        printError(pkg, 'explicit-lib-dependency', d[0])

            if d[1] == rpm.RPMSENSE_EQUAL and d[2][2] is not None:
                printWarning(pkg, 'requires-on-release', value)
            self._unexpanded_macros(pkg, 'dependency %s' % (value,), value)

        self._unexpanded_macros(pkg, 'Name', name)
        if not name:
            printError(pkg, 'no-name-tag')
        else:
            if is_devel and not is_source:
                base = is_devel.group(1)
                dep = None
                has_so = False
                for fname in pkg.files():
                    if fname.endswith('.so'):
                        has_so = True
                        break
                if has_so:
                    base_or_libs = base + '/' + base + '-libs/lib' + base
                    # try to match *%_isa as well (e.g. "(x86-64)", "(x86-32)")
                    base_or_libs_re = re.compile(
                        '^(lib)?%s(-libs)?(\(\w+-\d+\))?$' % re.escape(base))
                    for d in deps:
                        if base_or_libs_re.match(d[0]):
                            dep = d
                            break
                    if not dep:
                        printWarning(pkg, 'no-dependency-on', base_or_libs)
                    elif version:
                        exp = (epoch, version, None)
                        sexp = Pkg.versionToString(exp)
                        if not dep[1]:
                            printWarning(pkg, 'no-version-dependency-on',
                                         base_or_libs, sexp)
                        elif dep[2][:2] != exp[:2]:
                            printWarning(pkg,
                                         'incoherent-version-dependency-on',
                                         base_or_libs,
                                         Pkg.versionToString((dep[2][0],
                                                              dep[2][1], None)),
                                         sexp)
                    res = devel_number_regex.search(name)
                    if not res:
                        printWarning(pkg, 'no-major-in-name', name)
                    else:
                        if res.group(3):
                            prov = res.group(1) + res.group(2) + '-devel'
                        else:
                            prov = res.group(1) + '-devel'

                        if prov not in (x[0] for x in pkg.provides()):
                            printWarning(pkg, 'no-provides', prov)

        # List of words to ignore in spell check
        ignored_words = set()
        for pf in pkg.files():
            ignored_words.update(pf.split('/'))
        ignored_words.update((x[0] for x in pkg.provides()))
        ignored_words.update((x[0] for x in pkg.requires()))
        ignored_words.update((x[0] for x in pkg.conflicts()))
        ignored_words.update((x[0] for x in pkg.obsoletes()))

        langs = pkg[rpm.RPMTAG_HEADERI18NTABLE]

        summary = pkg[rpm.RPMTAG_SUMMARY]
        if summary:
            if not langs:
                self._unexpanded_macros(pkg, 'Summary', Pkg.b2s(summary))
            else:
                for lang in langs:
                    self.check_summary(pkg, lang, ignored_words)
        else:
            printError(pkg, 'no-summary-tag')

        description = pkg[rpm.RPMTAG_DESCRIPTION]
        if description:
            if not langs:
                self._unexpanded_macros(pkg, '%description',
                                        Pkg.b2s(description))
            else:
                for lang in langs:
                    self.check_description(pkg, lang, ignored_words)
        else:
            printError(pkg, 'no-description-tag')

        group = pkg[rpm.RPMTAG_GROUP]
        self._unexpanded_macros(pkg, 'Group', group)
        if not group:
            printError(pkg, 'no-group-tag')
        elif VALID_GROUPS and group not in VALID_GROUPS:
            printWarning(pkg, 'non-standard-group', group)

        buildhost = pkg[rpm.RPMTAG_BUILDHOST]
        self._unexpanded_macros(pkg, 'BuildHost', buildhost)
        if not buildhost:
            printError(pkg, 'no-buildhost-tag')
        elif Config.getOption('ValidBuildHost') and \
                not valid_buildhost_regex.search(buildhost):
            printWarning(pkg, 'invalid-buildhost', buildhost)

        changelog = pkg[rpm.RPMTAG_CHANGELOGNAME]
        if not changelog:
            printError(pkg, 'no-changelogname-tag')
        else:
            clt = pkg[rpm.RPMTAG_CHANGELOGTEXT]
            if use_version_in_changelog:
                ret = changelog_version_regex.search(Pkg.b2s(changelog[0]))
                if not ret and clt:
                    # we also allow the version specified as the first
                    # thing on the first line of the text
                    ret = changelog_text_version_regex.search(Pkg.b2s(clt[0]))
                if not ret:
                    printWarning(pkg, 'no-version-in-last-changelog')
                elif version and release:
                    srpm = pkg[rpm.RPMTAG_SOURCERPM] or ''
                    # only check when source name correspond to name
                    if srpm[0:-8] == '%s-%s-%s' % (name, version, release):
                        expected = [version + '-' + release]
                        if epoch is not None:  # regardless of use_epoch
                            expected[0] = str(epoch) + ':' + expected[0]
                        # Allow EVR in changelog without release extension,
                        # the extension is often a macro or otherwise dynamic.
                        if release_ext:
                            expected.append(
                                extension_regex.sub('', expected[0]))
                        if ret.group(1) not in expected:
                            if len(expected) == 1:
                                expected = expected[0]
                            printWarning(pkg, 'incoherent-version-in-changelog',
                                         ret.group(1), expected)

            if use_utf8:
                if clt:
                    changelog = changelog + clt
                for s in changelog:
                    if not Pkg.is_utf8_bytestr(s):
                        printError(pkg, 'tag-not-utf8', '%changelog')
                        break

            clt = pkg[rpm.RPMTAG_CHANGELOGTIME][0]
            if clt:
                clt -= clt % (24 * 3600)  # roll back to 00:00:00, see #246
                if clt < oldest_changelog_timestamp:
                    printWarning(pkg, 'changelog-time-overflow',
                                 time.strftime("%Y-%m-%d", time.gmtime(clt)))
                elif clt > time.time():
                    printError(pkg, 'changelog-time-in-future',
                               time.strftime("%Y-%m-%d", time.gmtime(clt)))

#         for provide_name in (x[0] for x in pkg.provides()):
#             if name == provide_name:
#                 printWarning(pkg, 'package-provides-itself')
#                 break

        def split_license(license):
            # Split a License tag into individual license names.
            return (x.strip() for x in
                    (l for l in license_regex.split(license) if l))

        rpm_license = pkg[rpm.RPMTAG_LICENSE]
        if not rpm_license:
            printError(pkg, 'no-license')
        else:
            valid_license = True
            if rpm_license not in VALID_LICENSES:
                for l1 in split_license(rpm_license):
                    if l1 in VALID_LICENSES:
                        continue
                    for l2 in split_license(l1):
                        if l2 not in VALID_LICENSES:
                            printWarning(pkg, 'invalid-license', l2)
                            valid_license = False
            if not valid_license:
                self._unexpanded_macros(pkg, 'License', rpm_license)

        for tag in ('URL', 'DistURL', 'BugURL'):
            if hasattr(rpm, 'RPMTAG_%s' % tag.upper()):
                url = Pkg.b2s(pkg[getattr(rpm, 'RPMTAG_%s' % tag.upper())])
                self._unexpanded_macros(pkg, tag, url, is_url=True)
                if url:
                    (scheme, netloc) = urlparse(url)[0:2]
                    if not scheme or not netloc or "." not in netloc or \
                            scheme not in ('http', 'https', 'ftp') or \
                            (Config.getOption('InvalidURL') and
                             invalid_url_regex.search(url)):
                        printWarning(pkg, 'invalid-url', tag, url)
                    else:
                        self.check_url(pkg, tag, url)
                elif tag == 'URL':
                    printWarning(pkg, 'no-url-tag')

        obs_names = [x[0] for x in pkg.obsoletes()]
        prov_names = [x[0] for x in pkg.provides()]

        for o in (x for x in obs_names if x not in prov_names):
            printWarning(pkg, 'obsolete-not-provided', o)
        for o in pkg.obsoletes():
            value = Pkg.formatRequire(*o)
            self._unexpanded_macros(pkg, 'Obsoletes %s' % (value,), value)

        # TODO: should take versions, <, <=, =, >=, > into account here
        #       https://bugzilla.redhat.com/460872
        useless_provides = []
        for p in prov_names:
            if prov_names.count(p) != 1 and p not in useless_provides:
                useless_provides.append(p)
        for p in useless_provides:
            printError(pkg, 'useless-provides', p)

        for p in pkg.provides():
            value = Pkg.formatRequire(*p)
            self._unexpanded_macros(pkg, 'Provides %s' % (value,), value)

        for c in pkg.conflicts():
            value = Pkg.formatRequire(*c)
            self._unexpanded_macros(pkg, 'Conflicts %s' % (value,), value)

        obss = pkg.obsoletes()
        if obss:
            provs = pkg.provides()
            for prov in provs:
                for obs in obss:
                    if Pkg.rangeCompare(obs, prov):
                        printWarning(pkg, 'self-obsoletion',
                                     '%s obsoletes %s' %
                                     (Pkg.formatRequire(*obs),
                                      Pkg.formatRequire(*prov)))

        expfmt = rpm.expandMacro("%{_build_name_fmt}")
        if pkg.isSource():
            # _build_name_fmt often (always?) ends up not outputting src/nosrc
            # as arch for source packages, do it ourselves
            expfmt = re.sub(r'(?i)%\{?ARCH\b\}?', pkg.arch, expfmt)
        expected = pkg.header.sprintf(expfmt).split("/")[-1]
        basename = os.path.basename(pkg.filename)
        if basename != expected:
            printWarning(pkg, 'non-coherent-filename', basename, expected)

        for tag in ('Distribution', 'DistTag', 'ExcludeArch', 'ExcludeOS',
                    'Vendor'):
            if hasattr(rpm, 'RPMTAG_%s' % tag.upper()):
                res = Pkg.b2s(pkg[getattr(rpm, 'RPMTAG_%s' % tag.upper())])
                self._unexpanded_macros(pkg, tag, res)

        for path in private_so_paths:
            for fname, pkgfile in pkg.files().items():
                if fname.startswith(path):
                    for prov in pkgfile.provides:
                        if so_dep_regex.search(prov[0]):
                            printWarning(pkg, "private-shared-object-provides",
                                         fname, Pkg.formatRequire(*prov))

    def check_description(self, pkg, lang, ignored_words):
        """Check the %description for language lang: encoding, spelling,
        line length, forbidden words, and stray spec tags."""
        description = pkg.langtag(rpm.RPMTAG_DESCRIPTION, lang)
        if use_utf8:
            if not Pkg.is_utf8_bytestr(description):
                printError(pkg, 'tag-not-utf8', '%description', lang)
            description = Pkg.to_unicode(description)
        else:
            description = Pkg.b2s(description)
        self._unexpanded_macros(pkg, '%%description -l %s' % lang, description)
        spell_check(pkg, description, '%%description -l %s', lang,
                    ignored_words)
        for l in description.splitlines():
            if len(l) > max_line_len:
                printError(pkg, 'description-line-too-long', lang, l)
            res = forbidden_words_regex.search(l)
            if res and Config.getOption('ForbiddenWords'):
                printWarning(pkg, 'description-use-invalid-word', lang,
                             res.group(1))
            res = tag_regex.search(l)
            if res:
                printWarning(pkg, 'tag-in-description', lang, res.group(1))

    def check_summary(self, pkg, lang, ignored_words):
        """Check the Summary for language lang: encoding, spelling,
        capitalization, length, forbidden words, repeated name."""
        summary = pkg.langtag(rpm.RPMTAG_SUMMARY, lang)
        if use_utf8:
            if not Pkg.is_utf8_bytestr(summary):
                printError(pkg, 'tag-not-utf8', 'Summary', lang)
            summary = Pkg.to_unicode(summary)
        else:
            summary = Pkg.b2s(summary)
        self._unexpanded_macros(pkg, 'Summary(%s)' % lang, summary)
        spell_check(pkg, summary, 'Summary(%s)', lang, ignored_words)
        if '\n' in summary:
            printError(pkg, 'summary-on-multiple-lines', lang)
        if summary[0] != summary[0].upper():
            printWarning(pkg, 'summary-not-capitalized', lang, summary)
        if summary[-1] == '.':
            printWarning(pkg, 'summary-ended-with-dot', lang, summary)
        if len(summary) > max_line_len:
            printError(pkg, 'summary-too-long', lang, summary)
        if leading_space_regex.search(summary):
            printError(pkg, 'summary-has-leading-spaces', lang, summary)
        res = forbidden_words_regex.search(summary)
        if res and Config.getOption('ForbiddenWords'):
            printWarning(pkg, 'summary-use-invalid-word', lang, res.group(1))
        if pkg.name:
            sepchars = '[\s' + punct + ']'
            res = re.search('(?:^|\s)(%s)(?:%s|$)' %
                            (re.escape(pkg.name), sepchars),
                            summary, re.IGNORECASE | re.UNICODE)
            if res:
                printWarning(pkg, 'name-repeated-in-summary', lang,
                             res.group(1))
# Create an object to enable the auto registration of the test
# (instantiating the check registers it with the rpmlint framework).
check = TagsCheck()
# Add information about checks
addDetails(
'summary-too-long',
'The "Summary:" must not exceed %d characters.' % max_line_len,
'invalid-version',
'''The version string must not contain the pre, alpha, beta or rc suffixes
because when the final version will be out, you will have to use an Epoch tag
to make the package upgradable. Instead put it in the release tag, prefixed
with something you have control over.''',
'spelling-error',
'''The value of this tag appears to be misspelled. Please double-check.''',
'no-packager-tag',
'''There is no Packager tag in your package. You have to specify a packager
using the Packager tag. Ex: Packager: John Doe <john.doe@example.com>.''',
'invalid-packager',
'''The packager email must end with an email compatible with the Packager
option of rpmlint. Please change it and rebuild your package.''',
'no-version-tag',
'''There is no Version tag in your package. You have to specify a version using
the Version tag.''',
'no-release-tag',
'''There is no Release tag in your package. You have to specify a release using
the Release tag.''',
'not-standard-release-extension',
'Your release tag must match the regular expression ' + release_ext + '.',
'no-name-tag',
'''There is no Name tag in your package. You have to specify a name using the
Name tag.''',
'non-coherent-filename',
'''The file which contains the package should be named
<NAME>-<VERSION>-<RELEASE>.<ARCH>.rpm.''',
'no-dependency-on',
'''
''',
'incoherent-version-dependency-on',
'''
''',
'no-version-dependency-on',
'''
''',
'no-major-in-name',
'''The major number of the library isn't included in the package's name.
''',
'no-provides',
'''Your library package doesn't provide the -devel name without the major
version included.''',
'no-summary-tag',
'''There is no Summary tag in your package. You have to describe your package
using this tag. To insert it, just insert a tag 'Summary'.''',
'summary-on-multiple-lines',
'''Your summary must fit on one line. Please make it shorter and rebuild the
package.''',
'summary-not-capitalized',
'''Summary doesn't begin with a capital letter.''',
'summary-ended-with-dot',
'''Summary ends with a dot.''',
'summary-has-leading-spaces',
'''Summary begins with whitespace which will waste space when displayed.''',
'no-description-tag',
'''The description of the package is empty or missing. To add it, insert a
%description section in your spec file, add a textual description of the
package after it, and rebuild the package.''',
'description-line-too-long',
'''Your description lines must not exceed %d characters. If a line is exceeding
this number, cut it to fit in two lines.''' % max_line_len,
'tag-in-description',
'''Something that looks like a tag was found in the package's description.
This may indicate a problem where the tag was not actually parsed as a tag
but just textual description content, thus being a no-op. Verify if this is
the case, and move the tag to a place in the specfile where %description
won't fool the specfile parser, and rebuild the package.''',
'no-group-tag',
'''There is no Group tag in your package. You have to specify a valid group
in your spec file using the Group tag.''',
'non-standard-group',
'''The value of the Group tag in the package is not valid. Valid groups are:
"%s".''' % '", "'.join(VALID_GROUPS),
'no-changelogname-tag',
'''There is no %changelog tag in your spec file. To insert it, just insert a
'%changelog' in your spec file and rebuild it.''',
'no-version-in-last-changelog',
'''The latest changelog entry doesn't contain a version. Please insert the
version that is coherent with the version of the package and rebuild it.''',
'incoherent-version-in-changelog',
'''The latest entry in %changelog contains a version identifier that is not
coherent with the epoch:version-release tuple of the package.''',
'changelog-time-overflow',
'''The timestamp of the latest entry in %changelog is suspiciously far away in
the past; it is possible that it is actually so much in the future that it
has overflowed rpm's timestamp representation.''',
'changelog-time-in-future',
'''The timestamp of the latest entry in %changelog is in the future.''',
'no-license',
'''There is no License tag in your spec file. You have to specify one license
for your program (eg. GPL). To insert this tag, just insert a 'License' in
your specfile.''',
'invalid-license',
'''The value of the License tag was not recognized. Known values are:
"%s".''' % '", "'.join(VALID_LICENSES),
'obsolete-not-provided',
'''If a package is obsoleted by a compatible replacement, the obsoleted package
should also be provided in order to not cause unnecessary dependency breakage.
If the obsoleting package is not a compatible replacement for the old one,
leave out the Provides.''',
'invalid-dependency',
'''An invalid dependency has been detected. It usually means that the build of
the package was buggy.''',
'no-epoch-tag',
'''There is no Epoch tag in your package. You have to specify an epoch using
the Epoch tag.''',
'unreasonable-epoch',
'''The value of your Epoch tag is unreasonably large (> 99).''',
'no-epoch-in-dependency',
'''Your package contains a versioned dependency without an Epoch.''',
'devel-dependency',
'''Your package has a dependency on a devel package but it's not a devel
package itself.''',
'invalid-build-requires',
'''Your source package contains a dependency not compliant with the lib64
naming. This BuildRequires dependency will not be resolved on lib64 platforms
(eg. amd64).''',
'explicit-lib-dependency',
'''You must let rpm find the library dependencies by itself. Do not put
unneeded explicit Requires: tags.''',
'useless-provides',
'''This package provides 2 times the same capacity. It should only provide it
once.''',
'tag-not-utf8',
'''The character encoding of the value of this tag is not UTF-8.''',
'requires-on-release',
'''This rpm requires a specific release of another package.''',
'no-url-tag',
'''The URL tag is missing.''',
'name-repeated-in-summary',
'''The name of the package is repeated in its summary. This is often redundant
information and looks silly in various programs' output. Make the summary
brief and to the point without including redundant information in it.''',
'enchant-dictionary-not-found',
'''A dictionary for the Enchant spell checking library is not available for
the language given in the info message. Spell checking will proceed with
rpmlint's built-in implementation for localized tags in this language.
For better spell checking results in this language, install the appropriate
dictionary that Enchant will use for this language, often for example
hunspell-* or aspell-*.''',
'self-obsoletion',
'''The package obsoletes itself. This is known to cause errors in various
tools and should thus be avoided, usually by using appropriately versioned
Obsoletes and/or Provides and avoiding unversioned ones.''',
'unexpanded-macro',
'''This tag contains something that looks like an unexpanded macro; this is
often the sign of a misspelling. Please check your specfile.''',
'private-shared-object-provides',
'''A shared object soname provides is provided by a file in a path from which
other packages should not directly load shared objects from. Such shared
objects should thus not be depended on and they should not result in provides
in the containing package. Get rid of the provides if appropriate, for example
by filtering it out during build. Note that in some cases this may require
disabling rpmbuild's internal dependency generator.''',
)
# Register a "no-epoch-in-<tag>" description for every dependency tag
# that can carry a versioned entry; the message text is generated per tag.
for i in ("obsoletes", "conflicts", "provides", "recommends", "suggests",
          "enhances", "supplements"):
    addDetails("no-epoch-in-%s" % i,
               "Your package contains a versioned %s entry without an Epoch."
               % i.capitalize())
# TagsCheck.py ends here
# ex: ts=4 sw=4 et
|
iLoveTux/data_store | refs/heads/master | build/lib/data/store/store.py | 2 | # -*- coding: utf-8 -*-
import uuid
from threading import RLock
import pickle
import base64
from itertools import cycle, izip
def encrypt(string, key="_"):
    """Return the base64 encoded XORed version of string. This is XORed with
    key which defaults to a single underscore.

    NOTE(review): a repeating-key XOR is obfuscation, not real encryption;
    do not rely on this for sensitive data.
    """
    # .strip() here only removes the trailing newline base64.encodestring()
    # appends to its output; the plaintext itself is untouched.
    return base64.encodestring(
        ''.join(
            chr(ord(c) ^ ord(k)) for c, k in izip(string, cycle(key)))).strip()
def decrypt(string, key="_"):
    """Return the base64 decoded, XORed version of string. This is XORed
    with key, which defaults to a single underscore.

    Bug fix: the previous implementation called .strip() on the decoded
    plaintext, so any message with leading or trailing whitespace failed
    to round-trip through encrypt()/decrypt(). Only the base64 layer is
    decoded now; the recovered plaintext is returned verbatim.
    """
    string = base64.decodestring(string)
    return ''.join(
        chr(ord(c) ^ ord(k)) for c, k in izip(string, cycle(key)))
class ResultList(list):
    """Marker subclass of list used to distinguish the intermediate
    result sets built by Store.find() from plain lists."""
    pass
LOCKS = {}
class Store(list):
def __init__(self, records=None):
    """This class is meant to be a parallel to a table in a
    traditional DataBase. It inherits from list and contains
    dicts which we call records.
    If you pass in a list of dicts then they will be used to
    initialize your store with records.
    >>> store = Store([{'this': 'that'}])
    >>> store2 = Store()
    >>> store2.add_record(store.find_one({'this': 'that'})) #doctest: +ELLIPSIS
    {'this': 'that', '_id':...}
    >>> store == store2
    True"""
    # add_record (rather than list.extend) is used so every record is
    # guaranteed to carry an '_id' field.
    if records:
        for record in records:
            self.add_record(record)

def add_record(self, record):
    """This method adds a record to this Store. record should be
    a dict. There is no schema in data_store, so feel free to add
    any valid Python dict.
    Every record in data.store must have a unique value for
    the field '_id', if you don't provide one then one will
    be generated.
    This method returns the record you passed in, but
    with the '_id' field added if it wasn't present.
    >>> store = Store()
    >>> store
    []
    >>> store.add_record({'this': 'that', '_id': 'test'})
    {'this': 'that', '_id': 'test'}
    >>> store
    [{'this': 'that', '_id': 'test'}]
    """
    # A random hex uuid is generated when the caller did not supply an
    # _id; uniqueness of caller-supplied ids is assumed, not enforced.
    if "_id" not in record:
        record["_id"] = uuid.uuid4().hex
    self.append(record)
    return record
def sort(self, by="_id"):
    """Return a sorted Store. The records in the returned Store
    will be sorted by the field named in by.
    >>> store = Store([
    ...     {"this": "b"},
    ...     {"this": "a"}])
    >>> srtd = store.sort(by="this")
    >>> print srtd[0]["this"]
    a
    """
    # NOTE(review): unlike list.sort() this does NOT sort in place; it
    # returns a new Store built by find() with an empty description.
    return self.find({}, order_by=by)

def filter(self, desc, sanitize_list=None, encrypt_list=None,
           password="_", order_by=None):
    """Returns a Store where any records matching desc is removed.
    This is functionally the opposite of find.
    >>> store = Store([
    ...     {"this": "b"},
    ...     {"this": "a"}])
    >>> filtered = store.filter({"this": "a"})
    >>> print len(filtered)
    1
    >>> print filtered[0]["this"]
    b
    """
    matches = self.find(desc)
    # Start from a copy of every record, then delete each match by its
    # _id, leaving the original store untouched.
    ret = self.find(
        desc={},
        sanitize_list=sanitize_list,
        encrypt_list=encrypt_list,
        password=password,
        order_by=order_by)
    for match in list(matches):
        ret.del_record({"_id": match["_id"]})
    return ret

def group_by(self, by):
    """Returns a dict containing the values of by for the keys and
    Stores for the values where the field referenced in by matches
    the key.
    >>> store = Store([
    ...     {"this": "a"},
    ...     {"this": "a"},
    ...     {"this": "b"},
    ...     {"this": "b"},
    ...     {"this": "c"},
    ...     {"this": "c"}])
    >>> groups = store.group_by("this")
    >>> print len(groups.keys())
    3
    >>> print len(groups["a"])
    2
    """
    # NOTE(review): records missing the *by* field raise KeyError here.
    groups = {}
    for record in self:
        if record[by] in groups:
            groups[record[by]].append(record)
        else:
            groups[record[by]] = [record]
    # Wrap each bucket in a Store; iterate over a copy of the dict since
    # its values are replaced while looping.
    for k, v in dict(groups).items():
        groups[k] = Store(v)
    return groups
def del_record(self, desc):
    """Delete the single record matching *desc* and return it.

    Raises ValueError when *desc* matches more than one record, so a
    sloppy description can never silently delete extra data.  When
    nothing matches, None is returned and the store is left unchanged.
    (Previously a no-match raised the misleading "matches more than one
    record" ValueError, because ``[None]`` never equals an empty result
    list; the dead ``if record:`` guard shows a no-op was intended.)

    >>> store = Store([{'_id': 'that'}])
    >>> store
    [{'_id': 'that'}]
    >>> store.del_record({'_id': 'that'})
    {'_id': 'that'}
    >>> store
    []
    """
    record = self.find_one(desc)
    if record is None:
        # Nothing matched: nothing to delete.
        return None
    records = self.find(desc)
    # find_one only returns the first match; comparing it against the
    # full result set detects (and refuses) ambiguous descriptions.
    if [record] != records:
        raise ValueError(
            "{} matches more than one record! Aborting...".format(
                str(desc)))
    self.remove(record)
    return record
def del_records(self, desc):
    """Delete every record matching *desc* and return the deleted
    records.  Unlike del_record, any number of matches is allowed.

    >>> store = Store([
    ...     {'this': 'that', '_id': 'test1'},
    ...     {'this': 'that', '_id': 'test2'},
    ...     {'this': 'that', '_id': 'test3'}])
    >>> store.del_records({'this': 'that'})
    [{'this': 'that', '_id': 'test1'}, {'this': 'that', '_id': 'test2'}, {'this': 'that', '_id': 'test3'}]
    """
    doomed = self.find(desc)
    for victim in doomed:
        self.remove(victim)
    return doomed
def find_one(self, desc, sanitize_list=None, encrypt_list=None,
             password="_"):
    """Returns one record matching desc, if more than one record
    matches desc returns the first one.  Returns None (implicitly)
    when no record matches.
    desc should be a dict whose keys evaluate to one of the
    following:
    1. A value which will be tested for equality against the
    value the key in each record in the store.
    2. A compiled regular expression object (ie like the
    value returned by re.compile). Each record in the store
    will be tested for a match against the regex for the value of
    key
    3. A callable which accepts one argument (the value of key in
    the current record) and returns True or False depending on
    whether the record should be included in the result set.
    If sanitize_list is specified then it must be an iterable
    which contains values which when found as a key in a record
    will sanitize the value of that field in the result set.
    >>> store = Store([
    ...     {'this': 'that', '_id': 'test1'},
    ...     {'this': 'that', '_id': 'test2'},
    ...     {'this': 'that', '_id': 'test3'}])
    >>> store.find_one({'this': 'that'})
    {'this': 'that', '_id': 'test1'}
    """
    for item in self:
        for key, value in desc.items():
            # Anything with a .match attribute is treated as a compiled
            # regex.  NOTE(review): when the record lacks *key*, None is
            # passed to .match(), which raises TypeError -- confirm.
            if hasattr(value, "match"):
                if not value.match(item.get(key, None)):
                    break
            elif callable(value):
                if not value(item[key]):
                    break
            else:
                if not value == item[key]:
                    break
        else:
            # for/else: reached only when every criterion matched.
            # Needed to account for changing the actual store,
            # Rather than just sanitizing the ResultList
            _item = item.copy()
            if sanitize_list:
                for key in sanitize_list:
                    if item.get(key, None):
                        _item[key] = "*" * 8
            if encrypt_list:
                for field in encrypt_list:
                    if item.get(field, None):
                        _item[field] = encrypt(_item[field], key=password)
            return _item
def find(self, desc, sanitize_list=None, encrypt_list=None,
         password="_", order_by=None):
    """Returns a ResultList containing records matching
    desc. If sanitize_list is specified it should be an iterable
    yielding keys of fields you would like sanitized. Those fields
    will be set to a value of '********'.
    desc should follow the same rules as defined above in the
    docstring for find_one.

    NOTE(review): despite the wording above, the final return value is
    a Store (built from the intermediate ResultList).
    >>> store = Store([
    ...     {'this': 'that', '_id': 'test1'},
    ...     {'this': 'that', '_id': 'test2'},
    ...     {'this': 'that', '_id': 'test3'}])
    >>> store.find({'this': 'that'})
    [{'this': 'that', '_id': 'test1'}, {'this': 'that', '_id': 'test2'}, {'this': 'that', '_id': 'test3'}]
    """
    ret = ResultList()
    for item in self:
        # Same matching rules as find_one (regex / callable / equality),
        # combined with for/else so the record is kept only when every
        # criterion matched.
        for key, value in desc.items():
            if hasattr(value, "match"):
                if not value.match(item.get(key, None)):
                    break
            elif callable(value):
                if not value(item[key]):
                    break
            else:
                if not value == item.get(key, None):
                    break
        else:
            # Needed to account for changing the actual store,
            # Rather than just sanitizing the ResultList
            ret.append(item.copy())
    # Sanitization/encryption happens after matching, on the copies only.
    for index, record in enumerate(list(ret)):
        if sanitize_list:
            for field in sanitize_list:
                if record.get(field, None):
                    ret[index][field] = "*" * 8
        if encrypt_list:
            for field in encrypt_list:
                # NOTE(review): str(record.get(field, None)) is 'None'
                # (truthy) for missing fields, so ret[index][field] can
                # raise KeyError here -- confirm the intended guard.
                if str(record.get(field, None)):
                    ret[index][field] = encrypt(
                        str(ret[index][field]), key=password)
    if order_by is not None:
        ret = sorted(ret, key=lambda k: k[order_by])
    return Store(ret)
def persist(self, filename, password=None):
    """Persist current data_store to a file named filename.
    A RLock from the threading module is used (unique by
    filename) to ensure thread safety.
    >>> store = Store([
    ...     {'this': 'that', '_id': 'test1'},
    ...     {'this': 'that', '_id': 'test2'},
    ...     {'this': 'that', '_id': 'test3'}])
    >>> store.persist("test.db")
    >>> store2 = load("test.db")
    >>> store == store2
    True
    """
    global LOCKS
    # One RLock per target path, shared module-wide, so concurrent
    # persists to the same file serialize.
    if filename not in LOCKS:
        LOCKS[filename] = RLock()
    with LOCKS[filename]:
        with open(filename, "wb") as fout:
            pickle.dump(self, fout)
        # When a password is given, the pickled file is re-read and
        # rewritten XOR-obfuscated + base64 encoded in a second pass.
        if password:
            with open(filename, "rb") as fin:
                contents = fin.read()
            contents = encrypt(contents, key=password)
            with open(filename, "wb") as fout:
                fout.write(contents)
|
se000ra/myflaskcooktest | refs/heads/master | tests/conftest.py | 1 | # -*- coding: utf-8 -*-
"""Defines fixtures available to all tests."""
import os
import pytest
from webtest import TestApp
from myflaskcooktest.settings import TestConfig
from myflaskcooktest.app import create_app
from myflaskcooktest.database import db as _db
from .factories import UserFactory
@pytest.yield_fixture(scope='function')
def app():
    """Create a fresh app configured with TestConfig and keep a pushed
    request context alive for the duration of each test.

    NOTE(review): pytest.yield_fixture is deprecated in modern pytest;
    plain pytest.fixture supports yield -- confirm the pytest version.
    """
    _app = create_app(TestConfig)
    ctx = _app.test_request_context()
    ctx.push()
    yield _app
    ctx.pop()
@pytest.fixture(scope='function')
def testapp(app):
    """A Webtest app wrapping the Flask test app.

    Bug fix: this fixture was session-scoped while depending on the
    function-scoped `app` fixture, which pytest rejects with a
    ScopeMismatch error; it is now function-scoped to match its
    dependency.
    """
    return TestApp(app)
@pytest.yield_fixture(scope='function')
def db(app):
    """Provide a clean database per test: create all tables before the
    test runs and drop them afterwards."""
    _db.app = app
    with app.app_context():
        _db.create_all()
    yield _db
    _db.drop_all()

@pytest.fixture
def user(db):
    """A committed user (password 'myprecious') backed by the db fixture."""
    user = UserFactory(password='myprecious')
    db.session.commit()
    return user
ChipaKraken/iRnWs | refs/heads/master | search_backend/language.py | 1 | # encoding=utf8
from re import findall as fa
from collections import defaultdict as dd
class LangChecker(object):
    """Naive character-bigram language detector.

    For each trained language a table of bigram (and single-character)
    counts is kept; check() multiplies per-bigram conditional
    probabilities over a sentence and picks the language with the
    highest product.
    """
    def __init__(self):
        # Maps language name -> {bigram or unigram string: count}.
        self.langs = {}
    def training(self, lang, path):
        """Build the bigram/unigram count table for *lang* from the text
        file at *path*.  '^' and '$' mark word start and end."""
        data = dd(int)
        with open(path) as book:
            book = book.read()
            for word in fa('\w+', book):
                temp = word.lower()
                data['^'+temp[:1]] += 1
                data[temp[-1:]+'$'] += 1
                # Slide a 2-character window over the word, counting both
                # the bigram and its leading character.
                while len(temp[:2]) == 2:
                    data[temp[:2]] += 1
                    data[temp[:1]] += 1
                    temp = temp[1:]
        self.langs[lang] = dict(data)
    def check_word(self, word, lang):
        """Return P(second char | first char) for the leading bigram of
        *word* under *lang*'s counts; for unseen bigrams fall back to
        the product of the two unigram probabilities."""
        if (word[:2]) in self.langs[lang]:
            # print float(self.langs[lang][word[:2]])/sum(float(v) for k,v in self.langs[lang].items() if word[:1] in k[:1])
            return float(self.langs[lang][word[:2]])/sum(float(v) for k,v in self.langs[lang].items() if word[:1] in k[:1])
        else:
            first = float(self.langs[lang][word[:1]])/sum(float(v) for k,v in self.langs[lang].items() if len(k) == 1)
            secon = float(self.langs[lang][word[1]])/sum(float(v) for k,v in self.langs[lang].items() if len(k) == 1)
            return first*secon
    def check(self, sentence):
        """Return the name of the trained language maximizing the product
        of bigram probabilities over *sentence*."""
        result = dict.fromkeys(self.langs.keys(),1)
        for word in fa('\w+', sentence):
            temp = '^'+word.lower()+'$'
            while len(temp[:2]) == 2:
                for lang in self.langs.keys():
                    result[lang] *= self.check_word(temp, lang)
                temp = temp[1:]
        return max(result, key=lambda k: result[k])
class AuthorChecker(object):
    """Near-verbatim copy of LangChecker intended for author detection.

    NOTE(review): training() additionally splits each two-word match into
    word1/word2, but those are only printed (leftover debug output, see
    below) and never used -- this class does not actually model word
    bigrams yet; confirm intent before relying on it.
    """
    def __init__(self):
        # Maps label (author/language name) -> {bigram/unigram: count}.
        self.langs = {}
    def training(self, lang, path):
        """Build the character-bigram count table for *lang* from the
        file at *path*; iterates over word PAIRS ('\w+ \w+')."""
        data = dd(int)
        with open(path) as book:
            book = book.read()
            for word in fa('\w+ \w+', book):
                temp = word.lower()
                word1, word2 = temp.split()
                # NOTE(review): debug print left in (Python 2 statement);
                # word1/word2 are otherwise unused.
                print word1, word2
                data['^'+temp[:1]] += 1
                data[temp[-1:]+'$'] += 1
                while len(temp[:2]) == 2:
                    data[temp[:2]] += 1
                    data[temp[:1]] += 1
                    temp = temp[1:]
        self.langs[lang] = dict(data)
    def check_word(self, word, lang):
        """Same bigram probability lookup as LangChecker.check_word."""
        if (word[:2]) in self.langs[lang]:
            # print float(self.langs[lang][word[:2]])/sum(float(v) for k,v in self.langs[lang].items() if word[:1] in k[:1])
            return float(self.langs[lang][word[:2]])/sum(float(v) for k,v in self.langs[lang].items() if word[:1] in k[:1])
        else:
            first = float(self.langs[lang][word[:1]])/sum(float(v) for k,v in self.langs[lang].items() if len(k) == 1)
            secon = float(self.langs[lang][word[1]])/sum(float(v) for k,v in self.langs[lang].items() if len(k) == 1)
            return first*secon
    def check(self, sentence):
        """Same scoring as LangChecker.check: return the best label."""
        result = dict.fromkeys(self.langs.keys(),1)
        for word in fa('\w+', sentence):
            temp = '^'+word.lower()+'$'
            while len(temp[:2]) == 2:
                for lang in self.langs.keys():
                    result[lang] *= self.check_word(temp, lang)
                temp = temp[1:]
        return max(result, key=lambda k: result[k])
# checker = LangChecker()
# Manual smoke test (Python 2 print statements): train on two Gutenberg
# texts and classify a Dutch and an English phrase.
checker = AuthorChecker()
checker.training('English', 'data/pg23488.txt')
checker.training('Dutch', 'data/pg18066.txt')
print checker.check('De ontdekker van Amerika')
print checker.check('The Discoverer of America')
p0psicles/SickGear | refs/heads/master | lib/unidecode/x084.py | 252 | data = (
'Hu ', # 0x00
'Qi ', # 0x01
'He ', # 0x02
'Cui ', # 0x03
'Tao ', # 0x04
'Chun ', # 0x05
'Bei ', # 0x06
'Chang ', # 0x07
'Huan ', # 0x08
'Fei ', # 0x09
'Lai ', # 0x0a
'Qi ', # 0x0b
'Meng ', # 0x0c
'Ping ', # 0x0d
'Wei ', # 0x0e
'Dan ', # 0x0f
'Sha ', # 0x10
'Huan ', # 0x11
'Yan ', # 0x12
'Yi ', # 0x13
'Tiao ', # 0x14
'Qi ', # 0x15
'Wan ', # 0x16
'Ce ', # 0x17
'Nai ', # 0x18
'Kutabireru ', # 0x19
'Tuo ', # 0x1a
'Jiu ', # 0x1b
'Tie ', # 0x1c
'Luo ', # 0x1d
'[?] ', # 0x1e
'[?] ', # 0x1f
'Meng ', # 0x20
'[?] ', # 0x21
'Yaji ', # 0x22
'[?] ', # 0x23
'Ying ', # 0x24
'Ying ', # 0x25
'Ying ', # 0x26
'Xiao ', # 0x27
'Sa ', # 0x28
'Qiu ', # 0x29
'Ke ', # 0x2a
'Xiang ', # 0x2b
'Wan ', # 0x2c
'Yu ', # 0x2d
'Yu ', # 0x2e
'Fu ', # 0x2f
'Lian ', # 0x30
'Xuan ', # 0x31
'Yuan ', # 0x32
'Nan ', # 0x33
'Ze ', # 0x34
'Wo ', # 0x35
'Chun ', # 0x36
'Xiao ', # 0x37
'Yu ', # 0x38
'Pian ', # 0x39
'Mao ', # 0x3a
'An ', # 0x3b
'E ', # 0x3c
'Luo ', # 0x3d
'Ying ', # 0x3e
'Huo ', # 0x3f
'Gua ', # 0x40
'Jiang ', # 0x41
'Mian ', # 0x42
'Zuo ', # 0x43
'Zuo ', # 0x44
'Ju ', # 0x45
'Bao ', # 0x46
'Rou ', # 0x47
'Xi ', # 0x48
'Xie ', # 0x49
'An ', # 0x4a
'Qu ', # 0x4b
'Jian ', # 0x4c
'Fu ', # 0x4d
'Lu ', # 0x4e
'Jing ', # 0x4f
'Pen ', # 0x50
'Feng ', # 0x51
'Hong ', # 0x52
'Hong ', # 0x53
'Hou ', # 0x54
'Yan ', # 0x55
'Tu ', # 0x56
'Zhu ', # 0x57
'Zi ', # 0x58
'Xiang ', # 0x59
'Shen ', # 0x5a
'Ge ', # 0x5b
'Jie ', # 0x5c
'Jing ', # 0x5d
'Mi ', # 0x5e
'Huang ', # 0x5f
'Shen ', # 0x60
'Pu ', # 0x61
'Gai ', # 0x62
'Dong ', # 0x63
'Zhou ', # 0x64
'Qian ', # 0x65
'Wei ', # 0x66
'Bo ', # 0x67
'Wei ', # 0x68
'Pa ', # 0x69
'Ji ', # 0x6a
'Hu ', # 0x6b
'Zang ', # 0x6c
'Jia ', # 0x6d
'Duan ', # 0x6e
'Yao ', # 0x6f
'Jun ', # 0x70
'Cong ', # 0x71
'Quan ', # 0x72
'Wei ', # 0x73
'Xian ', # 0x74
'Kui ', # 0x75
'Ting ', # 0x76
'Hun ', # 0x77
'Xi ', # 0x78
'Shi ', # 0x79
'Qi ', # 0x7a
'Lan ', # 0x7b
'Zong ', # 0x7c
'Yao ', # 0x7d
'Yuan ', # 0x7e
'Mei ', # 0x7f
'Yun ', # 0x80
'Shu ', # 0x81
'Di ', # 0x82
'Zhuan ', # 0x83
'Guan ', # 0x84
'Sukumo ', # 0x85
'Xue ', # 0x86
'Chan ', # 0x87
'Kai ', # 0x88
'Kui ', # 0x89
'[?] ', # 0x8a
'Jiang ', # 0x8b
'Lou ', # 0x8c
'Wei ', # 0x8d
'Pai ', # 0x8e
'[?] ', # 0x8f
'Sou ', # 0x90
'Yin ', # 0x91
'Shi ', # 0x92
'Chun ', # 0x93
'Shi ', # 0x94
'Yun ', # 0x95
'Zhen ', # 0x96
'Lang ', # 0x97
'Nu ', # 0x98
'Meng ', # 0x99
'He ', # 0x9a
'Que ', # 0x9b
'Suan ', # 0x9c
'Yuan ', # 0x9d
'Li ', # 0x9e
'Ju ', # 0x9f
'Xi ', # 0xa0
'Pang ', # 0xa1
'Chu ', # 0xa2
'Xu ', # 0xa3
'Tu ', # 0xa4
'Liu ', # 0xa5
'Wo ', # 0xa6
'Zhen ', # 0xa7
'Qian ', # 0xa8
'Zu ', # 0xa9
'Po ', # 0xaa
'Cuo ', # 0xab
'Yuan ', # 0xac
'Chu ', # 0xad
'Yu ', # 0xae
'Kuai ', # 0xaf
'Pan ', # 0xb0
'Pu ', # 0xb1
'Pu ', # 0xb2
'Na ', # 0xb3
'Shuo ', # 0xb4
'Xi ', # 0xb5
'Fen ', # 0xb6
'Yun ', # 0xb7
'Zheng ', # 0xb8
'Jian ', # 0xb9
'Ji ', # 0xba
'Ruo ', # 0xbb
'Cang ', # 0xbc
'En ', # 0xbd
'Mi ', # 0xbe
'Hao ', # 0xbf
'Sun ', # 0xc0
'Zhen ', # 0xc1
'Ming ', # 0xc2
'Sou ', # 0xc3
'Xu ', # 0xc4
'Liu ', # 0xc5
'Xi ', # 0xc6
'Gu ', # 0xc7
'Lang ', # 0xc8
'Rong ', # 0xc9
'Weng ', # 0xca
'Gai ', # 0xcb
'Cuo ', # 0xcc
'Shi ', # 0xcd
'Tang ', # 0xce
'Luo ', # 0xcf
'Ru ', # 0xd0
'Suo ', # 0xd1
'Xian ', # 0xd2
'Bei ', # 0xd3
'Yao ', # 0xd4
'Gui ', # 0xd5
'Bi ', # 0xd6
'Zong ', # 0xd7
'Gun ', # 0xd8
'Za ', # 0xd9
'Xiu ', # 0xda
'Ce ', # 0xdb
'Hai ', # 0xdc
'Lan ', # 0xdd
'[?] ', # 0xde
'Ji ', # 0xdf
'Li ', # 0xe0
'Can ', # 0xe1
'Lang ', # 0xe2
'Yu ', # 0xe3
'[?] ', # 0xe4
'Ying ', # 0xe5
'Mo ', # 0xe6
'Diao ', # 0xe7
'Tiao ', # 0xe8
'Mao ', # 0xe9
'Tong ', # 0xea
'Zhu ', # 0xeb
'Peng ', # 0xec
'An ', # 0xed
'Lian ', # 0xee
'Cong ', # 0xef
'Xi ', # 0xf0
'Ping ', # 0xf1
'Qiu ', # 0xf2
'Jin ', # 0xf3
'Chun ', # 0xf4
'Jie ', # 0xf5
'Wei ', # 0xf6
'Tui ', # 0xf7
'Cao ', # 0xf8
'Yu ', # 0xf9
'Yi ', # 0xfa
'Ji ', # 0xfb
'Liao ', # 0xfc
'Bi ', # 0xfd
'Lu ', # 0xfe
'Su ', # 0xff
)
|
zygmuntz/pybrain | refs/heads/master | pybrain/structure/modules/table.py | 31 | __author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
from pybrain.structure.modules.module import Module
from pybrain.structure.parametercontainer import ParameterContainer
class Table(Module, ParameterContainer):
    """ implements a simple 2D table with dimensions rows x columns,
        which is basically a wrapper for a numpy array.
    """
    def __init__(self, numRows, numColumns, name=None):
        """ initialize with the number of rows and columns. the table
            values are all set to zero.
        """
        # The Module takes 2 inputs (row, column) and produces 1 output
        # (the cell value); the table data lives in the flat parameter
        # vector provided by ParameterContainer.
        Module.__init__(self, 2, 1, name)
        ParameterContainer.__init__(self, numRows*numColumns)
        self.numRows = numRows
        self.numColumns = numColumns
    def _forwardImplementation(self, inbuf, outbuf):
        """ takes two coordinates, row and column, and returns the
            value in the table.
        """
        outbuf[0] = self.params.reshape(self.numRows, self.numColumns)[inbuf[0], inbuf[1]]
    def updateValue(self, row, column, value):
        """ set the value at a certain location in the table. """
        # Relies on reshape returning a view of self.params (true for a
        # contiguous array), so the assignment mutates params in place.
        self.params.reshape(self.numRows, self.numColumns)[row, column] = value
    def getValue(self, row, column):
        """ return the value at a certain location in the table. """
        return self.params.reshape(self.numRows, self.numColumns)[row, column]
|
CaliOpen/CaliOpen | refs/heads/master | src/backend/main/py.main/caliopen_main/common/store/tag.py | 1 | # -*- coding: utf-8 -*-
"""Caliopen tag objects."""
from __future__ import absolute_import, print_function, unicode_literals
from cassandra.cqlengine import columns
from elasticsearch_dsl import InnerObjectWrapper, Date, Integer
from elasticsearch_dsl import Boolean, Keyword
from caliopen_storage.store import BaseUserType
class ResourceTag(BaseUserType):
    """Tag nested in resource model (Cassandra user-defined type)."""
    # Primary key field name used by BaseUserType.
    _pkey = 'tag_id'
    date_insert = columns.DateTime()
    importance_level = columns.Integer()
    name = columns.Text()
    tag_id = columns.UUID()
    type = columns.Text()
class IndexedResourceTag(InnerObjectWrapper):
    """Nested tag into indexed resource model (Elasticsearch mapping).

    NOTE(review): `type` is mapped as Boolean here but as Text in the
    Cassandra ResourceTag above -- looks inconsistent; confirm which is
    intended.
    """
    date_insert = Date()
    importance_level = Integer()
    name = Keyword()
    tag_id = Keyword()
    type = Boolean()
|
ch3ll0v3k/scikit-learn | refs/heads/master | sklearn/cluster/__init__.py | 364 | """
The :mod:`sklearn.cluster` module gathers popular unsupervised clustering
algorithms.
"""
from .spectral import spectral_clustering, SpectralClustering
from .mean_shift_ import (mean_shift, MeanShift,
estimate_bandwidth, get_bin_seeds)
from .affinity_propagation_ import affinity_propagation, AffinityPropagation
from .hierarchical import (ward_tree, AgglomerativeClustering, linkage_tree,
FeatureAgglomeration)
from .k_means_ import k_means, KMeans, MiniBatchKMeans
from .dbscan_ import dbscan, DBSCAN
from .bicluster import SpectralBiclustering, SpectralCoclustering
from .birch import Birch
__all__ = ['AffinityPropagation',
'AgglomerativeClustering',
'Birch',
'DBSCAN',
'KMeans',
'FeatureAgglomeration',
'MeanShift',
'MiniBatchKMeans',
'SpectralClustering',
'affinity_propagation',
'dbscan',
'estimate_bandwidth',
'get_bin_seeds',
'k_means',
'linkage_tree',
'mean_shift',
'spectral_clustering',
'ward_tree',
'SpectralBiclustering',
'SpectralCoclustering']
|
grucin/Potyczki-Algorytmiczne-2016 | refs/heads/master | sze/test.py | 1 | import unittest
from sze import sze
class SzeTests(unittest.TestCase):
    """Unit tests for sze(); the asserts show it returns 1 for the
    satisfiable inputs below and 0 otherwise (exact semantics live in
    the sze module)."""
    def test_ok(self):
        self.assertEqual(
            sze(2, ((3, 8, 3), (2, 5, 2), (3, 7, 3))), 1)
    def test_ok_full(self):
        self.assertEqual(
            sze(3, (
                (0, 9, 6),
                (0, 8, 6),
                (0, 9, 6),
                (4, 8, 3),
                (5, 7, 2),
                (4, 9, 4)
            )), 1)
    def test_kamil(self):
        self.assertEqual(
            sze(2, (
                (0, 3, 1),
                (0, 7, 3),
                (1, 2, 1),
                (1, 2, 1),
                (0, 1, 1),
                (3, 6, 3),
                (3, 6, 3)
            )), 1)
    def test_fail(self):
        # Expected unsatisfiable case.
        self.assertEqual(sze(1, ((0, 1, 1), (0, 1, 1))), 0)
|
cuckoobox/cuckoo | refs/heads/master | cuckoo/machinery/xenserver.py | 1 | # Copyright (C) 2014-2017 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import logging
import threading
from cuckoo.common.abstracts import Machinery
from cuckoo.common.exceptions import CuckooMachineError, CuckooDependencyError
try:
import XenAPI
HAVE_XENAPI = True
except ImportError:
HAVE_XENAPI = False
log = logging.getLogger(__name__)
class XenServer(Machinery):
    """Virtualization layer for XenServer using the XenAPI XML-RPC interface."""
    # Machines are looked up by their XenServer uuid rather than by name.
    LABEL = "uuid"
    # Power States (XenAPI power_state strings).
    RUNNING = "Running"
    PAUSED = "Paused"
    POWEROFF = "Halted"
    ABORTED = "Suspended"

    def _initialize_check(self):
        """Check XenServer configuration, initialize a Xen API connection, and
        verify machine validity.
        """
        # One XenAPI session is cached per thread ident (see the `session`
        # property below) -- presumably because sessions should not be
        # shared across threads; confirm against XenAPI docs.
        self._sessions = {}
        if not HAVE_XENAPI:
            raise CuckooDependencyError("Unable to import XenAPI")
        if not self.options.xenserver.user:
            raise CuckooMachineError("XenServer username missing, please add "
                                     "it to xenserver.conf.")
        if not self.options.xenserver.password:
            raise CuckooMachineError("XenServer password missing, please add "
                                     "it to xenserver.conf")
        if not self.options.xenserver.url:
            raise CuckooMachineError("XenServer url missing, please add it to "
                                     "xenserver.conf")
        self._make_xenapi_session()
        for machine in self.machines():
            uuid = machine.label
            (ref, vm) = self._check_vm(uuid)
            # Machines without a snapshot must have all writable disks
            # set to reset-on-boot so each task starts from clean state.
            if machine.snapshot:
                self._check_snapshot(uuid, machine.snapshot)
            else:
                self._check_disks_reset(vm)
        super(XenServer, self)._initialize_check()

    @property
    def session(self):
        # Return (lazily creating and caching) the XenAPI session bound
        # to the calling thread.
        tid = threading.current_thread().ident
        sess = self._sessions.get(tid, None)
        if sess is None:
            sess = self._make_xenapi_session(tid)
        return sess
def _make_xenapi_session(self, tid=None):
    """Create a XenAPI session, cache it under the given thread ident
    (defaults to the calling thread's) and return it.
    @param tid: thread ident to register the session under.
    @raise CuckooMachineError: if the url or the credentials are invalid.
    """
    tid = tid or threading.current_thread().ident
    try:
        sess = XenAPI.Session(self.options.xenserver.url)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are not swallowed.
    except Exception:
        raise CuckooMachineError("Could not connect to XenServer: invalid "
                                 "or incorrect url, please ensure the url "
                                 "is correct in xenserver.conf")
    try:
        sess.xenapi.login_with_password(
            self.options.xenserver.user, self.options.xenserver.password
        )
    except Exception:
        raise CuckooMachineError("Could not connect to XenServer: "
                                 "incorrect credentials, please ensure "
                                 "the user and password are correct in "
                                 "xenserver.conf")
    self._sessions[tid] = sess
    return sess
def _get_vm_ref(self, uuid):
    """Get a virtual machine reference.
    @param uuid: vm uuid
    """
    # XenAPI stores uuids lower-case.
    return self.session.xenapi.VM.get_by_uuid(uuid.lower())

def _get_vm_record(self, ref):
    """Get the virtual machine record.
    @param ref: vm reference
    """
    return self.session.xenapi.VM.get_record(ref)

def _get_vm_power_state(self, ref):
    """Get the virtual machine power state.
    @param ref: vm reference
    """
    return self.session.xenapi.VM.get_power_state(ref)

def _check_vm(self, uuid):
    """Check vm existence and validity; return its (ref, record) pair.
    @param uuid: vm uuid
    @raise CuckooMachineError: if the uuid names a snapshot, template or
        control domain instead of a regular vm.
    """
    try:
        ref = self._get_vm_ref(uuid)
        vm = self._get_vm_record(ref)
    except XenAPI.Failure as e:
        raise CuckooMachineError("Vm not found: %s: %s"
                                 % (uuid, e.details[0]))
    if vm["is_a_snapshot"]:
        raise CuckooMachineError("Vm is a snapshot: %s" % uuid)
    if vm["is_a_template"]:
        raise CuckooMachineError("Vm is a template: %s" % uuid)
    if vm["is_control_domain"]:
        raise CuckooMachineError("Vm is a control domain: %s" % uuid)
    return (ref, vm)
def _check_snapshot(self, vm_uuid, snapshot_uuid):
    """Check snapshot existence and that the snapshot is of the specified
    vm uuid.
    @param vm_uuid: vm uuid
    @param snapshot_uuid: snapshot uuid
    @raise CuckooMachineError: if the snapshot is missing, is not
        actually a snapshot, or belongs to a different vm.
    """
    try:
        snapshot_ref = self._get_vm_ref(snapshot_uuid)
        snapshot = self._get_vm_record(snapshot_ref)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are not swallowed.
    except Exception:
        raise CuckooMachineError("Snapshot not found: %s" % snapshot_uuid)
    if not snapshot["is_a_snapshot"]:
        raise CuckooMachineError("Invalid snapshot: %s" % snapshot_uuid)
    try:
        parent = self._get_vm_record(snapshot["snapshot_of"])
    except Exception:
        raise CuckooMachineError("Invalid snapshot: %s" % snapshot_uuid)
    parent_uuid = parent["uuid"]
    if parent_uuid != vm_uuid:
        raise CuckooMachineError("Snapshot does not belong to specified "
                                 "vm: %s" % snapshot_uuid)
def _check_disks_reset(self, vm):
    """Check whether each attached disk is set to reset on boot.
    @param vm: vm record
    @raise CuckooMachineError: if a writable disk is not reset-on-boot.
    """
    for ref in vm["VBDs"]:
        try:
            vbd = self.session.xenapi.VBD.get_record(ref)
        # Narrowed from a bare `except:`; an unreadable VBD is only
        # logged, not fatal.
        except Exception:
            log.warning("Invalid VBD for vm %s: %s", vm["uuid"], ref)
            continue
        if vbd["type"] == "Disk":
            vdi_ref = vbd["VDI"]
            try:
                vdi = self.session.xenapi.VDI.get_record(vdi_ref)
            except Exception:
                log.warning("Invalid VDI for vm %s: %s", vm["uuid"],
                            vdi_ref)
                continue
            # Writable disks must reset on boot so state does not leak
            # between analysis runs; read-only disks are exempt.
            if vdi["on_boot"] != "reset" and vdi["read_only"] is False:
                raise CuckooMachineError(
                    "Vm %s contains invalid VDI %s: disk is not reset on "
                    "boot. Please set the on-boot parameter to 'reset'."
                    % (vm["uuid"], vdi["uuid"]))
def _snapshot_from_vm_uuid(self, uuid):
    """Get the snapshot uuid configured for a virtual machine.
    @param uuid: vm uuid
    @return: the configured snapshot uuid (falsy when none is set).
    """
    machine = self.db.view_machine_by_label(uuid)
    return machine.snapshot

def _is_halted(self, vm):
    """Check whether the virtual machine is powered off, i.e. its
    power_state is "Halted".  (The original docstring said "is
    running", which is the opposite of what the code tests.)
    @param vm: vm record (not a uuid)
    """
    return vm["power_state"] == "Halted"
def start(self, label, task):
    """Start a virtual machine: revert+resume when a snapshot is
    configured, cold boot otherwise.
    @param label: vm uuid
    @param task: task object.
    @raise CuckooMachineError: if the vm is already running or XenAPI
        rejects the revert/resume/start call.
    """
    vm_ref = self._get_vm_ref(label)
    vm = self._get_vm_record(vm_ref)
    if not self._is_halted(vm):
        # Bug fix: the label was previously passed as a second argument
        # to the exception (logging-style), so "%s" was never
        # interpolated into the message.
        raise CuckooMachineError("Vm is already running: %s" % label)
    snapshot = self._snapshot_from_vm_uuid(label)
    if snapshot:
        # Snapshot configured: revert to it, then resume the vm.
        snapshot_ref = self._get_vm_ref(snapshot)
        try:
            log.debug("Reverting vm %s to snapshot %s", label, snapshot)
            self.session.xenapi.VM.revert(snapshot_ref)
            log.debug("Revert completed for vm %s", label)
        except XenAPI.Failure as e:
            raise CuckooMachineError("Unable to revert vm %s: %s"
                                     % (label, e.details[0]))
        try:
            log.debug("Resuming reverted vm %s", label)
            self.session.xenapi.VM.resume(vm_ref, False, False)
        except XenAPI.Failure as e:
            raise CuckooMachineError("Unable to resume vm %s: %s"
                                     % (label, e.details[0]))
    else:
        # No snapshot: cold boot (disks are reset-on-boot, see
        # _check_disks_reset).
        log.debug("No snapshot found for vm, booting: %s", label)
        try:
            self.session.xenapi.VM.start(vm_ref, False, False)
        except XenAPI.Failure as e:
            raise CuckooMachineError("Unable to start vm %s: %s"
                                     % (label, e.details[0]))
    log.debug("Started vm: %s", label)
def stop(self, label=None):
    """Stop a virtual machine via a hard shutdown.
    @param label: vm uuid
    @raise CuckooMachineError: if the shutdown call fails.
    """
    ref = self._get_vm_ref(label)
    vm = self._get_vm_record(ref)
    if self._is_halted(vm):
        # Stopping an already stopped machine is not an error, only a
        # warning.
        log.warning("Trying to stop an already stopped machine: %s", label)
    else:
        try:
            self.session.xenapi.VM.hard_shutdown(ref)
        except XenAPI.Failure as e:
            raise CuckooMachineError("Error shutting down virtual machine:"
                                     " %s: %s" % (label, e.details[0]))
def _list(self):
    """List available virtual machines (their uuids).
    @raise CuckooMachineError: if unable to list virtual machines.
    """
    try:
        vm_list = []
        for ref in self.session.xenapi.VM.get_all():
            vm = self._get_vm_record(ref)
            vm_list.append(vm['uuid'])
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are not swallowed.
    except Exception:
        raise CuckooMachineError("Cannot list domains")
    else:
        return vm_list
def _status(self, label):
    """Gets current status of a vm.
    @param label: virtual machine uuid
    @return: status string (a XenAPI power state such as "Running" or
        "Halted", matching the class-level state constants).
    """
    ref = self._get_vm_ref(label)
    state = self._get_vm_power_state(ref)
    return state
|
ar7z1/ansible | refs/heads/devel | lib/ansible/executor/process/__init__.py | 2520 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
mick-d/nipype | refs/heads/master | nipype/interfaces/diffusion_toolkit/tests/__init__.py | 14224 | # -*- coding: utf-8 -*-
|
katepanping/libyuv | refs/heads/master | gyp_libyuv.py | 179 | #!/usr/bin/env python
#
# Copyright 2014 The LibYuv Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# This script is a modified copy of the src/build/gyp_chromium.py file.
# It is needed for parallel processing.
# This file is (possibly, depending on python version) imported by
# gyp_libyuv when GYP_PARALLEL=1 and it creates sub-processes
# through the multiprocessing library.
# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
# imports that don't end in .py (and aren't directories with an
# __init__.py). This wrapper makes "import gyp_libyuv" work with
# those old versions and makes it possible to execute gyp_libyuv.py
# directly on Windows where the extension is useful.
import os
# Resolve the directory containing this wrapper and execute the real
# gyp_libyuv script in place so "import gyp_libyuv" works on old
# Pythons/Windows.  NOTE(review): execfile is Python 2 only.
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_libyuv'))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.