| code (string, 3–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GroupGetMemberGroupsParameters(Model):
"""Request parameters for GetMemberGroups API call.
:param security_enabled_only: If true, only membership in security-enabled
groups should be checked. Otherwise, membership in all groups should be
checked.
:type security_enabled_only: bool
"""
_validation = {
'security_enabled_only': {'required': True},
}
_attribute_map = {
'security_enabled_only': {'key': 'securityEnabledOnly', 'type': 'bool'},
}
def __init__(self, security_enabled_only):
self.security_enabled_only = security_enabled_only
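A minimal usage sketch for the generated model above (assuming msrest is installed; serialization goes through the `_attribute_map`, so the wire key is `securityEnabledOnly`):
from msrest import Serializer

params = GroupGetMemberGroupsParameters(security_enabled_only=True)
body = Serializer().body(params, 'GroupGetMemberGroupsParameters')
print(body)  # expected: {'securityEnabledOnly': True}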
| v-iam/azure-sdk-for-python | azure-graphrbac/azure/graphrbac/models/group_get_member_groups_parameters.py | Python | mit | 1,131 |
from math import sqrt
def sq_dist(p, q):
return((p[0] - q[0])**2 + (p[1] - q[1])**2)
def linear_search(points, query):
sqd = float("inf")
for point in points:
d = sq_dist(point, query)
if d < sqd:
nearest = point
sqd = d
return(nearest, sqd)
point_list = [(2, 3), (5, 4), (9, 6), (4, 7), (8, 1), (7, 2)]
n = linear_search(point_list, (9, 2))
print('nearest:', n[0], 'dist:', sqrt(n[1]))
# nearest: (8, 1) dist: 1.4142135623730951
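The loop above tracks the best squared distance by hand; since `sq_dist` already works as a key function, the same search can be written with `min` (a sketch reusing `point_list` and `sq_dist` from above):
query = (9, 2)
nearest = min(point_list, key=lambda p: sq_dist(p, query))
print('nearest:', nearest, 'dist:', sqrt(sq_dist(nearest, query)))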
| o-kei/design-computing-aij | ch4_1/nearest_liner.py | Python | mit | 491 |
#!/usr/bin/env python3
# Author: Brian Fiedler 29 June 2016
# Converts the CRUTEM text data files into a convenient Python dictionary.
# Go to http://www.metoffice.gov.uk/hadobs/crutem4/data/download.html and you should see
# a link to http://www.metoffice.gov.uk/hadobs/crutem4/data/station_files/CRUTEM.4.4.0.0.station_files.zip
# Download that and unzip it, and set path_crutem below.
# Then: python CRUTEM_to_pkl.py
# And: python CRUTEM_to_pkl.py mini
# Example of using the pkl files that were produced:
# inpkl = open('/data/crutem4/crutem44_mini.pkl','rb')
# crutem = pickle.load(inpkl)
# sitename = crutem['723530']['Name']
# monthAvgTemp = crutem['723530']['obs'][2011][6]
# print("July 2011 temperature at",sitename,":",monthAvgTemp)
import glob,sys,pickle
path_crutem = "/data/crutem4/CRUTEM.4.4.0.0.station_files" # Configure for your computer
outpklname='crutem44_all.pkl'
if len(sys.argv)>1 and sys.argv[1]=='mini':
outpklname='crutem44_mini.pkl' #note: mini in name triggers filter below
files=glob.glob(path_crutem+'/*/*')
print(files)
files.sort()
##############
def crutemread(fn,verbose=False):
''' for reading hadcrut station data files'''
inf = open(fn,'r',encoding="ISO-8859-1") # open fn for reading
lines = inf.readlines()
h={} # data file will be converted to a dictionary and store here
doingObs = False # this changes to True when a line starting with 'Obs:' is encountered
h['obs']={} # will have integer years as keys, to hold list of monthly temperatures
for line in lines:
if line[0:4]=='Obs:':
doingObs=True
continue
if not doingObs: # parameter values are put in dictionary
s = line.strip().split('=')
parts = [x.capitalize() for x in s[0].split()]
key = ''.join(parts)
v = s[-1].strip()
if v.isdigit():
value = int(v)
else:
try:
value = float(v)
except ValueError:
value = v
if verbose: print(key,value)
h[key] = value
else: #process the line that begins with a year number
s = line.strip().split()
key = int(s[0])
values = [float(x) for x in s[1:]] # 12 temperature numbers, and 12 code numbers
h['obs'][key] = values
return h
# When making a miniature pkl file with just 12 sites, only these values for filenames are retained
minikeep = '724830 725300 722230 225500 724210 756039 719360 702000 723530 040300 725460 014920'.split()
qall = {} # will be a master dictionary of dictionaries, to be pickled
count = 0
for filename in files:
z = filename.split('/') # path components; z[-1] is the station file name
if 'mini' in outpklname and z[-1] not in minikeep: continue
print(filename)
q = crutemread(filename,verbose=False) # The file is put into a Python dictionary
recn = z[-1] # dictionary key is the file name
qall[recn] = q # add dictionary to the master dictionary that will be pickled
count += 1
poufa = open(outpklname,'wb')
pickle.dump(qall,poufa,-1)
poufa.close()
print("wrote",outpklname,"number of sites=",count)
| KamilKwiatkowski123/Repozytorium | recipe-580687-1.py | Python | gpl-3.0 | 3,146 |
# Creating user accounts for the Messidge python library
# (c) 2017 David Preece, this work is in the public domain
import sys
from messidge.broker.identity import Identity
if len(sys.argv) != 2:
print("Run: python3 gen_user_account.py user@email", file=sys.stderr)
sys.exit(1)
identity = Identity()
try:
token = identity.create_pending_user(sys.argv[1])
print("The user will need this token: " + token)
finally:
identity.stop() # because identity runs a background thread
| RantyDave/messidge | demo/gen_user_account.py | Python | bsd-2-clause | 493 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
from __future__ import unicode_literals
import logging
import os
from datetime import datetime
from . import __version__ as cookiecutter_version
from .config import get_user_config, USER_CONFIG_PATH
from .prompt import prompt_for_config
from .generate import generate_context, generate_files
from .vcs import clone
from .compat import PY3
logger = logging.getLogger(__name__)
builtin_abbreviations = {
'gh': 'https://github.com/{0}.git',
'bb': 'https://bitbucket.org/{0}',
}
def expand_abbreviations(template, config_dict):
"""
Expand abbreviations in a template name.
:param template: The project template name.
:param config_dict: The user config, which will contain abbreviation
definitions.
"""
abbreviations = builtin_abbreviations.copy()
abbreviations.update(config_dict.get('abbreviations', {}))
if template in abbreviations:
return abbreviations[template]
# Split on colon. If there is no colon, rest will be empty
# and prefix will be the whole template
prefix, sep, rest = template.partition(':')
if prefix in abbreviations:
return abbreviations[prefix].format(rest)
return template
def cookiecutter(template, checkout=None, no_input=False, extra_context=None,
extra_globals=None, rc_file=USER_CONFIG_PATH):
"""
API equivalent to using Cookiecutter at the command line.
:param template: A directory containing a project template directory,
or a URL to a git repository.
:param checkout: The branch, tag or commit ID to checkout after clone.
:param no_input: If True, skip all command-line prompts and use default values.
:param extra_context: A dictionary of context that overrides default
and user configuration.
:param extra_globals: A dictionary of values added to the Jinja2 context,
e.g. custom filters.
:param rc_file: Path to the user configuration file
"""
# Get user config from ~/.cookiecutterrc or equivalent
# If no config file, sensible defaults from config.DEFAULT_CONFIG are used
config_dict = get_user_config(rc_file)
template = expand_abbreviations(template, config_dict)
# TODO: find a better way to tell if it's a repo URL
if 'git@' in template or 'https://' in template:
repo_dir = clone(
repo_url=template,
checkout=checkout,
clone_to_dir=config_dict['cookiecutters_dir'],
no_input=no_input
)
else:
# If it's a local repo, no need to clone or copy to your
# cookiecutters_dir
repo_dir = template
context_file = os.path.join(repo_dir, 'cookiecutter.json')
logging.debug('context_file is {0}'.format(context_file))
context = generate_context(
context_file=context_file,
default_context=config_dict['default_context'],
extra_context=extra_context,
)
# prompt the user to manually configure at the command line.
# except when 'no-input' flag is set
context['cookiecutter'] = prompt_for_config(context, no_input)
# Add some system values, especially for use by hook scripts
now = datetime.now()
context.update(extra_globals or {})
context.update(dict(
version=cookiecutter_version,
repo_dir=os.path.abspath(repo_dir),
context_file=os.path.abspath(context_file),
current_year=now.year,
current_date=now.ctime(),
current_date_iso=now.isoformat(b' ' if not PY3 else u' '),
))
# Create project from local context and project template.
generate_files(
repo_dir=repo_dir,
context=context
)
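`expand_abbreviations` above is a pure function, so its behavior is easy to check in isolation; a small sketch (the 'corp' abbreviation and its URL are hypothetical):
config = {'abbreviations': {'corp': 'https://git.example.com/{0}.git'}}
print(expand_abbreviations('corp:team/project', config))
# https://git.example.com/team/project.git
print(expand_abbreviations('gh:audreyr/cookiecutter-pypackage', {}))
# https://github.com/audreyr/cookiecutter-pypackage.git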
| jhermann/cookiecutter | cookiecutter/main.py | Python | bsd-3-clause | 3,903 |
#-*- encoding: utf-8
import maya.cmds
import maya.app.mayabullet as bullet
import maya.OpenMaya
import os.path
import util
class RigidBodyGenerator:
def __init__(self, mmdData, filePath, rigiddict):
self.mmdData = mmdData
self.directory = os.path.dirname(filePath)
#self.nameDict, self.dictFlag = filemanager.openCSV(self.directory, "rigiddict.csv")
self.nameDict = rigiddict
self.dictFlag = True
self.constPI = 180.0 / 3.141592653589793
def _createPostureCube(self, rigid):
pCube = maya.cmds.polyCube()[0]
pos = rigid.shape_position
maya.cmds.setAttr("%s.translateX" % pCube, pos.x)
maya.cmds.setAttr("%s.translateY" % pCube, pos.y)
maya.cmds.setAttr("%s.translateZ" % pCube, -pos.z)
rot = rigid.shape_rotation
rot = maya.OpenMaya.MEulerRotation(-rot.x, rot.y, rot.z, 4) #kYXZ
quat = rot.asQuaternion()
rot = quat.asEulerRotation()
maya.cmds.setAttr("%s.rotateOrder" % pCube, 4) # kYXZ
maya.cmds.setAttr("%s.rotateX" % pCube, rot.x * self.constPI)
maya.cmds.setAttr("%s.rotateY" % pCube, rot.y * self.constPI)
maya.cmds.setAttr("%s.rotateZ" % pCube, rot.z * self.constPI)
maya.cmds.setAttr("%s.scaleX" % pCube, 0.0001)
maya.cmds.setAttr("%s.scaleY" % pCube, 0.0001)
maya.cmds.setAttr("%s.scaleZ" % pCube, 0.0001)
maya.cmds.makeIdentity(apply=True, s=1)
return pCube
def _setColliderSize(self, shape, rigid):
maya.cmds.setAttr("%s.autoFit" % shape, 0)
shapeSize = rigid.shape_size
if rigid.shape_type == 0: # sphere
maya.cmds.setAttr("%s.colliderShapeType" % shape, 2)
maya.cmds.setAttr("%s.radius" % shape, shapeSize[0])
util.setFloat(shape, "defaultRadius", shapeSize[0])
elif rigid.shape_type == 1: # box
maya.cmds.setAttr("%s.colliderShapeType" % shape, 1)
maya.cmds.setAttr("%s.extentsX" % shape, shapeSize[0] * 2)
maya.cmds.setAttr("%s.extentsY" % shape, shapeSize[1] * 2)
maya.cmds.setAttr("%s.extentsZ" % shape, shapeSize[2] * 2)
util.setFloat(shape, "defaultExtentsX", shapeSize[0] * 2)
util.setFloat(shape, "defaultExtentsY", shapeSize[1] * 2)
util.setFloat(shape, "defaultExtentsZ", shapeSize[2] * 2)
elif rigid.shape_type == 2: # capsule
maya.cmds.setAttr("%s.colliderShapeType" % shape, 3)
maya.cmds.setAttr("%s.radius" % shape, shapeSize[0])
maya.cmds.setAttr("%s.length" % shape, shapeSize[1] + shapeSize[0] * 2)
util.setFloat(shape, "defaultRadius", shapeSize[0])
util.setFloat(shape, "defaultLength", shapeSize[1] + shapeSize[0] * 2)
def _setParams(self, shape, param):
maya.cmds.setAttr("%s.mass" % shape, param.mass)
maya.cmds.setAttr("%s.linearDamping" % shape, param.linear_damping)
maya.cmds.setAttr("%s.friction" % shape, param.friction)
maya.cmds.setAttr("%s.angularDamping" % shape, param.angular_damping)
maya.cmds.setAttr("%s.restitution" % shape, param.restitution)
util.setFloat(shape, "defaultMass", param.mass)
util.setFloat(shape, "defaultLinearDamping", param.linear_damping)
util.setFloat(shape, "defaultFriction", param.friction)
util.setFloat(shape, "defaultAngularDamping", param.angular_damping)
util.setFloat(shape, "defaultRestitution", param.restitution)
def _parentConstraint(self, a, b):
maya.cmds.select(a)
maya.cmds.select(b, tgl=True)
maya.cmds.parentConstraint(mo=True)
def _constraintRigidbody(self, shape, pCube, rigid, jointNames):
targetJoint = ""
if rigid.bone_index >= 0 and len(jointNames) > rigid.bone_index:
targetJoint = jointNames[rigid.bone_index]
if targetJoint != "":
if rigid.mode == 0: # follows bone
maya.cmds.setAttr("%s.bodyType" % shape, 1)
maya.cmds.parentConstraint(targetJoint, pCube, mo=True)
elif rigid.mode == 1: # physics simulation
maya.cmds.setAttr("%s.bodyType" % shape, 2)
#maya.cmds.parentConstraint(pCube, targetJoint, mo=True)
maya.cmds.pointConstraint(targetJoint, pCube, mo=True)
maya.cmds.orientConstraint(pCube, targetJoint, mo=True)
elif rigid.mode == 2: # physics with position alignment
maya.cmds.setAttr("%s.bodyType" % shape, 2)
maya.cmds.pointConstraint(targetJoint, pCube, mo=True)
maya.cmds.orientConstraint(pCube, targetJoint, mo=True)
def _setCollisionFilter(self, shape, rigid):
collisionGroup = 2**rigid.collision_group
noCollisionGroup = (-rigid.no_collision_group - 1) ^ 0xFFFF
maya.cmds.setAttr("%s.collisionFilterGroup" % shape, collisionGroup)
maya.cmds.setAttr("%s.collisionFilterMask" % shape, noCollisionGroup)
def _createRigidbodies(self, jointNames):
rigids = self.mmdData.rigidbodies
shapes = []
for i in range(len(rigids)):
rigid = rigids[i]
maya.cmds.select(d=True)
pCube = self._createPostureCube(rigid)
pCube = maya.cmds.rename(pCube, "rcube_" + self.nameDict[i])
maya.cmds.select(pCube)
shape = bullet.RigidBody.CreateRigidBody().executeCommandCB()[1]
shape = maya.cmds.rename(shape, "rigid_" + self.nameDict[i])
util.setJpName(shape, rigid.name)
self._setColliderSize(shape, rigid)
self._setParams(shape, rigid.param)
self._constraintRigidbody(shape, pCube, rigid, jointNames)
self._setCollisionFilter(shape, rigid)
shapes.append(shape)
return shapes
def _instantiateJoint(self, joint, shapes):
ai = joint.rigidbody_index_a
bi = joint.rigidbody_index_b
maya.cmds.select(shapes[ai])
maya.cmds.select(shapes[bi], tgl=True)
constraint = bullet.RigidBodyConstraint.CreateRigidBodyConstraint().executeCommandCB()[0]
maya.cmds.setAttr("%s.constraintType" % constraint, 5)
constraint = maya.cmds.rename(constraint, "joint_%s_%s" % (shapes[ai], shapes[bi]))
return constraint
def _constraintJointWithRigidbody(self, constraint, joint, jointNames):
ai = joint.rigidbody_index_b # b is correct here (not index_a)
bi = self.mmdData.rigidbodies[ai].bone_index
if bi >= 0 and bi < len(jointNames):
parentBoneName = jointNames[bi]
try:
maya.cmds.pointConstraint(parentBoneName, constraint)
#pass
except:
print "Failed point constraint for joint solver: %s" % joint.name
def _setJointLimitation(self, constraint, minVector, maxVector, limitType, axis, i):
args = (constraint, limitType, axis)
minValue = minVector[i]
maxValue = maxVector[i]
if minVector[i] > maxVector[i]:
maya.cmds.setAttr("%s.%sConstraint%s" % args, 0) # Free
if minVector[i] == 0.0 and maxVector[i] == 0.0:
maya.cmds.setAttr("%s.%sConstraint%s" % args, 1) # Lock
else:
maya.cmds.setAttr("%s.%sConstraint%s" % args, 2) # Limit
maya.cmds.setAttr("%s.%sConstraintMin%s" % args, minValue)
maya.cmds.setAttr("%s.%sConstraintMax%s" % args, maxValue)
def _setSpringLimitation(self, constraint, limitVector, limitType, axis, i):
limitValue = limitVector[i] #self._convertCoordinate(limitVector[i], limitType, axis)
args = (constraint, limitType, axis)
#if limitValue > 0.0 or limitValue < 0.0: # unsure whether this check is needed
maya.cmds.setAttr("%s.%sSpringEnabled%s" % args, 1)
maya.cmds.setAttr("%s.%sSpringStiffness%s" % args, limitValue)
util.setFloat(constraint, "default%sSpringStiffness%s" % (limitType.title(), axis), limitValue)
def _createJoints(self, shapes, jointNames):
joints = self.mmdData.joints
constraintNames = []
solverNames = []
for i in range(len(joints)):
joint = joints[i]
constraint = self._instantiateJoint(joint, shapes)
try:
solverName = maya.cmds.rename(u"bulletRigidBodyConstraint1", "solver_%s" % self.nameDict[i])
self._constraintJointWithRigidbody(solverName, joint, jointNames)
solverNames.append(solverName)
except:
print "Failed rename joint solver: %s" % joint.name
axis = ["X", "Y", "Z"]
for i in range(3):
self._setJointLimitation(constraint, joint.translation_limit_min, joint.translation_limit_max, "linear", axis[i], i)
self._setJointLimitation(constraint, joint.rotation_limit_min, joint.rotation_limit_max, "angular", axis[i], i)
self._setSpringLimitation(constraint, joint.spring_constant_translation, "linear", axis[i], i)
self._setSpringLimitation(constraint, joint.spring_constant_rotation, "angular", axis[i], i)
constraintNames.append(constraint)
return solverNames
def generate(self, jointNames):
rigidShapes = self._createRigidbodies(jointNames)
constraintNames = self._createJoints(rigidShapes, jointNames)
return rigidShapes, constraintNames
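The `constPI` factor set in `__init__` is simply a radians-to-degrees conversion applied when writing Euler angles back to Maya; the arithmetic in isolation:
import math
RAD_TO_DEG = 180.0 / math.pi  # same constant as self.constPI above
print(math.pi / 2 * RAD_TO_DEG)  # 90.0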
| GRGSIBERIA/mmd-transporter | mmd-transporter/importer/rigidgen.py | Python | gpl-2.0 | 8,714 |
# encoding: utf-8
#----------------------------------------
# file.py by Thomas Gläßle
#
# To the extent possible under law, the person who associated CC0 with
# file.py has waived all copyright and related or neighboring rights
# to file.py.
#
# You should have received a copy of the CC0 legalcode along with this
# work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
#----------------------------------------
"""
Resource provider for plain filesystem resources.
"""
__all__ = ['FileResource']
import os
from io import open
from contextlib import contextmanager
from .base import ResourceProvider
class FileResource(ResourceProvider):
"""
File system resource provider.
Uses the builtin open() to open ordinary files and os.listdir() to list
directory contents.
"""
def __init__(self, path):
"""
Initialize the filesystem resource provider.
:param string path: name of a filesystem object (file/folder).
"""
self.path = path
def open(self, name='', encoding=None):
if encoding is None:
return open(self._get_path(name), 'rb')
else:
return open(self._get_path(name), 'rt', encoding=encoding)
def listdir(self, name=''):
return os.listdir(self._get_path(name))
def get(self, name=''):
return self.__class__(self._get_path(name))
@contextmanager
def filename(self, name=''):
yield self._get_path(name)
def _get_path(self, name):
if not name:
return self.path
elif isinstance(name, list):
return os.path.join(self.path, *name)
else:
return os.path.join(self.path, *name.split('/'))
def provider(self):
return self.__class__(os.path.dirname(self.path))
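A minimal usage sketch (the paths are hypothetical, and it assumes the surrounding `cern.resource` package is importable so `ResourceProvider` resolves):
res = FileResource('/etc')
print(res.listdir())               # directory listing of /etc
sub = res.get('hosts')             # FileResource for /etc/hosts
with res.open('hosts', encoding='utf-8') as f:
    print(f.readline().rstrip())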
| pymad/cpymad | src/cern/resource/file.py | Python | apache-2.0 | 1,814 |
# Copyright (c) 2011, Shutterstock Images LLC.
# All rights reserved.
#
# This file is subject to the MIT License (see the LICENSE file).
import datetime
import json
import os
import txmongo
import validictory
from twisted.internet import defer
from twisted.python import log
from oplog import utils
PARSE_ERROR = -32700
INVALID_REQUEST = -32600
METHOD_NOT_FOUND = -32601
INVALID_PARAMS = -32602
INTERNAL_ERROR = -32603
SERVER_ERROR = -32099
ERRORS = {
PARSE_ERROR: 'Parse error', # Invalid JSON was received by the server.
INVALID_REQUEST: 'Invalid Request', # The JSON sent is not a valid Request object.
METHOD_NOT_FOUND: 'Method not found', # The method does not exist / is not available.
INVALID_PARAMS: 'Invalid params', # Invalid method parameter(s).
INTERNAL_ERROR: 'Internal error', # Internal JSON-RPC error.
SERVER_ERROR: 'Server error', # Reserved for implementation-defined server-errors.
}
class Error(Exception):
def __init__(self, code=INTERNAL_ERROR, message=None, http_code=400):
self.code = code
self.http_code = http_code
if not message:
message = ERRORS.get(code, INTERNAL_ERROR)
super(Error, self).__init__(message)
class ServerError(Error):
def __init__(self, message, code=SERVER_ERROR, **kwargs):
super(ServerError, self).__init__(code=code, message=message, **kwargs)
class Handler(object):
_schema = {}
_schema_list = [os.path.join(os.path.dirname(os.path.realpath(__file__)), 'schema')]
def __init__(self, user, settings, db):
self.user = user
self.settings = settings
self.db = db
def err(self, message, error):
log.err('%s (%s): %s' % (message, type(error), error))
raise ServerError(message)
def load_schema(self, name):
for root in self._schema_list:
path = '%s.json' % os.path.join(root, name)
if os.path.isfile(path):
try:
with open(path) as f:
return json.loads(f.read())
except Exception, error:
log.err('Unable to parse schema: %s' % path)
log.err('No schema found for: %s' % name)
def validate(self, name, data):
if name not in self._schema:
schema = self.load_schema(name)
if schema:
self._schema[name] = schema
else:
schema = self._schema.get(name)
# Only validate if we have a schema defined
if schema:
try:
validictory.validate(data, schema, required_by_default=False)
except ValueError, error:
log.err('Validation failed because: %s' % error)
raise Error(INVALID_PARAMS)
@defer.inlineCallbacks
def __call__(self, params):
result = yield self.run(**params)
try:
defer.returnValue({'result': result})
except TypeError, error:
log.err('Unable to encode result: %s (%s)' % (error, result))
raise Error(INTERNAL_ERROR)
@defer.inlineCallbacks
def run(self, **kwargs):
raise Error(METHOD_NOT_FOUND)
class EntryHandler(Handler):
def backup(self, entry):
return self.db.entry_history.insert({
'date': datetime.datetime.utcnow(),
'body': entry,
'type': self.name,
})
class EntryDel(EntryHandler):
name = 'entry.del'
@defer.inlineCallbacks
def run(self, **values):
self.validate(self.name, values)
try:
values = utils.mongify.encode(values)
# Get old value so we can add to history
entry = yield self.db.entry.find_one(values)
if not entry:
raise Error(message='Entry with id "%s" not found' % values['_id'])
result = yield self.db.entry.remove(values, safe=True)
if not result.get('err'):
yield self.backup(entry)
defer.returnValue(result)
except Exception, error:
self.err('Failed to delete entry', error)
class EntryGet(EntryHandler):
name = 'entry.get'
def gen_sort(self, sort):
if not sort:
return
f = ()
for name, ordering in sort:
if ordering == -1:
f += txmongo.filter.DESCENDING(name)
elif ordering == 1:
f += txmongo.filter.ASCENDING(name)
return txmongo.filter.sort(*f) if f else None
@defer.inlineCallbacks
def run(self, **values):
self.validate(self.name, values)
find = values['find']
skip = values.get('skip', 0)
limit = values.get('limit', 20) # Default to 20 records
sort = values.get('sort', [])
fields = values.get('fields')
try:
sort = self.gen_sort(sort)
if '_id' in find:
find['_id'] = txmongo.ObjectId(find['_id'])
find = utils.mongify.encode(find)
results = yield self.db.entry.find(spec=find, skip=skip, limit=limit, filter=sort, fields=fields)
defer.returnValue(utils.mongify.decode(list(results)))
except Exception, error:
log.err('Get entry error: %s' % error)
raise ServerError('Failed to query entries')
class EntryPut(EntryHandler):
name = 'entry.put'
@defer.inlineCallbacks
def run(self, **values):
def validate(values):
# Validate entry.put schema
self.validate(self.name, values)
# Validate type schema
if '_type' in values and isinstance(values['_type'], basestring):
self.validate('entry.type.%s' % values['_type'], values)
try:
# Update if we have an _id, this operation is much less efficient
# than an insert because of the read, write and possible second
# write hack to get schema validation
if '_id' in values:
_id = txmongo.ObjectId(values.pop('_id'))
# Get old value so we can revert if schema fails or add to
# history collection
old_entry = yield self.db.entry.find_one({'_id': _id})
if not old_entry:
raise Error(message='Entry with id "%s" not found' % _id)
result = yield self.db.entry.update(
{'_id': _id, '_user': self.user},
# "clean" encode is relatively naive and probably adds
# little security, but attempts to disallow updates to base
# fields that start with an underscore
utils.mongify.encode(values, clean=True),
upsert=False,
safe=True,
)
# Get updated value so we can validate
new_entry = yield self.db.entry.find_one({'_id': _id})
try:
validate(utils.mongify.decode(new_entry))
except ValueError, error:
_id = old_entry.pop('_id')
yield self.db.entry.update({'_id': _id}, old_entry, upsert=False, safe=True)
raise error
else:
yield self.backup(old_entry)
else:
# Set default values
if '_date' not in values:
values['_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
# Enforce user
values['_user'] = self.user
validate(values)
result = yield self.db.entry.insert(utils.mongify.encode(values), safe=True)
defer.returnValue(utils.mongify.decode(result))
except Error, error:
raise error
except Exception, error:
self.err('Failed to put entry', error)
ROUTE = {
EntryDel.name: EntryDel,
EntryGet.name: EntryGet,
EntryPut.name: EntryPut,
}
def route(user, request, message):
return ROUTE.get(message['method'], Handler)(user, request.settings, request.mongo)(message['params'])
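A quick sketch of how the error hierarchy above behaves; constructing an `Error` with only a code looks up the JSON-RPC message table (no Twisted reactor needed for this part):
try:
    raise Error(METHOD_NOT_FOUND)
except Error as e:
    print(e.code, str(e), e.http_code)  # -32601 Method not found 400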
| shutterstock/oplog | oplog/api.py | Python | mit | 8,156 |
#!/usr/bin/env python
# standard library imports
from os.path import exists
# third party related imports
from setuptools import setup, find_packages
# local library imports
from mobile_push import __version__
def read_from_file(filename):
if exists(filename):
with open(filename) as f:
return f.read()
return ''
setup(
name='mobile-push',
version=__version__,
# Your name & email here
author='Yu Liang',
author_email='yu.liang@thekono.com',
# If you had mobile_push.tests, you would also include that in this list
packages=find_packages(),
# Any executable scripts, typically in 'bin'. E.g 'bin/do-something.py'
scripts=[],
# REQUIRED: Your project's URL
url='https://github.com/theKono/mobile-push',
# Put your license here. See LICENSE.txt for more information
license=read_from_file('LICENSE'),
# Put a nice one-liner description here
description='A mobile-push microservice (APNS, GCM)',
long_description=read_from_file('README.md'),
# Any requirements here, e.g. "Django >= 1.1.1"
install_requires=read_from_file('requirements.txt').split('\n'),
)
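`read_from_file('requirements.txt').split('\n')` hands blank lines and comment lines to install_requires unchanged; a slightly more defensive variant (a sketch, not what upstream does):
def parse_requirements(filename):
    # Drop blank lines and comment lines before handing off to setuptools.
    lines = read_from_file(filename).split('\n')
    return [ln.strip() for ln in lines if ln.strip() and not ln.startswith('#')]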
| theKono/mobile-push | setup.py | Python | apache-2.0 | 1,163 |
import cgi
import json
import logging
import os
import sys
from copy import deepcopy
from collections import OrderedDict
from lxml import etree
from path import path
from fs.errors import ResourceNotFoundError
from pkg_resources import resource_string
from xmodule.x_module import XModule, XModuleDescriptor
from xmodule.stringify import stringify_children
from xmodule.mako_module import MakoModuleDescriptor
from xmodule.editing_module import EditingDescriptor
from xmodule.xml_module import XmlDescriptor, name_to_pathname
from xblock.fields import Scope, String, Integer, Dict, Boolean, List
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.mongo import draft
from xmodule.modulestore.exceptions import ItemNotFoundError, InvalidLocationError
from xmodule.modulestore import Location
log = logging.getLogger(__name__)
class PollCompareFields(object):
display_name = String(help="Display name for this module", scope=Scope.settings)
# student_answers = Dict(help="All answers for for poll for student", scope=Scope.user_state)
# student_polls = Dict(help="All poll informations for student ever submited", scope=Scope.user_state)
# capa_ids = List(help="All referenced poll ids for this poll compare page", scope=Scope.user_state)
data = String(help="Html contents to display for this module", default=u"", scope=Scope.content)
# data = String(Help="Html contents to display for this module", default=u"", scope=Scope.content)
poll_compare_count = Integer(help="",default=0,scope=Scope.user_state)
compares = List(help="",scope=Scope.user_state)
# compare:{"from":"","to":""}
class PollCompareModule(PollCompareFields, XModule):
_tag_name = 'poll_compare'
_child_tag_name = 'compare'
_child_tag_name_answers = 'answers'
js = {
'coffee': [resource_string(__name__, 'js/src/javascript_loader.coffee'),
resource_string(__name__, 'js/src/collapsible.coffee')],
'js': [resource_string(__name__, 'js/src/poll_compare/logme.js'),
resource_string(__name__, 'js/src/poll_compare/poll_compare.js'),
resource_string(__name__, 'js/src/poll_compare/poll_compare_main.js')]
}
css = {'scss':[resource_string(__name__,'css/poll_compare/display.scss')]}
js_module_name = "PollCompare"
def handle_ajax(self, dispatch, data):
return json.dumps({'error': 'Unknown Command123!'})
def get_html(self):
if self.data is not None:
compares = self.definition_from_xml_string(self.data)
poll_compares,answers = self.dump_poll_compare(compares)
params = {
'element_id':self.location.html_id(),
'element_class':self.location.category,
'ajax_url':self.system.ajax_url,
'poll_compares':poll_compares,
'answers':answers
}
self.content = self.system.render_template('poll_compare.html', params)
return self.content
def dump_poll(self):
return None
def definition_from_xml_string(self,data):
try:
xml_object = etree.fromstring(data)
if len(xml_object.xpath(self._child_tag_name)) == 0:
raise ValueError("poll_compare definition must include at least one 'compare' tag")
xml_object_copy = deepcopy(xml_object)
compares = []
for element_compare in xml_object_copy.findall(self._child_tag_name):
from_loc = element_compare.get('from_loc', None)
to_loc = element_compare.get('to_loc', None)
compare_id = element_compare.get('compare_id', None)
display_name = element_compare.get("display_name", None)
if from_loc:
if to_loc:
compares.append({
'compare_id': compare_id,
'from_loc': from_loc,
'to_loc': to_loc,
'student_answers': {'from_loc':None,'to_loc':None},
'display_name': display_name
})
xml_object_copy.remove(element_compare)
for answers in xml_object_copy.findall(self._child_tag_name_answers):
if answers is not None:
answer_1 = answers.get('answer1', None)
answer_2 = answers.get('answer2', None)
answer_3 = answers.get('answer3', None)
answer_4 = answers.get('answer4', None)
answer_5 = answers.get('answer5', None)
compares.append({'answers':{
'answer1':answer_1,
'answer2':answer_2,
'answer3':answer_3,
'answer4':answer_4,
'answer5':answer_5
}
})
return compares
except etree.XMLSyntaxError as err:
pass
def dump_poll_compare(self,compares):
"""
Dump poll_compares
"""
if compares is None:
compares = {}
# compares_to_json = {}
compares_to_json = OrderedDict()
answers_to_display = OrderedDict()
for compare_element in compares:
tmp_item = {}
if compare_element.get('from_loc',None) is not None:
tmp_item['from_loc'] = compare_element['from_loc']
tmp_item['to_loc'] = compare_element['to_loc']
tmp_item['display_name'] = compare_element['display_name']
tmp_item['student_answers'] = compare_element['student_answers']
compares_to_json["{0}".format(compare_element['compare_id'])] = tmp_item
else:
answers = None
answers = compare_element.get('answers',None)
if answers is not None:
tmp_item = {}
for key in answers:
tmp_item[key] = answers[key]
answers_to_display['answers'] = tmp_item
else:
tmp_item = {}
tmp_item['answer1'] = "answer1"
tmp_item['answer2'] = "answer2"
tmp_item['answer3'] = "answer3"
tmp_item['answer4'] = "answer4"
tmp_item['answer5'] = "answer5"
answers_to_display['answers'] = tmp_item
_answers = answers_to_display.get('answers',None)
_json = json.dumps(compares_to_json, sort_keys=True, indent=2)
return _json,_answers
class PollCompareDescriptor(PollCompareFields, XmlDescriptor, EditingDescriptor):
_tag_name = 'poll_compare'
_child_tag_name = 'compare'
mako_template = 'widgets/poll_compare-edit.html'
module_class = PollCompareModule
js = {
'coffee': [resource_string(__name__, 'js/src/poll_compare/edit.coffee')]
}
css = {
'scss': [resource_string(__name__,'css/editor/edit.scss'),
resource_string(__name__,'css/poll_compare/edit.scss')]
}
js_module_name = "POLLCompareEditingDescriptor"
filename_extension = "xml"
template_dir_name = "poll_compare"
@classmethod
def load_definition(cls, xml_object, system, location):
'''Load a descriptor from the specified xml_object:
If there is a filename attribute, load it as a string, and
log a warning if it is not parseable by etree.HTMLParser.
If there is not a filename attribute, the definition is the body
of the xml_object, without the root tag (do not want <html> in the
middle of a page)
'''
filename = xml_object.get('filename')
if filename is None:
definition_xml = deepcopy(xml_object)
cls.clean_metadata_from_xml(definition_xml)
return {'data': stringify_children(definition_xml)}, []
else:
# html is special. cls.filename_extension is 'xml', but
# if 'filename' is in the definition, that means to load
# from .html
# 'filename' in html pointers is a relative path
# (not same as 'html/blah.html' when the pointer is in a directory itself)
pointer_path = "{category}/{url_path}".format(
category='poll_compare',
url_path=name_to_pathname(location.name)
)
base = path(pointer_path).dirname()
# log.debug("base = {0}, base.dirname={1}, filename={2}".format(base, base.dirname(), filename))
filepath = "{base}/{name}.html".format(base=base, name=filename)
# log.debug("looking for html file for {0} at {1}".format(location, filepath))
# VS[compat]
# TODO (cpennington): If the file doesn't exist at the right path,
# give the class a chance to fix it up. The file will be written out
# again in the correct format. This should go away once the CMS is
# online and has imported all current (fall 2012) courses from xml
if not system.resources_fs.exists(filepath):
candidates = cls.backcompat_paths(filepath)
# log.debug("candidates = {0}".format(candidates))
for candidate in candidates:
if system.resources_fs.exists(candidate):
filepath = candidate
break
try:
with system.resources_fs.open(filepath) as file:
html = file.read().decode('utf-8')
# Log a warning if we can't parse the file, but don't error
# if not check_html(html) and len(html) > 0:
# msg = "Couldn't parse html in {0}, content = {1}".format(filepath, html)
# log.warning(msg)
# system.error_tracker("Warning: " + msg)
definition = {'data': html}
# TODO (ichuang): remove this after migration
# for Fall 2012 LMS migration: keep filename (and unmangled filename)
definition['filename'] = [filepath, filename]
return definition, []
except (ResourceNotFoundError) as err:
msg = 'Unable to load file contents at path {0}: {1} '.format(
filepath, err)
# add more info and re-raise
raise Exception(msg), None, sys.exc_info()[2]
@classmethod
def backcompat_paths(cls, path):
if path.endswith('.html.xml'):
path = path[:-9] + '.html' # backcompat--look for html instead of xml
if path.endswith('.html.html'):
path = path[:-5] # some people like to include .html in filenames..
candidates = []
while os.sep in path:
candidates.append(path)
_, _, path = path.partition(os.sep)
# also look for .html versions instead of .xml
nc = []
for candidate in candidates:
if candidate.endswith('.xml'):
nc.append(candidate[:-4] + '.html')
return candidates + nc
def get_context(self):
_context = EditingDescriptor.get_context(self)
_context.update({'test':self.data})
if self.data == u'':
template_data='<poll_compare><compare compare_id="compare_1" from_loc="i4x://[org]/[course]/[category]/[url_name]" to_loc="i4x://[org]/[course]/[category]/[url_name]" display_name="test1"></compare><compare compare_id="compare_2" from_loc="i4x://[org]/[course]/[category]/[url_name]" to_loc="i4x://[org]/[course]/[category]/[url_name]" display_name="test2"></compare></poll_compare>'
_context.update({'test':template_data})
_context.update({'base_asset_url': StaticContent.get_base_url_path_for_course_assets(self.location) + '/'})
return _context
def definition_to_xml(self, resource_fs):
if self.data is None:
return None
xml_object = etree.fromstring(self.data)
# xml_object.set('display_name', self.display_name)
return xml_object
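`backcompat_paths` is pure string manipulation and can be exercised standalone; a sketch of its output (assumes POSIX `os.sep` and that the module's imports resolve):
print(PollCompareDescriptor.backcompat_paths('poll_compare/foo/bar.html.xml'))
# ['poll_compare/foo/bar.html', 'foo/bar.html']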
| EduPepperPDTesting/pepper2013-testing | common/lib/xmodule/xmodule/templates/poll_compare_module.py | Python | agpl-3.0 | 10,491 |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
STORAGE_ACCOUNT_NAME = "fakename"
STORAGE_ACCOUNT_KEY = "fakekey"
TENANT_ID = "00000000-0000-0000-0000-000000000000"
CLIENT_ID = "00000000-0000-0000-0000-000000000000"
CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
ACCOUNT_URL_SUFFIX = 'core.windows.net'
RUN_IN_LIVE = "False"
SKIP_LIVE_RECORDING = "True"
PROTOCOL = "https"
| Azure/azure-sdk-for-python | sdk/storage/azure-storage-queue/tests/settings/settings_fake.py | Python | mit | 647 |
from __future__ import division, print_function
import cPickle as pickle
from blocks.extensions.saveload import Checkpoint, SAVED_TO
from blocks.serialization import secure_dump
class PartsOnlyCheckpoint(Checkpoint):
def __init__(self, path, **kwargs):
super(PartsOnlyCheckpoint, self).__init__(path=path, **kwargs)
self.iteration = 1
def do(self, callback_name, *args):
"""Pickle the save_separately parts (and not the main loop object) to disk.
If `*args` contain an argument from user, it is treated as
saving path to be used instead of the one given at the
construction stage.
"""
_, from_user = self.parse_args(callback_name, args)
try:
path = self.path
if from_user:
path, = from_user
### this line is disabled from superclass impl to bypass using blocks.serialization.dump
### because pickling main thusly causes pickling error:
### "RuntimeError: maximum recursion depth exceeded while calling a Python object"
# secure_dump(self.main_loop, path, use_cpickle=self.use_cpickle)
filenames = self.save_separately_filenames(path)
for attribute in self.save_separately:
secure_dump(getattr(self.main_loop, attribute), filenames[attribute] + '_%d.pkl' % self.iteration, pickle.dump, protocol=pickle.HIGHEST_PROTOCOL)
self.iteration += 1
except Exception:
path = None
raise
finally:
already_saved_to = self.main_loop.log.current_row.get(SAVED_TO, ())
self.main_loop.log.current_row[SAVED_TO] = (already_saved_to + (path,))
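A minimal wiring sketch (the path and schedule are hypothetical; assumes Blocks is installed and a MainLoop is configured elsewhere):
from blocks.main_loop import MainLoop  # imported only for the sketch below

checkpoint = PartsOnlyCheckpoint('checkpoints/draw',
                                 save_separately=['log'],
                                 every_n_epochs=1)
# main_loop = MainLoop(algorithm=algorithm, data_stream=stream,
#                      extensions=[checkpoint])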
| ablavatski/draw | draw/partsonlycheckpoint.py | Python | mit | 1,714 |
"""The tests for the notify demo platform."""
import tempfile
import unittest
from homeassistant.bootstrap import setup_component
import homeassistant.components.notify as notify
from homeassistant.components.notify import demo
from homeassistant.helpers import script
from homeassistant.util import yaml
from tests.common import get_test_home_assistant
class TestNotifyDemo(unittest.TestCase):
"""Test the demo notify."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.assertTrue(setup_component(self.hass, notify.DOMAIN, {
'notify': {
'platform': 'demo'
}
}))
self.events = []
self.calls = []
def record_event(event):
"""Record event to send notification."""
self.events.append(event)
self.hass.bus.listen(demo.EVENT_NOTIFY, record_event)
def tearDown(self): # pylint: disable=invalid-name
""""Stop down everything that was started."""
self.hass.stop()
def record_calls(self, *args):
"""Helper for recording calls."""
self.calls.append(args)
def test_sending_none_message(self):
"""Test send with None as message."""
notify.send_message(self.hass, None)
self.hass.block_till_done()
self.assertTrue(len(self.events) == 0)
def test_sending_templated_message(self):
"""Send a templated message."""
self.hass.states.set('sensor.temperature', 10)
notify.send_message(self.hass, '{{ states.sensor.temperature.state }}',
'{{ states.sensor.temperature.name }}')
self.hass.block_till_done()
last_event = self.events[-1]
self.assertEqual(last_event.data[notify.ATTR_TITLE], 'temperature')
self.assertEqual(last_event.data[notify.ATTR_MESSAGE], '10')
def test_method_forwards_correct_data(self):
"""Test that all data from the service gets forwarded to service."""
notify.send_message(self.hass, 'my message', 'my title',
{'hello': 'world'})
self.hass.block_till_done()
self.assertTrue(len(self.events) == 1)
data = self.events[0].data
assert {
'message': 'my message',
'target': None,
'title': 'my title',
'data': {'hello': 'world'}
} == data
def test_calling_notify_from_script_loaded_from_yaml_without_title(self):
"""Test if we can call a notify from a script."""
yaml_conf = """
service: notify.notify
data:
data:
push:
sound: US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav
data_template:
message: >
Test 123 {{ 2 + 2 }}
"""
with tempfile.NamedTemporaryFile() as fp:
fp.write(yaml_conf.encode('utf-8'))
fp.flush()
conf = yaml.load_yaml(fp.name)
script.call_from_config(self.hass, conf)
self.hass.block_till_done()
self.assertTrue(len(self.events) == 1)
assert {
'message': 'Test 123 4',
'target': None,
'data': {
'push': {
'sound':
'US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav'}}
} == self.events[0].data
def test_calling_notify_from_script_loaded_from_yaml_with_title(self):
"""Test if we can call a notify from a script."""
yaml_conf = """
service: notify.notify
data:
data:
push:
sound: US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav
data_template:
title: Test
message: >
Test 123 {{ 2 + 2 }}
"""
with tempfile.NamedTemporaryFile() as fp:
fp.write(yaml_conf.encode('utf-8'))
fp.flush()
conf = yaml.load_yaml(fp.name)
script.call_from_config(self.hass, conf)
self.hass.block_till_done()
self.assertTrue(len(self.events) == 1)
assert {
'message': 'Test 123 4',
'title': 'Test',
'target': None,
'data': {
'push': {
'sound':
'US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav'}}
} == self.events[0].data
def test_targets_are_services(self):
"""Test that all targets are exposed as individual services."""
self.assertTrue(self.hass.services.has_service("notify", "demo"))
service = "demo_test_target_name"
self.assertTrue(self.hass.services.has_service("notify", service))
def test_messages_to_targets_route(self):
"""Test message routing to specific target services."""
self.hass.bus.listen_once("notify", self.record_calls)
self.hass.services.call("notify", "demo_test_target_name",
{'message': 'my message',
'title': 'my title',
'data': {'hello': 'world'}})
self.hass.block_till_done()
data = self.calls[0][0].data
assert {
'message': 'my message',
'target': 'test target id',
'title': 'my title',
'data': {'hello': 'world'}
} == data
| hexxter/home-assistant | tests/components/notify/test_demo.py | Python | mit | 5,298 |
#
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2005 Async Open Source
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <jdahlin@async.com.br>
"""Reimplementation of GtkComboBoxEntry in Python.
The main difference between the L{BaseComboBoxEntry} and GtkComboBoxEntry
is that a L{kiwi.ui.widgets.entry.Entry} is used instead of a GtkEntry."""
import gobject
import gtk
from kiwi.python import deprecationwarn
from kiwi.ui.entry import KiwiEntry
class BaseComboBoxEntry(gtk.ComboBox):
def __init__(self, model=None, text_column=-1):
deprecationwarn(
'ComboBoxEntry is deprecated, use ComboEntry instead',
stacklevel=3)
gtk.ComboBox.__init__(self)
self.entry = KiwiEntry()
# HACK: We need to set a private variable, this seems to
# be the only way of doing so
self.entry.start_editing(gtk.gdk.Event(gtk.gdk.BUTTON_PRESS))
self.add(self.entry)
self.entry.show()
self._text_renderer = gtk.CellRendererText()
self.pack_start(self._text_renderer, True)
self.set_active(-1)
self.entry_changed_id = self.entry.connect('changed',
self._on_entry__changed)
self._active_changed_id = self.connect("changed",
self._on_entry__active_changed)
self._has_frame_changed(None)
self.connect("notify::has-frame", self._has_frame_changed)
if not model:
model = gtk.ListStore(str)
text_column = 0
self.set_model(model)
self.set_text_column(text_column)
# Virtual methods
def do_mnemonic_activate(self, group_cycling):
self.entry.grab_focus()
return True
def do_grab_focus(self):
self.entry.grab_focus()
# Signal handlers
def _on_entry__active_changed(self, combobox):
iter = combobox.get_active_iter()
if not iter:
return
self.entry.handler_block(self.entry_changed_id)
model = self.get_model()
self.entry.set_text(model[iter][self._text_column])
self.entry.handler_unblock(self.entry_changed_id)
def _has_frame_changed(self, pspec):
has_frame = self.get_property("has-frame")
self.entry.set_has_frame(has_frame)
def _on_entry__changed(self, entry):
self.handler_block(self._active_changed_id)
self.set_active(-1)
self.handler_unblock(self._active_changed_id)
# Public API
def set_text_column(self, text_column):
self._text_column = text_column
if text_column != -1:
self.set_attributes(self._text_renderer, text=text_column)
def get_text_column(self):
return self._text_column
# IconEntry
def set_pixbuf(self, pixbuf):
self.entry.set_pixbuf(pixbuf)
def update_background(self, color):
self.entry.update_background(color)
def get_background(self):
return self.entry.get_background()
def get_icon_window(self):
return self.entry.get_icon_window()
gobject.type_register(BaseComboBoxEntry)
def test():
win = gtk.Window()
win.connect('delete-event', gtk.main_quit)
e = BaseComboBoxEntry()
win.add(e)
m = gtk.ListStore(str)
m.append(['foo'])
m.append(['bar'])
m.append(['baz'])
e.set_model(m)
win.show_all()
gtk.main()
if __name__ == '__main__':
test()
| fboender/miniorganizer | src/lib/kiwi/ui/comboboxentry.py | Python | gpl-3.0 | 4,180 |
#!/usr/bin/env python
"""
OctoKissUpload - Upload Gcode to Octoprint
Based on: https://github.com/quillford/OctoUpload
Todo
1: SSL support
"""
version = "0.01"
import sys
import os
import base64
import socket
import urllib
import urllib2
import mimetools
hostIP = '192.168.88.14'
octoPort = '5000'
apiKey = '7CC15816FBE7459XXXXXXXXXX'
sendLoc = 'local' # or 'sdcard'
gcodeExt = 'gcode'
sslBool = 'no'
printBool = 'no'
selectBool = 'yes'
timeout = 15
socket.setdefaulttimeout(timeout)
def prepare_file(infile, gcodeExt=gcodeExt):
#remove extension user may have used on the filename
outputName = os.path.splitext(infile)[0]
print outputName
#remove . user may have used on extension
if gcodeExt.find(".") != -1:
gcodeExt = gcodeExt.split(".")[1]
#print gcodeExt
#add extension user specifies
if gcodeExt == "g":
outputName = outputName + "." + gcodeExt
elif gcodeExt == "gco":
outputName = outputName + "." + gcodeExt
else:
outputName = outputName + ".gcode"
print "Ext: " + outputName
send_file(outputName)
def send_file(filename,
sslBool=sslBool,
sendLoc=sendLoc,
printBool=printBool,
selectBool=selectBool,
apiKey=apiKey,
hostIP=hostIP,
octoPort=octoPort):
#username = "spec"
#password = "password"
outputName = os.path.split(filename)[1]
#allows for SSL if user specifies
if sslBool == "yes":
protocol = "https://"
else:
protocol = "http://"
#sends the gcode to either sd or local
if sendLoc == "sdcard":
url = protocol + hostIP + ":" + octoPort + "/api/files/sdcard"
else:
url = protocol + hostIP + ":" + octoPort + "/api/files/local"
#makes sure user submits a valid option for selecting
if selectBool not in ('yes', 'no'):
selectBool = 'no'
print "Select: " + selectBool
#makes sure user submits a valid option for printing
if printBool not in ('yes', 'no'):
printBool = 'no'
print "Print: " + selectBool
filebody = open(filename, 'rb').read()
mimetype = 'application/octet-stream'
boundary = mimetools.choose_boundary()
content_type = 'multipart/form-data; boundary=%s' % boundary
body = []
body_boundary = '--' + boundary
body = [ body_boundary,
'Content-Disposition: form-data; name="file"; filename="%s"' % outputName,
'Content-Type: %s' % mimetype,
'',
filebody,
'--' + boundary,
'Content-Disposition: form-data; name="select"',
'',
selectBool,
'--' + boundary,
'Content-Disposition: form-data; name="print"',
'',
printBool,
]
body.append('--' + boundary + '--')
body.append('')
body = '\r\n'.join(body)
req = urllib2.Request(url)
# Uncomment below two lines for basic auth support. (Used in cases where haproxy is in front of octoprint, with basic auth enabled).
#base64string = base64.encodestring('%s:%s' % (username, password))
#req.add_header("Authorization", "Basic %s" % base64string)
req.add_header('User-agent', 'Cura AutoUploader Plugin')
req.add_header('Content-type', content_type)
req.add_header('Content-length', len(body))
req.add_header('X-Api-Key', apiKey)
req.add_data(body)
print "Uploading..."
print urllib2.urlopen(req).read()
print "Done"
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit('usage: OctoKissUpload.py <filename>')
infilename = sys.argv[1]
prepare_file(infilename)
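The hand-rolled multipart body above predates `requests`; the same upload as a short modern sketch (host and key are placeholders; assumes `requests` is installed and uses OctoPrint's documented `select`/`print` form fields):
import os
import requests

def upload_gcode(path, host='192.168.88.14', port=5000, api_key='CHANGE_ME'):
    url = 'http://%s:%s/api/files/local' % (host, port)
    with open(path, 'rb') as f:
        files = {'file': (os.path.basename(path), f, 'application/octet-stream')}
        data = {'select': 'true', 'print': 'false'}
        resp = requests.post(url, headers={'X-Api-Key': api_key},
                             files=files, data=data, timeout=15)
    resp.raise_for_status()
    return resp.json()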
| artekw/OctoKissUpload | OctoKissUpload.py | Python | mit | 3,278 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
parse_duration,
)
class MojvideoIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?mojvideo\.com/video-(?P<display_id>[^/]+)/(?P<id>[a-f0-9]+)'
_TEST = {
'url': 'http://www.mojvideo.com/video-v-avtu-pred-mano-rdecelaska-alfi-nipic/3d1ed4497707730b2906',
'md5': 'f7fd662cc8ce2be107b0d4f2c0483ae7',
'info_dict': {
'id': '3d1ed4497707730b2906',
'display_id': 'v-avtu-pred-mano-rdecelaska-alfi-nipic',
'ext': 'mp4',
'title': 'V avtu pred mano rdečelaska - Alfi Nipič',
'thumbnail': 're:^http://.*\.jpg$',
'duration': 242,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
# XML is malformed
playerapi = self._download_webpage(
'http://www.mojvideo.com/playerapi.php?v=%s&t=1' % video_id, display_id)
if '<error>true</error>' in playerapi:
error_desc = self._html_search_regex(
r'<errordesc>([^<]*)</errordesc>', playerapi, 'error description', fatal=False)
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_desc), expected=True)
title = self._html_search_regex(
r'<title>([^<]+)</title>', playerapi, 'title')
video_url = self._html_search_regex(
r'<file>([^<]+)</file>', playerapi, 'video URL')
thumbnail = self._html_search_regex(
r'<preview>([^<]+)</preview>', playerapi, 'thumbnail', fatal=False)
duration = parse_duration(self._html_search_regex(
r'<duration>([^<]+)</duration>', playerapi, 'duration', fatal=False))
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
}
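A standalone check of the `_VALID_URL` pattern above, using only the standard library and the test URL from `_TEST`:
import re

pattern = (r'https?://(?:www\.)?mojvideo\.com/'
           r'video-(?P<display_id>[^/]+)/(?P<id>[a-f0-9]+)')
m = re.match(pattern, 'http://www.mojvideo.com/'
             'video-v-avtu-pred-mano-rdecelaska-alfi-nipic/3d1ed4497707730b2906')
print(m.group('id'))          # 3d1ed4497707730b2906
print(m.group('display_id'))  # v-avtu-pred-mano-rdecelaska-alfi-nipic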
| apllicationCOM/youtube-dl-api-server | youtube_dl_server/youtube_dl/extractor/mojvideo.py | Python | unlicense | 2,093 |
# These values must be replaced with the parameters of your own server, users, and passwords
userDb = "userDb"
passDb = "passDb"
mail = "*********@gmail.com"
passMail = "passMail"
nameDb = "domotics_db"
urlDb = "urlDb"
serverPort = 8080
#Security Code Device
updateCode = "UPDATE device SET code = '%s' WHERE id = '%s' AND (code = '%s' OR connectionStatus = 0)"
updateCodeRemote = "UPDATE device SET code = '%s' WHERE idDevice = '%s'"
#manage Port
selectGetPort = "SELECT port FROM device WHERE id = '%s' AND code ='%s'"
#Remotes
selectGetDevicesRemote = "SELECT deviceRemote.id AS id, deviceRemote.pipeSend AS pipeSend, deviceRemote.pipeRecv AS pipeRecv, deviceRemote.type AS type FROM device deviceRemote, device deviceCentral WHERE deviceRemote.idDevice = deviceCentral.id AND deviceCentral.id = '%s' AND deviceCentral.code = '%s'"
#Type device
selectGetTypeDevice = "SELECT type FROM device WHERE id = '%s' AND code ='%s'"
#Get User id
selectUserId = "SELECT id FROM user WHERE login = '%s' AND password = '%s'"
#Check users and mails
selectUserExists = "SELECT login FROM user WHERE login = '%s'"
selectMailExists = "SELECT login FROM user WHERE mail = '%s'"
selectUserExistsCheck = "SELECT login FROM user WHERE login = '%s' AND active = '1'"
selectMailExistsWithoutCheck = "SELECT login FROM user WHERE mail = '%s' AND active != '1'"
#SignIn user
insertSignIn = "INSERT INTO user (login, name, mail, password, active) VALUES ('%s', '%s', '%s', '%s', '%d')"
updateSignIn = "UPDATE user SET login = '%s', name = '%s', password = '%s', active = '%d' WHERE mail = '%s'"
#Check SignIn
updateCheckSignIn = "UPDATE user SET active = 1 WHERE login = '%s' AND password = '%s' AND active = '%s'"
#LogIn
selectLogIn = "SELECT id, name, active FROM user WHERE login = '%s' AND password = '%s' AND active = '1'"
#List locations of user
selectLocationsUser = "SELECT location.id AS id, location.name AS name, location.security AS security FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s'"
#Check Device User
checkDeviceUser = "SELECT device.id AS idDevice FROM user, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s' AND device.id = '%s'"
#Check Location User
checkLocationUser = "SELECT userLocation.idLocation AS idLocation FROM userLocation WHERE userLocation.idUser = '%s' AND userLocation.idLocation = '%s'"
#list devices of locations and user
selectDevicesLocation = "SELECT device.id AS id, device.name AS name, device.publicIp AS publicIp, device.privateIp AS privateIp, device.port AS port, DATE_FORMAT(device.timeStamp,'%%d/%%m/%%Y %%H:%%i:%%s') AS timeStamp, device.connectionStatus AS connectionStatus, device.RIPMotion AS RIPMotion, device.alarm AS alarm, device.type AS type, device.idDevice AS idDevice, device.pipeSend AS pipeSend, device.pipeRecv AS pipeRecv, device.code AS code, device.connectionMode AS connectionMode, device.version AS version FROM user, location, device, userLocation, locationDevice WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND location.id = '%s' AND location.id = userLocation.idLocation AND userLocation.idUser = user.id AND user.id = '%s'"
#create new location
selectCheckLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND location.name = '%s'"
insertLocation = "INSERT INTO location (name, security) VALUES ('%s','1')"
insertLocationUser = "INSERT INTO userLocation (idUser, idLocation) VALUES ('%s','%s')"
#edit location
selectCheckUpdateLocationUser = "SELECT location.name AS name FROM user, location, userLocation WHERE userLocation.idUser = user.id AND userLocation.idLocation = location.id AND user.id = '%s' AND location.name = '%s' AND location.id != '%s'"
updateLocation = "UPDATE location SET name = '%s' WHERE id = '%s'"
updateLocationSecurity = "UPDATE location SET security = '%s' WHERE id = '%s'"
#delete location
deleteUserLocation = "DELETE FROM userLocation WHERE idLocation = '%s'"
deleteLocation = "DELETE FROM location WHERE id = '%s'"
#insert device
insertDeviceServer = "INSERT INTO device (name, port, timeStamp, type, idDevice) VALUES ('%s', '%s', NOW(), '%s', '%s')"
insertLocationDevice = "INSERT INTO locationDevice (idLocation, idDevice) VALUES ('%s', '%s')"
#Update Devices
updateDevice = "UPDATE device SET name = '%s', port = '%s', connectionMode = '%s', RIPMotion = '%s' WHERE id = '%s'"
updateDevicePipes = "UPDATE device SET pipeSend = '%s', pipeRecv = '%s' WHERE id = '%s'"
updateIpDevice = "UPDATE device SET publicIp = '%s', privateIp = '%s' WHERE id = '%s' AND code = '%s'"
updateNotOnline = "UPDATE device SET connectionStatus = '0' WHERE connectionStatus != '0' AND TIMEDIFF(NOW(), device.timeStamp) > TIME('00:01:00')"
updateOnline = "UPDATE device SET connectionStatus = '%s', device.timeStamp = NOW() WHERE id = '%s' AND code = '%s'"
#Check Device Remote for Delete
checkDeviceRemote = "SELECT id FROM device WHERE idDevice = '%s'"
#Delete devices
deleteTimerDevice = "DELETE FROM timer WHERE idDevice = '%s'"
deleteAlertDevice = "DELETE FROM alert WHERE idDevice = '%s'"
deleteSensorsData = "DELETE FROM sensors WHERE idDevice = '%s'"
deleteLocationDevice = "DELETE FROM locationDevice WHERE idDevice = '%s'"
deleteDevice = "DELETE FROM device WHERE id = '%s'"
#Security
selectLocationSecurity = "SELECT user.mail AS email, user.name AS nameUser, location.id AS idLocation, location.security AS security, location.name AS nameLocation, device.name AS nameDevice, device.RIPMotion AS RIPMotion, device.alarm AS alarm FROM location, device, locationDevice, userLocation, user WHERE device.id = locationDevice.idDevice AND locationDevice.idLocation = location.id AND device.id ='%s' AND device.code = '%s' AND userLocation.idLocation = location.id AND userLocation.idUser = user.id"
updateAlarm = "UPDATE device SET alarm = '%s' WHERE id = '%s'"
selectDevicesLocationOpenPort = "SELECT device.id AS id, device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.RIPMotion = '1'"
selectDevicesLocationUserOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice, userLocation WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND userLocation.idLocation = locationDevice.idLocation AND userLocation.idUser = '%s'"
selectDevicesOtherLocationOpenPort = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation <> '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1'"
selectDevicesLocationOpenPortCameras = "SELECT device.publicIp AS publicIp, device.port AS port, device.name AS name, device.code AS code FROM device, locationDevice WHERE locationDevice.idLocation = '%s' AND locationDevice.idDevice = device.id AND device.connectionStatus = '1' AND device.type = '2'"
checkDeviceAlarmStatus = "SELECT alarm FROM device WHERE id = '%s' AND code ='%s'"
#Alert
insertAlert = "INSERT INTO alert (date, time, type, idDevice) VALUES (CURRENT_DATE(), CURRENT_TIME(), '%s', '%s')"
checkInsertAlert = "SELECT id FROM alert WHERE alert.type = '%s' AND alert.idDevice = '%s' AND alert.date = CURRENT_DATE() AND CURRENT_TIME()-alert.time < TIME('00:02:00')"
selectAlert = "SELECT DATE_FORMAT(alert.date,'%%d/%%m/%%Y') AS date, DATE_FORMAT(alert.time,'%%H:%%i') AS time, alert.type AS type FROM device, alert, locationDevice, userLocation WHERE device.id = alert.idDevice AND device.id = '%s' AND alert.date = STR_TO_DATE('%s','%%d/%%m/%%Y') AND locationDevice.idDevice = device.id AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = '%s' ORDER BY alert.id DESC"
#Sensors
insertSensors = "INSERT INTO sensors (temperature, humidity, pressure, brightness, date, time, idDevice) VALUES ('%s', '%s', '%s', '%s', CURRENT_DATE(), CURRENT_TIME(), '%s')"
selectSensors = "SELECT temperature, humidity, pressure, brightness, DATE_FORMAT(sensors.time,'%%H:%%i') AS time FROM device, sensors, locationDevice, userLocation WHERE device.id = sensors.idDevice AND device.id = '%s' AND sensors.date = STR_TO_DATE('%s','%%d/%%m/%%Y') AND locationDevice.idDevice = device.id AND locationDevice.idLocation = userLocation.idLocation AND userLocation.idUser = '%s' ORDER BY sensors.id DESC"
#Timer
selectTimer = "SELECT id, name, active, DATE_FORMAT(time,'%%H:%%i') AS time, action FROM timer WHERE idDevice = '%s' ORDER BY time"
insertTimer = "INSERT INTO timer (name, active, time, action, idDevice) VALUES ('%s', '1', '%s', '%s', '%s')"
updateTimer = "UPDATE timer SET name = '%s', active = '%s', time = '%s', action = '%s' WHERE id = '%s' and idDevice = '%s'"
deleteTimer = "DELETE FROM timer WHERE id = '%s' and idDevice = '%s'"
selectTimerAutomation = "SELECT timer.action AS action, CURRENT_TIME()-timer.time AS diff FROM timer, device WHERE timer.idDevice = '%s' AND timer.idDevice = device.id AND device.code = '%s' AND timer.active = '1' AND CURRENT_TIME()-timer.time < TIME('00:01:00') AND CURRENT_TIME > timer.time ORDER BY 1"
#SoftwareUpdate
selectDeviceVersion = "SELECT version FROM device WHERE id = '%s' AND code ='%s'"
updateVersionDevice = "UPDATE device SET version = '%s' WHERE id = '%s' AND code = '%s'"
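# Usage sketch (illustrative, not part of the original module): these templates
# are filled in with classic %-style string formatting by the server code, e.g.
#
#   query = updateAlarm % ('1', '42')
#   # -> "UPDATE device SET alarm = '1' WHERE id = '42'"
#
# Because values are interpolated directly into the SQL text, callers must
# escape their inputs first; parameterized execution such as
#   cursor.execute("UPDATE device SET alarm = %s WHERE id = %s", (alarm, dev))
# would be the safer pattern.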
|
PascualArroyo/Domotics
|
Server/myconfig.py
|
Python
|
gpl-2.0
| 9,785
|
"""
Tests for the InstructorService
"""
import json
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from courseware.models import StudentModule
from instructor.services import InstructorService
from instructor.tests.test_tools import msk_from_problem_urlname
from nose.plugins.attrib import attr
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@attr('shard_1')
class InstructorServiceTests(ModuleStoreTestCase):
"""
Tests for the InstructorService
"""
def setUp(self):
super(InstructorServiceTests, self).setUp()
self.course = CourseFactory.create()
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.problem_location = msk_from_problem_urlname(
self.course.id,
'robot-some-problem-urlname'
)
self.other_problem_location = msk_from_problem_urlname(
self.course.id,
'robot-some-other_problem-urlname'
)
self.problem_urlname = unicode(self.problem_location)
self.other_problem_urlname = unicode(self.other_problem_location)
self.service = InstructorService()
self.module_to_reset = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 2}),
)
def test_reset_student_attempts_delete(self):
"""
Test delete student state.
"""
# make sure the attempt is there
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
1
)
self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.problem_urlname
)
# make sure the module has been deleted
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
0
)
def test_reset_bad_content_id(self):
"""
Negative test of trying to reset attempts with bad content_id
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_bad_user(self):
"""
Negative test of trying to reset attempts with bad user identifier
"""
result = self.service.delete_student_attempt(
'bad_student',
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_non_existing_attempt(self):
"""
        Negative test of trying to reset an attempt that was never created
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.other_problem_urlname
)
self.assertIsNone(result)
|
jazztpt/edx-platform
|
lms/djangoapps/instructor/tests/test_services.py
|
Python
|
agpl-3.0
| 3,469
|
# ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
from unittest import TestCase, main
from lucene import *
class TermRangeQueryTestCase(TestCase):
"""
Unit tests ported from Java Lucene
"""
def _initializeIndex(self, values):
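        # The third IndexWriter argument is the `create` flag: True here
        # rebuilds the index from scratch (contrast _addDoc below, which
        # passes False to append to the existing index).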
writer = IndexWriter(self.dir, WhitespaceAnalyzer(), True,
IndexWriter.MaxFieldLength.LIMITED)
for value in values:
self._insertDoc(writer, value)
writer.close()
def _insertDoc(self, writer, content):
doc = Document()
doc.add(Field("id", "id" + str(self.docCount),
Field.Store.YES, Field.Index.NOT_ANALYZED))
doc.add(Field("content", content,
Field.Store.NO, Field.Index.ANALYZED))
writer.addDocument(doc)
self.docCount += 1
def _addDoc(self, content):
writer = IndexWriter(self.dir, WhitespaceAnalyzer(), False,
IndexWriter.MaxFieldLength.LIMITED)
self._insertDoc(writer, content)
writer.close()
def setUp(self):
self.docCount = 0
self.dir = RAMDirectory()
def testExclusive(self):
query = TermRangeQuery("content", "A", "C", False, False)
self._initializeIndex(["A", "B", "C", "D"])
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(1, topDocs.totalHits,
"A,B,C,D, only B in range")
searcher.close()
self._initializeIndex(["A", "B", "D"])
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(1, topDocs.totalHits,
"A,B,D, only B in range")
searcher.close()
self._addDoc("C")
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(1, topDocs.totalHits,
"C added, still only B in range")
searcher.close()
def testInclusive(self):
query = TermRangeQuery("content", "A", "C", True, True)
self._initializeIndex(["A", "B", "C", "D"])
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(3, topDocs.totalHits,
"A,B,C,D - A,B,C in range")
searcher.close()
self._initializeIndex(["A", "B", "D"])
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(2, topDocs.totalHits,
"A,B,D - A and B in range")
searcher.close()
self._addDoc("C")
searcher = IndexSearcher(self.dir, True)
topDocs = searcher.search(query, 50)
self.assertEqual(3, topDocs.totalHits,
"C added - A, B, C in range")
searcher.close()
if __name__ == "__main__":
import sys, lucene
lucene.initVM()
if '-loop' in sys.argv:
sys.argv.remove('-loop')
while True:
try:
main()
except:
pass
else:
main()
|
fnp/pylucene
|
test/test_TermRangeQuery.py
|
Python
|
apache-2.0
| 3,825
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from .forms import UserChangeForm, UserCreationForm
from .models import User
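# Note: the class below deliberately shadows the imported UserAdmin; the base
# class reference is resolved before the new name is bound, so subclassing
# works, but the stock UserAdmin is no longer reachable in this module.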
class UserAdmin(UserAdmin):
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Permissions'), {'fields': ('is_superuser', 'user_permissions')})
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')}
),
)
form = UserChangeForm
add_form = UserCreationForm
list_display = ('email', 'is_staff')
list_filter = ('is_superuser',)
search_fields = ('email',)
ordering = ('email',)
filter_horizontal = ('groups', 'user_permissions',)
admin.site.register(User, UserAdmin)
|
asi-pwr/plns
|
plns/users/admin.py
|
Python
|
mit
| 830
|
"""
Parallelization utilities
"""
from functools import reduce
import multiprocessing
class Sketch(object):
@staticmethod
def initializer(cls, *args):
"""
FIXME: use of global not really nice (possible root of mysterious issue for user)
"""
global sketch_constructor
def sketch_constructor():
return cls(*args)
@staticmethod
def map_sequence(sequence):
"""
- sequence: a bytes-like object
return: a sketch
"""
mhs = sketch_constructor()
mhs.add(sequence)
return mhs
@staticmethod
def map_sequences(iterable):
"""
- iterable: an iterable of bytes-like objects
return: a sketch
"""
mhs = sketch_constructor()
for sequence in iterable:
mhs.add(sequence)
return mhs
@staticmethod
def reduce(a, b):
"""
Update the sketch a with the content of sketch b.
- a: a sketch
- b: a sketch
return a.update(b)
"""
a.update(b)
return a
class SketchList(object):
@staticmethod
def initializer(clslist, argslist):
"""
FIXME: use of global not really nice (possible root of mysterious issue for user)
"""
# Allow automagic expansion of the list of classes
if len(clslist) == 1:
clslist = tuple(clslist[0] for x in range(len(argslist)))
# Allow automagic expansion of the list of args
if len(argslist) == 1:
argslist = tuple(argslist[0] for x in range(len(clslist)))
if len(clslist) != len(argslist):
raise ValueError("The arguments argslist and clslist must be sequences of either the "
"same length, or of length 1.")
global sketchlist_constructor
def sketchlist_constructor():
return (cls(*args) for cls, args in zip(clslist, argslist))
@staticmethod
def map_sequence(sequence):
"""
- sequence: a bytes-like object
        return: a tuple of sketches (one per configured sketch class)
"""
mhslist = tuple(sketchlist_constructor())
for mhs in mhslist:
mhs.add(sequence)
return mhslist
@staticmethod
def map_sequences(iterable):
"""
- iterable: an iterable of bytes-like objects
        return: a tuple of sketches (one per configured sketch class)
        """
        # Materialise the generator once: iterating a fresh generator inside
        # the per-sequence loop would exhaust it after the first sequence.
        mhslist = tuple(sketchlist_constructor())
        for sequence in iterable:
            for mhs in mhslist:
                mhs.add(sequence)
        return mhslist
@staticmethod
def reduce(alist, blist):
"""
Update the sketch a with the content of sketch b.
- alist: a sequence of sketches
- blist: a sequence of sketches
return alist after each of its elements has been updated to the corresponding element in blist
"""
for a,b in zip(alist, blist):
a.update(b)
return alist
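# Minimal usage sketch (illustrative; `MySketch`, `size` and `seed` are
# placeholders for a real sketch class and its constructor arguments). The
# static methods above are designed to be handed to a multiprocessing.Pool:
#
#     pool = multiprocessing.Pool(processes=4,
#                                 initializer=Sketch.initializer,
#                                 initargs=(MySketch, size, seed))
#     partial_sketches = pool.map(Sketch.map_sequences, batches_of_sequences)
#     combined = reduce(Sketch.reduce, partial_sketches)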
|
lgautier/mashing-pumpkins
|
src/parallel.py
|
Python
|
mit
| 2,984
|
#!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
''' Toolbar preprocessing code. Turns all IDS_COMMAND macros in the RC file
into simpler constructs that can be understood by GRIT. Also deals with
expansion of $lf; placeholders into the correct linefeed character.
'''
import preprocess_interface
import re
import sys
import codecs
class ToolbarPreProcessor(preprocess_interface.PreProcessor):
''' Toolbar PreProcessing class.
'''
_IDS_COMMAND_MACRO = re.compile(r'(.*IDS_COMMAND)\s*\(([a-zA-Z0-9_]*)\s*,\s*([a-zA-Z0-9_]*)\)(.*)')
_LINE_FEED_PH = re.compile(r'\$lf;')
_PH_COMMENT = re.compile(r'PHRWR')
_COMMENT = re.compile(r'^(\s*)//.*')
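  # Example of the IDS_COMMAND rewrite performed in Process() below:
  #   "MENU IDS_COMMAND(back, label)"  ->  "MENU IDS_COMMAND_back_label"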
def Process(self, rctext, rcpath):
''' Processes the data in rctext.
Args:
rctext: string containing the contents of the RC file being processed
rcpath: the path used to access the file.
Return:
The processed text.
'''
ret = ''
rclines = rctext.splitlines()
for line in rclines:
if self._LINE_FEED_PH.search(line):
        # Replace "$lf;" placeholder comments with an empty comment line;
        # the placeholder text will not be put into the processed result.
if self._PH_COMMENT.search(line):
mm = self._COMMENT.search(line)
if mm:
line = '%s//' % mm.group(1)
else:
# Replace $lf by the right linefeed character
line = self._LINE_FEED_PH.sub(r'\\n', line)
# Deal with IDS_COMMAND_MACRO stuff
mo = self._IDS_COMMAND_MACRO.search(line)
if mo:
line = '%s_%s_%s%s' % (mo.group(1), mo.group(2), mo.group(3), mo.group(4))
ret += (line + '\n')
return ret
|
meego-tablet-ux/meego-app-browser
|
tools/grit/grit/tool/toolbar_preprocess.py
|
Python
|
bsd-3-clause
| 1,794
|
# -*- coding: utf-8 -*-
# Copyright 2019 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import (
models,
)
|
open-synergy/opnsynid-hr
|
hr_employee_employment_status/__init__.py
|
Python
|
agpl-3.0
| 163
|
#!/usr/bin/env python
# Modules
import sys
from classes.state import State
# Process input: parse and act according to verb and arguments
def process_input(state, data):
split = data.partition(' ')
instr = split[0]
args = split[2]
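    # str.partition splits on the first space only, e.g.
    #   'enter red door'.partition(' ') -> ('enter', ' ', 'red door')
    # so instr = 'enter' and args = 'red door'.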
if instr == '':
return
elif instr == 'exit' or instr == 'quit':
print state.locale.get_string('exit-confirm',[])
noinput = True
while noinput:
response = raw_input(state.locale.get_string('exit-prompt',[]))
if response == state.locale.get_phrases('exit-responses')[0]:
print state.locale.get_string('exit-no-save', [])
exit() # End of program
elif response == state.locale.get_phrases('exit-responses')[1]:
noinput = False # Continue execution
elif instr == 'h' or instr == 'help':
print state.locale.get_string('help',[])
elif instr == 'look':
print state.locale.get_string('look',[])
state.print_desc()
elif instr == 'sit':
state.sit()
print state.locale.get_string('sit',[])
elif instr == 'stand':
state.stand()
print state.locale.get_string('stand', [])
elif instr == 'back':
print state.locale.get_string('back', [])
elif instr == 'enter':
desired_door = ''
if state.sitting:
print state.locale.get_string('enter-sitting', [])
elif args == '':
desired_door = raw_input(state.locale.get_string('enter-prompt',[]))
else:
desired_door = args
if state.check_valid_door(desired_door):
print state.locale.get_string('enter-valid', [desired_door])
state.next_state() # to get to next state
state.print_state()
else:
print state.locale.get_string('enter-invalid', [])
state.print_desc()
elif instr == 'set-locale':
desired_locale = ''
if args == '':
desired_locale = raw_input(state.locale.get_string('locale-prompt',[]))
else:
desired_locale = args
if state.locale.set_locale(desired_locale):
print state.locale.get_string('locale-set-success',[desired_locale])
else:
print state.locale.get_string('locale-set-failure',[desired_locale])
elif instr == 'check-locale':
print state.locale.get_locale()
elif instr == 'list-locales':
print state.locale.get_localizations()
elif instr == 'save':
if args == '':
response = raw_input(state.locale.get_string('save-prompt',[]))
print state.create_save(response)
else:
print state.create_save(args)
elif instr == 'load':
if args == '':
response = raw_input(state.locale.get_string('load-prompt',[]))
print state.load_save(response)
else:
print state.load_save(args)
elif instr == 'list-saves':
print state.locale.get_string('load-list-saves',[state.get_available_saves()])
elif instr == 'remove-save':
if args == '':
response = raw_input(state.locale.get_string('remove-prompt',[]))
print state.remove_save(response)
else:
print state.remove_save(args)
else:
print state.locale.get_string('invalid-input', [])
return
# Loop for our interface
def interface_loop():
state = State()
state.print_state()
# Input parsing block
while True:
data = raw_input('> ').lower()
process_input(state, data)
def main():
if len(sys.argv) > 1:
        interface_loop() # start the input loop; runs until the player exits
else:
print 'Run with any arguments to start.' # not localized
# Call the main() function to begin the program.
main()
|
thomasrussellmurphy/menachem
|
menachem.py
|
Python
|
gpl-3.0
| 3,977
|
'''
Created on 03.11.2013
@author: bernd
'''
if __name__ == '__main__':
import emLibrary.DataFile as emDF
from os import listdir
from os.path import isfile, join
import mysqldb.mysqldb as mysqldb
configFile = 'em.config'
mypath = '/home/bernd/git/EnergyMonitor/Messfiles/2013-11-26 ID 0'
files = [ mypath + "/" + f for f in listdir(mypath) if (isfile(join(mypath,f)) and (f.endswith('.bin') or f.endswith('.BIN'))) ]
print files
for i in files:
try:
db, user, passwd, host = mysqldb.load_DB_params(configFile)
print db, user, passwd, host
con = mysqldb.con_init(host, user, passwd, db)
emDF.DataFile(i).toSqlDB(con)
#print "Now do con.query"
# con.execute("SET AUTOCOMMIT=0")
#v = con.execute("""INSERT INTO `test`.`PandasTest`
# (`DateTime`, `Voltage`, `Current`, `Phase`)
# VALUES ('2013-11-13 19:05:00', 236.0, 0.0, 0.0);""")
#print "con.query done"
#print "Now do con.commit"
# con.execute("COMMIT")
#print "con.commit done"
print "Finished with tile: %s. " % i
mysqldb.con_close(con)
except:
print "Fault with %s, maybe a Info File!" % i
|
frodo81/EnergyMonitor
|
InsertAll.py
|
Python
|
gpl-2.0
| 1,342
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import MySQLdb
import numpy as np
from datetime import datetime
import matplotlib.pyplot as plt
class statistics_info:
def __init__(self):
self.num = 0
self.sum = 0.0
self.weeks = []
self.user_sum = [0 for i in range(7)]
class week_info:
def __init__(self):
self.sum = 0.0
self.max = 0.0
self.min = 0.0
self.mean = 0.0
def get_user_count(cursor, shop_id):
sql_str = "select shop_count,dates from shopcount where shop_id='" + str(shop_id) + "' order by dates asc"
cursor.execute(sql_str)
results = cursor.fetchall()
date = []
x_cnt = []
for row in results:
x_cnt.append(row[0])
date.append(row[1])
return (date, x_cnt)
def filter_some_days(days, flags, filter_days):
date_len = len(days)
for i in xrange(0, date_len):
day = days[i]
if day in filter_days:
flags[i] = False
def filter_whole_week(days, flags):
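    # Keep only runs of days that form complete Monday-to-Sunday weeks
    # (isocalendar weekday 1 is Monday); partial weeks at either end, or
    # broken up by filtered holidays, are flagged False so the weekly
    # statistics are not skewed.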
year = 0
weeknumber = 0
weekday = 0
date_len = len(days)
day_num = 0
for i in xrange(0, date_len):
if (flags[i] == False):
continue
day = days[i]
(year0, weeknumber0, weekday0) = day.isocalendar()
if (year == 0):
if (weekday0 == 1):
year = year0
weeknumber = weeknumber0
day_num = 1
else :
flags[i] = False
else :
if (weeknumber0 != weeknumber):
if (day_num != 7):
day_num0 = 0
for j in xrange(1, 8):
if (flags[i-j] == True and day_num0 < day_num):
flags[i-j] = False
day_num0 = day_num0 + 1
weeknumber = weeknumber0
day_num = 1
else :
day_num = day_num + 1
if (day_num != 7):
day_num0 = 0
for j in xrange(0, 7):
if (flags[i-j] == True and day_num0 < day_num):
flags[i-j] = False
day_num0 = day_num0 + 1
def handle_week(days, x_cnt):
week = week_info()
np_x_cnt = np.array(x_cnt)
week.min = np.min(np_x_cnt)
week.max = np.max(np_x_cnt)
week.mean = np.mean(np_x_cnt)
week.sum = np.sum(np_x_cnt)
return week
def handle_weeks(days, flags, x_cnt):
date_len = len(days)
total_info = statistics_info()
day_num = 0
i = 0
while (i < date_len):
if (flags[i] == True):
days0 = days[i : i + 7]
x_cnt0 = x_cnt[i : i + 7]
week = handle_week(days0, x_cnt0)
total_info.weeks.append(week)
total_info.sum = total_info.sum + week.sum
total_info.num = total_info.num + 7
for j in xrange(7):
total_info.user_sum[j] = total_info.user_sum[j] + x_cnt0[j]
i = i + 7
else:
i = i + 1
return total_info
def show_total_info(total_info):
mean = []
week_len = len(total_info.weeks)
for i in xrange(week_len):
mean.append(total_info.weeks[i].mean)
def output_predict(fp, shop_id, user_predict):
output_str = str(shop_id)
predict_len = len(user_predict)
for i in xrange(predict_len):
output_str = output_str + "," + str(int(user_predict[i]))
output_str = output_str + "\r\n"
fp.write(output_str)
db = MySQLdb.connect("localhost","root","kkkkkk","tc")
cursor = db.cursor()
fp = open("predict.csv","w")
for id in xrange(1, 2001):
print "shop_id: %d" % id
(date_item, x_cnt_item) = get_user_count(cursor, id)
date_len = len(date_item)
flags = [True for i in range(date_len)]
    filter_days = [datetime.date(datetime(2016, 5, 1)),
                   datetime.date(datetime(2016, 5, 3)),
                   datetime.date(datetime(2016, 10, 1)),
                   datetime.date(datetime(2016, 10, 7)),
                   datetime.date(datetime(2016, 2, 5)),
                   datetime.date(datetime(2016, 2, 12)),
                   datetime.date(datetime(2016, 2, 19)),
                   datetime.date(datetime(2016, 1, 1))]
filter_some_days(date_item, flags, filter_days)
filter_whole_week(date_item, flags)
#for i in xrange(0, date_len):
#print "%s:%s:%d" % (date_item[i], flags[i], x_cnt_item[i])
total = handle_weeks(date_item, flags, x_cnt_item)
np_user_sum = np.array(total.user_sum)
user_mean = np.mean(np_user_sum)
np_user_sum_ratio = np.array(total.user_sum) / user_mean
weeks_len = len(total.weeks)
last_week_sum = total.weeks[weeks_len - 1].sum
last_week_mean = last_week_sum / 7.0
last_week_sum2 = (total.weeks[weeks_len - 3].sum + total.weeks[weeks_len - 2].sum) / 2.0
last_week_mean2 = last_week_sum2 / 7.0
gradient = (last_week_mean - last_week_mean2) / (3.0 * last_week_mean)
gradient_threshold = 0.05
ratio_threshold = 2.0
if (abs(gradient) > gradient_threshold):
#print "xxx: %d" % id
total_mean = total.sum / total.num
ratio = last_week_mean2 / total_mean
if (ratio > ratio_threshold or ratio < (1 / ratio_threshold)):
#print "yyy: %d" % id
last_week_mean = total_mean
else :
#print "zzz: %d" % id
last_week_mean = last_week_mean2
if (gradient > 0):
gradient = -gradient_threshold * 0.5
else :
gradient = gradient_threshold * 0.5
gradient = 1 - gradient
#print "%r:%r:%r" % (last_week_mean, last_week_mean2, gradient)
predict = []
now_week_mean = last_week_mean * gradient
np_user_predict = now_week_mean * np_user_sum_ratio
user_predict = np.rint(np_user_predict)
predict.extend(user_predict[1:7])
now_week_mean = now_week_mean * gradient
np_user_predict = now_week_mean * np_user_sum_ratio
user_predict = np.rint(np_user_predict)
predict.extend(user_predict)
now_week_mean = now_week_mean * gradient
np_user_predict = now_week_mean * np_user_sum_ratio
user_predict = np.rint(np_user_predict)
predict.append(user_predict[0])
output_predict(fp, id, predict)
fp.close()
cursor.close()
db.close()
|
antkillerfarm/antkillerfarm_crazy
|
python/ml/tc/tc0209.py
|
Python
|
gpl-3.0
| 6,167
|
#!/usr/bin/env python
#encoding: utf8
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import re
import torndb
import tornado.auth
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import unicodedata
import os, sys
import xmlrpclib
from tornado.options import define, options
sys.path.append( os.path.join( os.path.abspath("."), "handlers" ) )
from home import HomeHandler
from home import Search
from home import SearchByHashInfo
from acc_heatmap import AccHeatMap
define("port", default=8888, help="run on the given port", type=int)
define("mysql_host", default="127.0.0.1:3306", help="blog database host")
define("mysql_database", default="hexiong", help="blog database name")
define("mysql_user", default="hexiong", help="blog database user")
define("mysql_password", default="hexiong", help="blog database password")
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", HomeHandler),
(r"/search", Search),
(r"/get", SearchByHashInfo),
(r"/acc_heatmap", AccHeatMap)
]
settings = dict(
blog_title=u"搜电影,找哈哈",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
debug=True,
)
tornado.web.Application.__init__(self, handlers, **settings)
# Have one global connection to the blog DB across all handlers
self.db = torndb.Connection(
host=options.mysql_host, database=options.mysql_database,
user=options.mysql_user, password=options.mysql_password)
self.db._db_args["init_command"] = ('set time_zone = "+8:00"')
self.db.reconnect()
self.db.execute( "set names utf8" )
self.seg = xmlrpclib.ServerProxy("http://localhost:8080/")
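        # Presumably a word-segmentation service for the Chinese search
        # queries, exposed over XML-RPC on the local machine (an assumption;
        # the handler code using self.seg is not shown in this file).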
def main():
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application(), xheaders=True)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
baiyang/haha-project
|
aha_web/aha.py
|
Python
|
gpl-2.0
| 2,822
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import matplotlib
import matplotlib.collections as mcollections
import matplotlib.text as mtext
import matplotlib.ticker as mticker
import matplotlib.transforms as mtrans
import numpy as np
import six
import cartopy
from cartopy.crs import Projection, _RectangularProjection
degree_locator = mticker.MaxNLocator(nbins=9, steps=[1, 2, 3, 6, 15, 18])
_DEGREE_SYMBOL = u'\u00B0'
def _fix_lons(lons):
"""
Fix the given longitudes into the range ``[-180, 180]``.
"""
lons = np.array(lons, copy=False, ndmin=1)
fixed_lons = ((lons + 180) % 360) - 180
# Make the positive 180s positive again.
fixed_lons[(fixed_lons == -180) & (lons > 0)] *= -1
return fixed_lons
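# e.g. _fix_lons([170, 190, 540, -190]) -> [170, -170, 180, 170]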
def _lon_hemisphere(longitude):
"""Return the hemisphere (E, W or '' for 0) for the given longitude."""
longitude = _fix_lons(longitude)
if longitude > 0:
hemisphere = 'E'
elif longitude < 0:
hemisphere = 'W'
else:
hemisphere = ''
return hemisphere
def _lat_hemisphere(latitude):
"""Return the hemisphere (N, S or '' for 0) for the given latitude."""
if latitude > 0:
hemisphere = 'N'
elif latitude < 0:
hemisphere = 'S'
else:
hemisphere = ''
return hemisphere
def _east_west_formatted(longitude, num_format='g'):
fmt_string = u'{longitude:{num_format}}{degree}{hemisphere}'
return fmt_string.format(longitude=abs(longitude), num_format=num_format,
                             hemisphere=_lon_hemisphere(longitude),
degree=_DEGREE_SYMBOL)
def _north_south_formatted(latitude, num_format='g'):
fmt_string = u'{latitude:{num_format}}{degree}{hemisphere}'
return fmt_string.format(latitude=abs(latitude), num_format=num_format,
                             hemisphere=_lat_hemisphere(latitude),
degree=_DEGREE_SYMBOL)
#: A formatter which turns longitude values into nice longitudes such as 110W
LONGITUDE_FORMATTER = mticker.FuncFormatter(lambda v, pos:
_east_west_formatted(v))
#: A formatter which turns latitude values into nice latitudes such as 45S
LATITUDE_FORMATTER = mticker.FuncFormatter(lambda v, pos:
_north_south_formatted(v))
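# e.g. LONGITUDE_FORMATTER(-110, None) -> u'110\xb0W' (110 degrees West),
# and LATITUDE_FORMATTER(45, None) -> u'45\xb0N'.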
class Gridliner(object):
# NOTE: In future, one of these objects will be add-able to a GeoAxes (and
# maybe even a plain old mpl axes) and it will call the "_draw_gridliner"
# method on draw. This will enable automatic gridline resolution
# determination on zoom/pan.
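    # Typical use (a sketch; gridlines() is the GeoAxes helper that
    # constructs this object):
    #
    #     ax = plt.axes(projection=cartopy.crs.PlateCarree())
    #     gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), draw_labels=True)
    #     gl.xlabels_top = False
    #     gl.xformatter = LONGITUDE_FORMATTER
    #     gl.yformatter = LATITUDE_FORMATTER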
def __init__(self, axes, crs, draw_labels=False, xlocator=None,
ylocator=None, collection_kwargs=None):
"""
Object used by :meth:`cartopy.mpl.geoaxes.GeoAxes.gridlines`
to add gridlines and tick labels to a map.
Args:
* axes
The :class:`cartopy.mpl.geoaxes.GeoAxes` object to be drawn on.
* crs
The :class:`cartopy.crs.CRS` defining the coordinate system that
the gridlines are drawn in.
* draw_labels
Toggle whether to draw labels. For finer control, attributes of
:class:`Gridliner` may be modified individually.
* xlocator
A :class:`matplotlib.ticker.Locator` instance which will be used
to determine the locations of the gridlines in the x-coordinate of
the given CRS. Defaults to None, which implies automatic locating
of the gridlines.
* ylocator
A :class:`matplotlib.ticker.Locator` instance which will be used
to determine the locations of the gridlines in the y-coordinate of
the given CRS. Defaults to None, which implies automatic locating
of the gridlines.
* collection_kwargs
Dictionary controlling line properties, passed to
:class:`matplotlib.collections.Collection`.
"""
self.axes = axes
#: The :class:`~matplotlib.ticker.Locator` to use for the x
#: gridlines and labels.
self.xlocator = xlocator or degree_locator
#: The :class:`~matplotlib.ticker.Locator` to use for the y
#: gridlines and labels.
self.ylocator = ylocator or degree_locator
#: The :class:`~matplotlib.ticker.Formatter` to use for the x labels.
self.xformatter = mticker.ScalarFormatter()
self.xformatter.create_dummy_axis()
#: The :class:`~matplotlib.ticker.Formatter` to use for the y labels.
self.yformatter = mticker.ScalarFormatter()
self.yformatter.create_dummy_axis()
#: Whether to draw labels on the top of the map.
self.xlabels_top = draw_labels
#: Whether to draw labels on the bottom of the map.
self.xlabels_bottom = draw_labels
#: Whether to draw labels on the left hand side of the map.
self.ylabels_left = draw_labels
#: Whether to draw labels on the right hand side of the map.
self.ylabels_right = draw_labels
#: Whether to draw the x gridlines.
self.xlines = True
#: Whether to draw the y gridlines.
self.ylines = True
#: A dictionary passed through to ``ax.text`` on x label creation
#: for styling of the text labels.
self.xlabel_style = {}
#: A dictionary passed through to ``ax.text`` on y label creation
#: for styling of the text labels.
self.ylabel_style = {}
self.crs = crs
# if the user specifies tick labels at this point, check if they can
# be drawn. The same check will take place at draw time in case
# public attributes are changed after instantiation.
if draw_labels:
self._assert_can_draw_ticks()
#: The number of interpolation points which are used to draw the
#: gridlines.
self.n_steps = 30
#: A dictionary passed through to
#: ``matplotlib.collections.LineCollection`` on grid line creation.
self.collection_kwargs = collection_kwargs
#: The x gridlines which were created at draw time.
self.xline_artists = []
#: The y gridlines which were created at draw time.
self.yline_artists = []
#: The x labels which were created at draw time.
self.xlabel_artists = []
#: The y labels which were created at draw time.
self.ylabel_artists = []
def _crs_transform(self):
"""
Get the drawing transform for our gridlines.
.. note::
this depends on the transform of our 'axes', so it may change
dynamically.
"""
transform = self.crs
if not isinstance(transform, mtrans.Transform):
transform = transform._as_mpl_transform(self.axes)
return transform
def _add_gridline_label(self, value, axis, upper_end):
"""
Create a Text artist on our axes for a gridline label.
Args:
* value
Coordinate value of this gridline. The text contains this
value, and is positioned centred at that point.
* axis
which axis the label is on: 'x' or 'y'.
* upper_end
If True, place at the maximum of the "other" coordinate (Axes
coordinate == 1.0). Else 'lower' end (Axes coord = 0.0).
"""
transform = self._crs_transform()
shift_dist_points = 5 # A margin from the map edge.
if upper_end is False:
shift_dist_points = -shift_dist_points
if axis == 'x':
x = value
y = 1.0 if upper_end else 0.0
h_align = 'center'
v_align = 'bottom' if upper_end else 'top'
tr_x = transform
tr_y = self.axes.transAxes + \
mtrans.ScaledTranslation(
0.0,
shift_dist_points * (1.0 / 72),
self.axes.figure.dpi_scale_trans)
str_value = self.xformatter(value)
user_label_style = self.xlabel_style
elif axis == 'y':
y = value
x = 1.0 if upper_end else 0.0
v_align = 'center'
h_align = 'left' if upper_end else 'right'
tr_y = transform
tr_x = self.axes.transAxes + \
mtrans.ScaledTranslation(
shift_dist_points * (1.0 / 72),
0.0,
self.axes.figure.dpi_scale_trans)
str_value = self.yformatter(value)
user_label_style = self.ylabel_style
else:
raise ValueError(
"Unknown axis, {!r}, must be either 'x' or 'y'".format(axis))
# Make a 'blended' transform for label text positioning.
# One coord is geographic, and the other a plain Axes
# coordinate with an appropriate offset.
label_transform = mtrans.blended_transform_factory(
x_transform=tr_x, y_transform=tr_y)
label_style = {'verticalalignment': v_align,
'horizontalalignment': h_align,
}
label_style.update(user_label_style)
# Create and add a Text artist with these properties
text_artist = mtext.Text(x, y, str_value,
clip_on=False,
transform=label_transform, **label_style)
if axis == 'x':
self.xlabel_artists.append(text_artist)
elif axis == 'y':
self.ylabel_artists.append(text_artist)
self.axes.add_artist(text_artist)
def _draw_gridliner(self, nx=None, ny=None, background_patch=None):
"""Create Artists for all visible elements and add to our Axes."""
x_lim, y_lim = self._axes_domain(nx=nx, ny=ny,
background_patch=background_patch)
transform = self._crs_transform()
rc_params = matplotlib.rcParams
n_steps = self.n_steps
x_ticks = self.xlocator.tick_values(x_lim[0], x_lim[1])
y_ticks = self.ylocator.tick_values(y_lim[0], y_lim[1])
# XXX this bit is cartopy specific. (for circular longitudes)
# Purpose: omit plotting the last x line, as it may overlap the first.
x_gridline_points = x_ticks[:]
crs = self.crs
if (isinstance(crs, Projection) and
isinstance(crs, _RectangularProjection) and
abs(np.diff(x_lim)) == abs(np.diff(crs.x_limits))):
x_gridline_points = x_gridline_points[:-1]
collection_kwargs = self.collection_kwargs
if collection_kwargs is None:
collection_kwargs = {}
collection_kwargs = collection_kwargs.copy()
collection_kwargs['transform'] = transform
# XXX doesn't gracefully handle lw vs linewidth aliases...
collection_kwargs.setdefault('color', rc_params['grid.color'])
collection_kwargs.setdefault('linestyle', rc_params['grid.linestyle'])
collection_kwargs.setdefault('linewidth', rc_params['grid.linewidth'])
if self.xlines:
lines = []
for x in x_gridline_points:
l = list(zip(np.zeros(n_steps) + x,
np.linspace(min(y_ticks), max(y_ticks), n_steps)))
lines.append(l)
x_lc = mcollections.LineCollection(lines, **collection_kwargs)
self.xline_artists.append(x_lc)
self.axes.add_collection(x_lc, autolim=False)
if self.ylines:
lines = []
for y in y_ticks:
l = list(zip(np.linspace(min(x_ticks), max(x_ticks), n_steps),
np.zeros(n_steps) + y))
lines.append(l)
y_lc = mcollections.LineCollection(lines, **collection_kwargs)
self.yline_artists.append(y_lc)
self.axes.add_collection(y_lc, autolim=False)
#################
# Label drawing #
#################
# Trim outside-area points from the label coords.
# Tickers may round *up* the desired range to something tidy, not
# all of which is necessarily visible. We must be stricter with
# our texts, as they are drawn *without clipping*.
x_label_points = [x for x in x_ticks if x_lim[0] <= x <= x_lim[1]]
y_label_points = [y for y in y_ticks if y_lim[0] <= y <= y_lim[1]]
if self.xlabels_bottom or self.xlabels_top:
self._assert_can_draw_ticks()
self.xformatter.set_locs(x_label_points)
for x in x_label_points:
if self.xlabels_bottom:
self._add_gridline_label(x, axis='x', upper_end=False)
if self.xlabels_top:
self._add_gridline_label(x, axis='x', upper_end=True)
if self.ylabels_left or self.ylabels_right:
self._assert_can_draw_ticks()
self.yformatter.set_locs(y_label_points)
for y in y_label_points:
if self.ylabels_left:
self._add_gridline_label(y, axis='y', upper_end=False)
if self.ylabels_right:
self._add_gridline_label(y, axis='y', upper_end=True)
def _assert_can_draw_ticks(self):
"""
Check to see if ticks can be drawn. Either returns True or raises
an exception.
"""
# Check labelling is supported, currently a limited set of options.
if not isinstance(self.crs, cartopy.crs.PlateCarree):
raise TypeError('Cannot label {crs.__class__.__name__} gridlines.'
' Only PlateCarree gridlines are currently '
'supported.'.format(crs=self.crs))
if not isinstance(self.axes.projection,
(cartopy.crs.PlateCarree, cartopy.crs.Mercator)):
raise TypeError('Cannot label gridlines on a '
'{prj.__class__.__name__} plot. Only PlateCarree'
' and Mercator plots are currently '
'supported.'.format(prj=self.axes.projection))
return True
def _axes_domain(self, nx=None, ny=None, background_patch=None):
"""Returns x_range, y_range"""
DEBUG = False
transform = self._crs_transform()
ax_transform = self.axes.transAxes
desired_trans = ax_transform - transform
nx = nx or 30
ny = ny or 30
x = np.linspace(1e-9, 1 - 1e-9, nx)
y = np.linspace(1e-9, 1 - 1e-9, ny)
x, y = np.meshgrid(x, y)
coords = np.concatenate([x.flatten()[:, None],
y.flatten()[:, None]],
1)
in_data = desired_trans.transform(coords)
ax_to_bkg_patch = self.axes.transAxes - \
background_patch.get_transform()
ok = np.zeros(in_data.shape[:-1], dtype=np.bool)
# XXX Vectorise contains_point
for i, val in enumerate(in_data):
# convert the coordinates of the data to the background
# patches coordinates
background_coord = ax_to_bkg_patch.transform(coords[i:i + 1, :])
bkg_patch_contains = background_patch.get_path().contains_point
if bkg_patch_contains(background_coord[0, :]):
color = 'r'
ok[i] = True
else:
color = 'b'
if DEBUG:
import matplotlib.pyplot as plt
plt.plot(coords[i, 0], coords[i, 1], 'o' + color,
clip_on=False, transform=ax_transform)
# plt.text(coords[i, 0], coords[i, 1], str(val), clip_on=False,
# transform=ax_transform, rotation=23,
# horizontalalignment='right')
inside = in_data[ok, :]
# If there were no data points in the axes we just use the x and y
# range of the projection.
if inside.size == 0:
x_range = self.crs.x_limits
y_range = self.crs.y_limits
else:
x_range = np.nanmin(inside[:, 0]), np.nanmax(inside[:, 0])
y_range = np.nanmin(inside[:, 1]), np.nanmax(inside[:, 1])
# XXX Cartopy specific thing. Perhaps make this bit a specialisation
# in a subclass...
crs = self.crs
if isinstance(crs, Projection):
x_range = np.clip(x_range, *crs.x_limits)
y_range = np.clip(y_range, *crs.y_limits)
# if the limit is >90% of the full x limit, then just use the full
# x limit (this makes circular handling better)
prct = np.abs(np.diff(x_range) / np.diff(crs.x_limits))
if prct > 0.9:
x_range = crs.x_limits
return x_range, y_range
|
dkillick/cartopy
|
lib/cartopy/mpl/gridliner.py
|
Python
|
lgpl-3.0
| 17,713
|
# -*- coding: utf-8 -*-
""" PolymorphicModel Meta Class
Please see README.rst or DOCS.rst or http://bserve.webhop.org/wiki/django_polymorphic
"""
import sys
import inspect
from django.db import models
from django.db.models.base import ModelBase
from manager import PolymorphicManager
from query import PolymorphicQuerySet
# PolymorphicQuerySet Q objects (and filter()) support these additional key words.
# These are forbidden as field names (a descriptive exception is raised)
POLYMORPHIC_SPECIAL_Q_KWORDS = ['instance_of', 'not_instance_of']
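# For example, ModelA.objects.filter(instance_of=ModelB) narrows a polymorphic
# queryset to ModelB (and its subclasses), which is why these names may not be
# reused as model field names.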
###################################################################################
### PolymorphicModel meta class
class PolymorphicModelBase(ModelBase):
"""
Manager inheritance is a pretty complex topic which may need
more thought regarding how this should be handled for polymorphic
models.
In any case, we probably should propagate 'objects' and 'base_objects'
from PolymorphicModel to every subclass. We also want to somehow
inherit/propagate _default_manager as well, as it needs to be polymorphic.
The current implementation below is an experiment to solve this
problem with a very simplistic approach: We unconditionally
inherit/propagate any and all managers (using _copy_to_model),
as long as they are defined on polymorphic models
(the others are left alone).
Like Django ModelBase, we special-case _default_manager:
if there are any user-defined managers, it is set to the first of these.
We also require that _default_manager as well as any user defined
polymorphic managers produce querysets that are derived from
PolymorphicQuerySet.
"""
def __new__(self, model_name, bases, attrs):
#print; print '###', model_name, '- bases:', bases
# create new model
new_class = self.call_superclass_new_method(model_name, bases, attrs)
# check if the model fields are all allowed
self.validate_model_fields(new_class)
# create list of all managers to be inherited from the base classes
inherited_managers = new_class.get_inherited_managers(attrs)
# add the managers to the new model
for source_name, mgr_name, manager in inherited_managers:
#print '** add inherited manager from model %s, manager %s, %s' % (source_name, mgr_name, manager.__class__.__name__)
new_manager = manager._copy_to_model(new_class)
new_class.add_to_class(mgr_name, new_manager)
# get first user defined manager; if there is one, make it the _default_manager
user_manager = self.get_first_user_defined_manager(model_name, attrs)
if user_manager:
def_mgr = user_manager._copy_to_model(new_class)
#print '## add default manager', type(def_mgr)
new_class.add_to_class('_default_manager', def_mgr)
new_class._default_manager._inherited = False # the default mgr was defined by the user, not inherited
# validate resulting default manager
self.validate_model_manager(new_class._default_manager, model_name, '_default_manager')
# for __init__ function of this class (monkeypatching inheritance accessors)
new_class.polymorphic_super_sub_accessors_replaced = False
# determine the name of the primary key field and store it into the class variable
# polymorphic_primary_key_name (it is needed by query.py)
for f in new_class._meta.fields:
if f.primary_key and type(f) != models.OneToOneField:
new_class.polymorphic_primary_key_name = f.name
break
return new_class
def get_inherited_managers(self, attrs):
"""
Return list of all managers to be inherited/propagated from the base classes;
use correct mro, only use managers with _inherited==False (they are of no use),
skip managers that are overwritten by the user with same-named class attributes (in attrs)
"""
add_managers = []
add_managers_keys = set()
for base in self.__mro__[1:]:
if not issubclass(base, models.Model):
continue
if not getattr(base, 'polymorphic_model_marker', None):
continue # leave managers of non-polym. models alone
for key, manager in base.__dict__.items():
if type(manager) == models.manager.ManagerDescriptor:
manager = manager.manager
if not isinstance(manager, models.Manager):
continue
if key in ['_base_manager']:
continue # let Django handle _base_manager
if key in attrs:
continue
if key in add_managers_keys:
continue # manager with that name already added, skip
if manager._inherited:
continue # inherited managers (on the bases) have no significance, they are just copies
#print >>sys.stderr,'##',self.__name__, key
if isinstance(manager, PolymorphicManager): # validate any inherited polymorphic managers
self.validate_model_manager(manager, self.__name__, key)
add_managers.append((base.__name__, key, manager))
add_managers_keys.add(key)
return add_managers
@classmethod
def get_first_user_defined_manager(self, model_name, attrs):
mgr_list = []
for key, item in attrs.items():
if isinstance(item, models.Manager):
mgr_list.append((item.creation_counter, key, item))
# if there are user defined managers, use first one as _default_manager
if mgr_list:
_, manager_name, manager = sorted(mgr_list)[0]
#sys.stderr.write( '\n# first user defined manager for model "{model}":\n# "{mgrname}": {mgr}\n# manager model: {mgrmodel}\n\n'
# .format( model=model_name, mgrname=manager_name, mgr=manager, mgrmodel=manager.model ) )
return manager
return None
@classmethod
def call_superclass_new_method(self, model_name, bases, attrs):
"""call __new__ method of super class and return the newly created class.
Also work around a limitation in Django's ModelBase."""
# There seems to be a general limitation in Django's app_label handling
# regarding abstract models (in ModelBase). See issue 1 on github - TODO: propose patch for Django
# We run into this problem if polymorphic.py is located in a top-level directory
# which is directly in the python path. To work around this we temporarily set
# app_label here for PolymorphicModel.
meta = attrs.get('Meta', None)
model_module_name = attrs['__module__']
do_app_label_workaround = (meta
and model_module_name == 'polymorphic'
and model_name == 'PolymorphicModel'
and getattr(meta, 'app_label', None) is None)
if do_app_label_workaround:
meta.app_label = 'poly_dummy_app_label'
new_class = super(PolymorphicModelBase, self).__new__(self, model_name, bases, attrs)
if do_app_label_workaround:
del(meta.app_label)
return new_class
def validate_model_fields(self):
"check if all fields names are allowed (i.e. not in POLYMORPHIC_SPECIAL_Q_KWORDS)"
for f in self._meta.fields:
if f.name in POLYMORPHIC_SPECIAL_Q_KWORDS:
e = 'PolymorphicModel: "%s" - field name "%s" is not allowed in polymorphic models'
raise AssertionError(e % (self.__name__, f.name))
@classmethod
def validate_model_manager(self, manager, model_name, manager_name):
"""check if the manager is derived from PolymorphicManager
and its querysets from PolymorphicQuerySet - throw AssertionError if not"""
if not issubclass(type(manager), PolymorphicManager):
e = 'PolymorphicModel: "' + model_name + '.' + manager_name + '" manager is of type "' + type(manager).__name__
e += '", but must be a subclass of PolymorphicManager'
raise AssertionError(e)
if not getattr(manager, 'queryset_class', None) or not issubclass(manager.queryset_class, PolymorphicQuerySet):
e = 'PolymorphicModel: "' + model_name + '.' + manager_name + '" (PolymorphicManager) has been instantiated with a queryset class which is'
e += ' not a subclass of PolymorphicQuerySet (which is required)'
raise AssertionError(e)
return manager
# hack: a small patch to Django would be a better solution.
# Django's management command 'dumpdata' relies on non-polymorphic
# behaviour of the _default_manager. Therefore, we catch any access to _default_manager
# here and return the non-polymorphic default manager instead if we are called from 'dumpdata.py'
# (non-polymorphic default manager is 'base_objects' for polymorphic models).
# This way we don't need to patch django.core.management.commands.dumpdata
# for all supported Django versions.
# TODO: investigate Django how this can be avoided
_dumpdata_command_running = False
if len(sys.argv) > 1:
_dumpdata_command_running = (sys.argv[1] == 'dumpdata')
def __getattribute__(self, name):
if name == '_default_manager':
if self._dumpdata_command_running:
frm = inspect.stack()[1] # frm[1] is caller file name, frm[3] is caller function name
if 'django/core/management/commands/dumpdata.py' in frm[1]:
return self.base_objects
#caller_mod_name = inspect.getmodule(frm[0]).__name__ # does not work with python 2.4
#if caller_mod_name == 'django.core.management.commands.dumpdata':
return super(PolymorphicModelBase, self).__getattribute__(name)
|
sdelements/django_polymorphic
|
polymorphic/base.py
|
Python
|
bsd-3-clause
| 10,068
|
# Copyright (c) 2010 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
from m5.SimObject import SimObject
from MemObject import MemObject
from m5.params import *
from m5.proxy import *
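# Note on m5.params usage below: Param.Int("desc") declares a required
# parameter with no default, while Param.Int(64, "desc") supplies 64 as the
# default value.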
class DirectedGenerator(SimObject):
type = 'DirectedGenerator'
abstract = True
num_cpus = Param.Int("num of cpus")
system = Param.System(Parent.any, "System we belong to")
class SeriesRequestGenerator(DirectedGenerator):
type = 'SeriesRequestGenerator'
addr_increment_size = Param.Int(64, "address increment size")
issue_writes = Param.Bool(True, "issue writes if true, otherwise reads")
class InvalidateGenerator(DirectedGenerator):
type = 'InvalidateGenerator'
addr_increment_size = Param.Int(64, "address increment size")
class RubyDirectedTester(MemObject):
type = 'RubyDirectedTester'
cpuPort = VectorMasterPort("the cpu ports")
requests_to_complete = Param.Int("checks to complete")
generator = Param.DirectedGenerator("the request generator")
|
xiaoyaozi5566/GEM5_DRAMSim2
|
src/cpu/testers/directedtest/RubyDirectedTester.py
|
Python
|
bsd-3-clause
| 2,482
|
import sys, csv
FLOW_TYPES = {
#'PMU_AGG' : "/direct/agg/pmu/",
#'PMU_COM' : "/direct/com/pmu/",
#'AMI_AGG' : "/direct/agg/ami/",
#'AMI_COM' : "/direct/com/ami/",
'WAC' : "/power/wac/",
'PDC' : "/power/pdc/",
'BGD' : "/power/bgd/"
#'DATA' : "/overlay/com/subscription"
}
def gettype(name):
for k, v in FLOW_TYPES.items():
if name.startswith(v):
return k
raise Exception("Unclassified name: '%s'" % name)
def main(infile):
latlog = open("lat_%s" % infile, "w")
latlog.write("srcid dstid timesent timerecv latency flowcls pktname\n")
fh = open(infile)
reader = csv.reader(fh, delimiter=',', skipinitialspace=True)
reader.next() # skip header
deliveredCount = {x: 0 for x in FLOW_TYPES}
deliveredSize = {x: 0 for x in FLOW_TYPES}
outstanding = {}
agg_queue = {}
agg_tmp = {}
list_processed = []
list_wacflows = []
list_pdcflows = []
for nodeid, event, name, payloadsize, time in reader:
nodeid = int(nodeid)
payloadsize = int(payloadsize)
time = float(time)
cls = gettype(name)
if event == "sent":
# pub-sub flow needs different handling due to multisource multicast
if cls == "DATA":
outstanding[name] = (time, payloadsize, 1)
elif cls == "BGD":
pass
else:
if name in outstanding:
#(t1, ps1, c1) = outstanding[name]
#if t1 != time:
# raise Exception("Duplicate outstanding with mismatched timestamp")
#outstanding[name] = (t1, ps1 + payloadsize, c1 + 1)
raise Exception("whoa dupe")
else:
outstanding[name] = (time, payloadsize, 1, nodeid, name)
if cls in ["PMU_COM", "AMI_COM"]:
agg_tmp[name] = agg_queue[(nodeid, cls[:3])]
agg_queue[(nodeid, cls)] = []
elif event == "recv":
if name in outstanding:
t1, ps1, c1, sn1, pktname = outstanding[name]
else:
continue
# don't remove outstanding pub-sub entry, it can be delivered multiple times
if cls != "DATA":
if c1 == 1:
del outstanding[name]
else:
raise Exception("ddd")
outstanding[name] = (t1, ps1 - payloadsize, c1 - 1)
latency = time - t1
deliveredCount[cls] += 1
deliveredSize[cls] += ps1
latlog.write("%d %d %.9f %.9f %.9f %s %s\n" % (sn1, nodeid, t1, time, latency, cls, name))
list_processed.append(str(sn1) + " " + str(nodeid) + " " + str(t1) + " " + str(time) + " " + str(latency) + " " + str(cls) + " " + str(name))
if cls in ["PMU_AGG", "AMI_AGG"]:
                if (nodeid, cls[:3]) not in agg_queue:
                    agg_queue[(nodeid, cls[:3])] = []
agg_queue[(nodeid, cls[:3])].append(latency)
if cls in ["PMU_COM", "AMI_COM"]:
for agglat in agg_tmp[name]:
latlog.write("%s %.6f\n" % (cls[:3]+ "_TOT", latency + agglat))
#latlog.close()
#Save all the flows
with open("ip_all_flows.csv") as flowfile:
for line in flowfile:
flowsplit = line.strip().split()
if flowsplit[2] == "WAC":
list_wacflows.append(line.strip())
if flowsplit[2] == "PDC":
list_pdcflows.append(line.strip())
flowcompleted = False
infinitelat = 400
outcounter = 0
#Process each interest sent according to flow
for name in outstanding:
outcounter += 1
print "Processing packet loss...", outcounter, "out of", len(outstanding)
if "wac" in name:
#Check all WAC flows
wacsrcnode = outstanding[name][3]
wacdstnode = name.split("/")[6]
wacsrctime = outstanding[name][0]
wacpktname = outstanding[name][4]
for wflow in list_wacflows:
flowcompleted = False
#If flow exists, check if entry is in the processed list
if (int(wacsrcnode) == int(wflow.split(" ")[1])) and (int(wacdstnode) == int(wflow.split(" ")[0])):
#Verify if flow exist in processed file or not
for procd in list_processed:
procditem = procd.split()
if (int(wacsrcnode) == int(procditem[0])) and (int(wflow.split(" ")[0]) == int(procditem[1])) and (wacpktname.strip() == procditem[6].strip()):
#print "Processing packet loss...WAC flow completed"
flowcompleted = True
break
if flowcompleted == False:
print wacsrcnode, wflow.split(" ")[0], outstanding[name][4], "WAC packet loss!!!"
latlog.write("%d %d %.9f %.9f %.9f %s %s\n" % (int(wacsrcnode), int(wflow.split(" ")[0]), float(wacsrctime), infinitelat, infinitelat - float(wacsrctime) , "WAC", wacpktname))
if "pdc" in name:
#Check all PDC flows
pdcsrcnode = outstanding[name][3]
pdcdstnode = name.split("/")[6]
pdcsrctime = outstanding[name][0]
pdcpktname = outstanding[name][4]
for pflow in list_pdcflows:
flowcompleted = False
#If flow exists, check if entry is in the processed list
if (int(pdcsrcnode) == int(pflow.split(" ")[1])) and (int(pdcdstnode) == int(pflow.split(" ")[0])):
#Verify if flow exist in processed file or not
for procd in list_processed:
procditem = procd.split()
if (int(pdcsrcnode) == int(procditem[0])) and (int(pflow.split(" ")[0]) == int(procditem[1])) and (pdcpktname.strip() == procditem[6].strip()):
#print "Processing packet loss...WAC flow completed"
flowcompleted = True
break
if flowcompleted == False:
print pdcsrcnode, pflow.split(" ")[0], outstanding[name][4], "PDC packet loss!!!"
latlog.write("%d %d %.9f %.9f %.9f %s %s\n" % (int(pdcsrcnode), int(pflow.split(" ")[0]), float(pdcsrctime), infinitelat, infinitelat - float(pdcsrctime) , "PDC", pdcpktname))
lostCount = {x: 0 for x in FLOW_TYPES}
lostSize = {x: 0 for x in FLOW_TYPES}
metlog = open("met_%s" % infile, "w")
metlog.write("flowcls recvcnt recvsize losscnt losssize\n")
#for name, (time, payloadsize, count) in outstanding.items():
#cls = gettype(name)
#if cls != "DATA":
#lostCount[cls] += 1
#lostSize[cls] += payloadsize
#Declare variables to store PMU and AMI total bytes transmitted
totalLossPMU = 0
totalRecvPMU = 0
totalLossAMI = 0
totalRecvAMI = 0
totalLossCntPMU = 0
totalRecvCntPMU = 0
totalLossCntAMI = 0
totalRecvCntAMI = 0
for cls in FLOW_TYPES:
#Calculate total received and lost bytes (PMU and AMI)
if cls == "PMU_AGG":
totalLossPMU += lostSize[cls]
if cls == "PMU_COM":
totalLossPMU += lostSize[cls]
totalRecvPMU += deliveredSize[cls]
totalLossCntPMU += lostCount[cls]
totalRecvCntPMU += deliveredCount[cls]
if cls == "AMI_AGG":
totalLossAMI += lostSize[cls]
if cls == "AMI_COM":
totalLossAMI += lostSize[cls]
totalRecvAMI += deliveredSize[cls]
totalLossCntAMI += lostCount[cls]
totalRecvCntAMI += deliveredCount[cls]
metlog.write("%s %d %d %d %d\n" % (cls, deliveredCount[cls], deliveredSize[cls], lostCount[cls], lostSize[cls]))
#Write total values received at compute layer to processed file (PMU and AMI)
metlog.write("%s %d %d %d %d\n" % ("PMU_TOT", totalRecvCntPMU, totalRecvPMU, totalLossCntPMU, totalLossPMU))
metlog.write("%s %d %d %d %d\n" % ("AMI_TOT", totalRecvCntAMI, totalRecvAMI, totalLossCntAMI, totalLossAMI))
metlog.close()
fh.close()
#add loss packets to the latency file, with very high values (infinity)
metlognew = open("met_%s" % infile, "r")
for line in metlognew:
linevalues = line.split(" ")
if linevalues[0] == "ERROR":
for i in range(int(linevalues[3])):
latlog.write("%s %0.6f\n" % ("ERROR", 0.100000))
if linevalues[0] == "DATA":
for i in range(int(linevalues[3])):
latlog.write("%s %0.6f\n" % ("DATA", 0.100000))
if linevalues[0] == "PMU_TOT":
for i in range(int(linevalues[3])):
latlog.write("%s %0.6f\n" % ("PMU_TOT", 0.100000))
if linevalues[0] == "AMI_TOT":
for i in range(int(linevalues[3])):
latlog.write("%s %0.6f\n" % ("AMI_TOT", 0.100000))
metlognew.close()
latlog.close()
return 0
if __name__ == "__main__":
sys.exit(main(*sys.argv[1:]))
|
nsol-nmsu/ns3-smartgrid
|
preproc-case39cyber-ip.py
|
Python
|
gpl-2.0
| 9,768
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012 by Oscar Morante <oscar@morante.eu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This script uses fribidi library to display rtl text properly
import weechat
from pyfribidi import *
SCRIPT_NAME = "biditext"
SCRIPT_AUTHOR = "Oscar Morante <oscar@morante.eu>"
SCRIPT_VERSION = "1"
SCRIPT_LICENSE = "GPL3"
SCRIPT_DESC = "Use fribidi to handle RTL text"
def biditext_cb(data, modifier, modifier_data, line):
return log2vis(line, LTR)
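# log2vis reorders the logical (storage-order) string into its visual order
# for display, with an overall left-to-right (LTR) base direction.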
if __name__ == "__main__":
if weechat.register(SCRIPT_NAME,
SCRIPT_AUTHOR,
SCRIPT_VERSION,
SCRIPT_LICENSE,
SCRIPT_DESC, "", ""):
weechat.hook_modifier('weechat_print', 'biditext_cb', '')
|
qguv/config
|
weechat/plugins/python/biditext.py
|
Python
|
gpl-3.0
| 1,386
|
"""
@name: Modules/House/_test/test_house.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2013-2020 by D. Brian Kimmel
@license: MIT License
@note: Created on Apr 8, 2013
@summary: Test handling the information for a house.
Passed all 11 tests - DBK - 2020-01-27
"""
from Modules.House import HouseInformation
__updated__ = '2020-02-02'
# Import system type stuff
from twisted.trial import unittest
from ruamel.yaml import YAML
# Import PyMh files and modules.
from _test.testing_mixin import SetupPyHouseObj
from Modules.House.house import Api as houseApi
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
TEST_YAML = """\
House:
Name: PinkPoppy
Modules: # Uncomment to use module.
- Family
- Entertainment
# - HVAC
# - Irrigation
- Lighting
# - Pool
# - Rules
- Scheduling
# - Security
# - Sync
"""
class SetupMixin(object):
def setUp(self):
self.m_pyhouse_obj = SetupPyHouseObj().BuildPyHouseObj()
l_yaml = YAML()
self.m_test_config = l_yaml.load(TEST_YAML)
class A0(unittest.TestCase):
def test_00_Print(self):
_x = PrettyFormatAny.form('_test', 'title', 190) # so it is defined when printing is cleaned up.
print('Id: test_house')
class A1_Setup(SetupMixin, unittest.TestCase):
"""
    This section verifies that the XML in the 'Modules.text.xml_data' file is
    correct and matches what the node_local module can read/write.
"""
def setUp(self):
SetupMixin.setUp(self)
def test_01_PyHouse(self):
print(PrettyFormatAny.form(self.m_pyhouse_obj, 'A1-01-A - PyHouse'))
self.assertIsNotNone(self.m_pyhouse_obj)
def test_02_House(self):
print(PrettyFormatAny.form(self.m_pyhouse_obj.House, 'A1-02-A - House'))
self.assertIsNotNone(self.m_pyhouse_obj.House)
self.assertIsInstance(self.m_pyhouse_obj.House, HouseInformation)
def test_03_Location(self):
print(PrettyFormatAny.form(self.m_pyhouse_obj.House.Location, 'A1-03-A - Location'))
self.assertIsNotNone(self.m_pyhouse_obj.House.Location)
class C1_Read(SetupMixin, unittest.TestCase):
"""
This section tests the reading of the config used by house.
"""
def setUp(self):
SetupMixin.setUp(self)
def test_01_Load(self):
"""
"""
class C2_Write(SetupMixin, unittest.TestCase):
"""
This section tests the writing of XML used by house.
"""
def setUp(self):
SetupMixin.setUp(self)
class P1_Api(SetupMixin, unittest.TestCase):
""" Test the major Api functions
"""
def setUp(self):
SetupMixin.setUp(self)
self.m_api = houseApi(self.m_pyhouse_obj)
def test_01_Init(self):
""" Create a JSON object for Location.
"""
# print(PrettyFormatAny.form(self.m_api, 'P1-01-A - Api'))
pass
def test_02_Load(self):
"""
"""
# print(PrettyFormatAny.form(l_xml, 'P1-02-A - Api'))
def test_03_Start(self):
pass
def test_04_SaveXml(self):
"""
"""
# self.m_api.LoadConfig()
# print(PrettyFormatAny.form(self.m_pyhouse_obj.House, 'P1-04-A - House'))
# print(PrettyFormatAny.form(self.m_pyhouse_obj._Families, 'P1-04-B - House'))
# print(PrettyFormatAny.form(l_xml, 'P1-04-D - Api'))
# ## END DBK
|
DBrianKimmel/PyHouse
|
Project/src/Modules/House/_test/test_house.py
|
Python
|
mit
| 3,437
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from string import Template
REDIRECT_TEMPLATE = Template(
r"""
<!DOCTYPE html>
<html lang="en-US">
<head>
<title>Redirecting...</title>
<script>
var redirect_url = "$redirect_url";
if (window.location.search) {
redirect_url += window.location.search;
}
if (window.location.hash) {
redirect_url += window.location.hash;
}
window.location.href = redirect_url;
</script>
<link rel="canonical" href="$redirect_url" />
<meta charset="utf-8" />
<meta http-equiv="refresh" content="1; URL=$redirect_url" />
<meta name="robots" content="noindex" />
</head>
<body>
<h1>Redirecting...</h1>
<p><a href="$redirect_url">Click here if you are not redirected.</a></p>
</body>
</html>
"""
)
def build_redirect_page(html_path, redirect_url):
os.makedirs(os.path.dirname(html_path), exist_ok=True)
with open(html_path, "w") as fp:
fp.write(REDIRECT_TEMPLATE.substitute(redirect_url=redirect_url).strip())
def load_redirects(src_dir):
redirects_txt = os.path.join(src_dir, "redirects.txt")
assert os.path.isfile(redirects_txt)
result = {}
with open(redirects_txt) as fp:
for line in fp.readlines():
line = line.strip()
skip_conds = [
not line,
not line.startswith("/"),
"->" not in line,
]
if any(skip_conds):
continue
from_path, to_path = line.split("->")
from_path = from_path.strip()
to_path = to_path.strip()
assert from_path.endswith(".html")
assert to_path.endswith(".html")
result[from_path] = to_path
return result
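# A redirects.txt line maps an old page to a new one, e.g. (hypothetical):
#   /faq/old-page.html -> faq/new-page.html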
def build_redirect_pages(app, exception):
if app.builder.name != "html":
return
is_latest = app.config.html_context.get("is_latest")
redirects = load_redirects(app.srcdir)
for from_path, to_path in redirects.items():
out_dir = app.outdir
if os.path.dirname(from_path) != "/":
out_dir = os.path.join(out_dir, os.path.dirname(from_path)[1:])
build_redirect_page(
os.path.join(out_dir, os.path.basename(from_path)),
"%s/en/%s/%s"
% (
app.config.html_baseurl,
("latest" if is_latest else "stable"),
to_path,
),
)
print("Built %d redirect pages" % len(redirects))
def build_legacy_rtd_pages(app, exception):
if app.builder.name != "html":
return
for root, dirs, files in os.walk(app.outdir):
for name in files:
if not name.endswith(".html"):
continue
out_dir = os.path.join(os.path.dirname(app.outdir), "rtdpage")
relative_dir = root[len(os.path.commonpath([app.outdir, root])) :] or "/"
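            # e.g. root = "<outdir>/en/page" yields relative_dir = "/en/page"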
if relative_dir != "/":
out_dir = os.path.join(out_dir, relative_dir[1:])
build_redirect_page(
os.path.join(out_dir, name),
"%s/en/latest%s/%s"
% (
app.config.html_baseurl,
relative_dir,
name,
),
)
def setup(app):
app.connect("build-finished", build_redirect_pages)
app.connect("build-finished", build_legacy_rtd_pages)
return {
"parallel_read_safe": True,
"parallel_write_safe": True,
}
|
platformio/platformio-docs
|
_ext/redirects.py
|
Python
|
apache-2.0
| 4,047
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import bottle as app
@app.route('/<:re:(|data.json)>', method=['GET', 'POST'])
def index():
dollar_rate = float(app.request.forms.get('dollar_rate') or 50.0)
hours_per_day = int(app.request.forms.get('hours_per_day') or 2)
days_to_work = int(app.request.forms.get('days_to_work') or 20)
base_salary = float(app.request.forms.get('base_salary') or 20000.0)
days_of_work = int(app.request.forms.get('days_of_work') or 5)
weeks_of_work = int(app.request.forms.get('weeks_of_work') or 4)
hours_of_work = int(app.request.forms.get('hours_of_work') or 8)
domain = float(app.request.forms.get('domain') or 10.0) / 12
hosting = float(app.request.forms.get('hosting') or 5.0)
license = float(app.request.forms.get('license') or 0.0)
submit = app.request.forms.get('submit')
project_hours = hours_per_day * days_to_work
hourly_rate = base_salary / (days_of_work * weeks_of_work) / hours_of_work
overhead = hourly_rate * 0.25
dev_hourly_rate = sum([hourly_rate, overhead])
dev_project_rate = dev_hourly_rate * project_hours
materials = sum([domain, hosting, license]) * dollar_rate
total = sum([dev_project_rate, materials])
if app.request.path == '/data.json':
return locals()
return app.template('index', **locals())
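# With the defaults above: hourly_rate = 20000 / (5 * 4) / 8 = 125.0,
# dev_hourly_rate = 125.0 + 31.25 = 156.25, project_hours = 2 * 20 = 40,
# dev_project_rate = 6250.0, materials = (10/12 + 5 + 0) * 50 ≈ 291.67,
# so total ≈ 6541.67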
app.run(reloader=True)
|
ejelome/archaic
|
py/prodev-calc/server.py
|
Python
|
mit
| 1,366
|
#!/usr/bin/env python
'''
Written by Dejanira Araiza Illan, January 2017
'''
import re
import os
passed = 0
failed = 0
checked = []
result=[]
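# Each assertion log can report the same test number more than once; a later
# 'Failed' entry for a test first counted as 'Passed' flips that test to
# failed, so passed/failed count unique tests rather than log lines.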
for ii in range(10,51):
	for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion1_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 1 at test",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('Passed',getporf[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]):
failed=failed+1
result.append(0)
checked.append(int(gettest[0]))
else:
if re.search('Passed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
failed=failed+1
result.append(0)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]==int(gettest[0]):
if result[len(result)-1]==1:
passed=passed-1
failed=failed+1
result[len(result)-1]=0
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
passed = 0
failed = 0
checked = []
result=[]
for ii in range(10,51):
for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion2_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 2 at test",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('Passed',getporf[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]):
failed=failed+1
result.append(0)
checked.append(int(gettest[0]))
else:
if re.search('Passed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
failed=failed+1
result.append(0)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]==int(gettest[0]):
if result[len(result)-1]==1:
passed=passed-1
failed=failed+1
result[len(result)-1]=0
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
passed = 0
failed = 0
checked = []
result=[]
for ii in range(10,51):
for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion3_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 3 at test",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('Passed',getporf[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]):
failed=failed+1
result.append(0)
checked.append(int(gettest[0]))
else:
if re.search('Passed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
failed=failed+1
result.append(0)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]==int(gettest[0]):
if result[len(result)-1]==1:
passed=passed-1
failed=failed+1
result[len(result)-1]=0
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
passed = 0
failed = 0
checked = []
result=[]
for ii in range(10,51):
for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion4_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 4 at trace",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('True',getporf[0]):
passed=passed+1
result.append(1)
checked.append(int(gettest[0]))
else:
if re.search('True',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
passed = 0
failed = 0
checked = []
result=[]
for ii in range(10,51):
for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion5_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 5 at test",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('Passed',getporf[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]):
failed=failed+1
result.append(0)
checked.append(int(gettest[0]))
else:
if re.search('Passed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
failed=failed+1
result.append(0)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]==int(gettest[0]):
if result[len(result)-1]==1:
passed=passed-1
failed=failed+1
result[len(result)-1]=0
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
passed = 0
failed = 0
checked = []
result=[]
for ii in range(10,51):
for num,line in enumerate(open('/home/da13683/temporary/data_diffseeds_table_pr/assertion6_'+str(ii)+'.txt','r')):
getdata = re.split("Assertion 6 at test",line)
gettest = re.split("[:]",getdata[1]) #test at 0
getporf = re.split("at global time",gettest[1])
if len(checked)<1:
if re.search('Passed',getporf[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]):
failed=failed+1
result.append(0)
checked.append(int(gettest[0]))
else:
if re.search('Passed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
passed=passed+1
result.append(1)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]!=int(gettest[0]):
failed=failed+1
result.append(0)
elif re.search('Failed',getporf[0]) and checked[len(checked)-1]==int(gettest[0]):
if result[len(result)-1]==1:
passed=passed-1
failed=failed+1
result[len(result)-1]=0
if checked[len(checked)-1]!=int(gettest[0]):
checked.append(int(gettest[0]))
print passed
print failed
print passed+failed
print len(checked)
print len(result)
print 6560 - len(checked)
print '----'
|
robosafe/mc-vs-bdi
|
data/assertioncov_pr_table.py
|
Python
|
gpl-3.0
| 6,941
|
# -*- coding: utf-8 -*-
"""The Logical Volume Manager (LVM) file system implementation."""
import pyvslvm
# This is necessary to prevent a circular import.
import dfvfs.vfs.lvm_file_entry
from dfvfs.lib import definitions
from dfvfs.lib import errors
from dfvfs.lib import lvm
from dfvfs.path import lvm_path_spec
from dfvfs.resolver import resolver
from dfvfs.vfs import file_system
class LVMFileSystem(file_system.FileSystem):
"""Class that implements a file system object using pyvslvm."""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_LVM
def __init__(self, resolver_context):
"""Initializes a file system object.
Args:
resolver_context: the resolver context (instance of Context).
"""
super(LVMFileSystem, self).__init__(resolver_context)
self._file_object = None
self._vslvm_volume_group = None
self._vslvm_handle = None
def _Close(self):
"""Closes the file system object.
Raises:
IOError: if the close failed.
"""
self._vslvm_volume_group = None
self._vslvm_handle.close()
self._vslvm_handle = None
self._file_object.close()
self._file_object = None
def _Open(self, path_spec, mode='rb'):
"""Opens the file system object defined by path specification.
Args:
path_spec: a path specification (instance of PathSpec).
mode: optional file access mode. The default is 'rb' read-only binary.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
u'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
vslvm_handle = pyvslvm.handle()
vslvm_handle.open_file_object(file_object)
# TODO: implement multi physical volume support.
vslvm_handle.open_physical_volume_files_as_file_objects([
file_object])
vslvm_volume_group = vslvm_handle.get_volume_group()
except:
file_object.close()
raise
self._file_object = file_object
self._vslvm_handle = vslvm_handle
self._vslvm_volume_group = vslvm_volume_group
def FileEntryExistsByPathSpec(self, path_spec):
"""Determines if a file entry for a path specification exists.
Args:
path_spec: a path specification (instance of PathSpec).
Returns:
Boolean indicating if the file entry exists.
"""
volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec)
    # The virtual root file has no corresponding volume index but
    # should have a location.
if volume_index is None:
location = getattr(path_spec, u'location', None)
return location is not None and location == self.LOCATION_ROOT
return (volume_index >= 0 and
volume_index < self._vslvm_volume_group.number_of_logical_volumes)
def GetFileEntryByPathSpec(self, path_spec):
"""Retrieves a file entry for a path specification.
Args:
path_spec: a path specification (instance of PathSpec).
Returns:
A file entry (instance of FileEntry) or None.
"""
volume_index = lvm.LVMPathSpecGetVolumeIndex(path_spec)
    # The virtual root file has no corresponding volume index but
    # should have a location.
if volume_index is None:
location = getattr(path_spec, u'location', None)
if location is None or location != self.LOCATION_ROOT:
return
return dfvfs.vfs.lvm_file_entry.LVMFileEntry(
self._resolver_context, self, path_spec, is_root=True,
is_virtual=True)
if (volume_index < 0 or
volume_index >= self._vslvm_volume_group.number_of_logical_volumes):
return
return dfvfs.vfs.lvm_file_entry.LVMFileEntry(
self._resolver_context, self, path_spec)
def GetLVMVolumeGroup(self):
"""Retrieves the LVM volume group object.
Returns:
The LVM handle object (instance of pyvslvm.volume_group).
"""
return self._vslvm_volume_group
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
A file entry (instance of FileEntry).
"""
path_spec = lvm_path_spec.LVMPathSpec(
location=self.LOCATION_ROOT, parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec)
|
dc3-plaso/dfvfs
|
dfvfs/vfs/lvm_file_system.py
|
Python
|
apache-2.0
| 4,506
|
__all__ = ['setup', 'get_db']
from modelplus.store import redis_db, sqlite_db
store = None
def setup(params):
"""
'redis' : { host, port, db }
'sqlite' : { file }
'mysql' : { host, port, db }
'riak' : { host, port, bucket }
'mongodb' : { host, port, bucket }
"""
global store
kwargs = params.get('redis')
if kwargs:
store = redis_db.setup(**kwargs)
kwargs = params.get('sqlite')
if kwargs:
store = sqlite_db.setup(**kwargs)
def get_db():
return store
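# Typical usage (hypothetical):
#   setup({'sqlite': {'file': '/tmp/app.db'}})
#   db = get_db()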
|
koblas/modelplus
|
modelplus/__init__.py
|
Python
|
mit
| 552
|
import pytest
import py
import os
from _pytest.config import get_config, PytestPluginManager
from _pytest.main import EXIT_NOTESTSCOLLECTED
@pytest.fixture
def pytestpm():
return PytestPluginManager()
class TestPytestPluginInteractions:
def test_addhooks_conftestplugin(self, testdir):
testdir.makepyfile(newhooks="""
def pytest_myhook(xyz):
"new hook"
""")
conf = testdir.makeconftest("""
import sys ; sys.path.insert(0, '.')
import newhooks
def pytest_addhooks(pluginmanager):
pluginmanager.addhooks(newhooks)
def pytest_myhook(xyz):
return xyz + 1
""")
config = get_config()
pm = config.pluginmanager
pm.hook.pytest_addhooks.call_historic(
kwargs=dict(pluginmanager=config.pluginmanager))
config.pluginmanager._importconftest(conf)
#print(config.pluginmanager.get_plugins())
res = config.hook.pytest_myhook(xyz=10)
assert res == [11]
def test_addhooks_nohooks(self, testdir):
testdir.makeconftest("""
import sys
def pytest_addhooks(pluginmanager):
pluginmanager.addhooks(sys)
""")
res = testdir.runpytest()
assert res.ret != 0
res.stderr.fnmatch_lines([
"*did not find*sys*"
])
def test_namespace_early_from_import(self, testdir):
p = testdir.makepyfile("""
from pytest import Item
from pytest import Item as Item2
assert Item is Item2
""")
result = testdir.runpython(p)
assert result.ret == 0
def test_do_ext_namespace(self, testdir):
testdir.makeconftest("""
def pytest_namespace():
return {'hello': 'world'}
""")
p = testdir.makepyfile("""
from pytest import hello
import pytest
def test_hello():
assert hello == "world"
assert 'hello' in pytest.__all__
""")
reprec = testdir.inline_run(p)
reprec.assertoutcome(passed=1)
def test_do_option_postinitialize(self, testdir):
config = testdir.parseconfigure()
assert not hasattr(config.option, 'test123')
p = testdir.makepyfile("""
def pytest_addoption(parser):
parser.addoption('--test123', action="store_true",
default=True)
""")
config.pluginmanager._importconftest(p)
assert config.option.test123
def test_configure(self, testdir):
config = testdir.parseconfig()
l = []
class A:
def pytest_configure(self, config):
l.append(self)
config.pluginmanager.register(A())
assert len(l) == 0
config._do_configure()
assert len(l) == 1
config.pluginmanager.register(A()) # leads to a configured() plugin
assert len(l) == 2
assert l[0] != l[1]
config._ensure_unconfigure()
config.pluginmanager.register(A())
assert len(l) == 2
def test_hook_tracing(self):
pytestpm = get_config().pluginmanager # fully initialized with plugins
saveindent = []
class api1:
def pytest_plugin_registered(self):
saveindent.append(pytestpm.trace.root.indent)
class api2:
def pytest_plugin_registered(self):
saveindent.append(pytestpm.trace.root.indent)
raise ValueError()
l = []
pytestpm.trace.root.setwriter(l.append)
undo = pytestpm.enable_tracing()
try:
indent = pytestpm.trace.root.indent
p = api1()
pytestpm.register(p)
assert pytestpm.trace.root.indent == indent
assert len(l) >= 2
assert 'pytest_plugin_registered' in l[0]
assert 'finish' in l[1]
l[:] = []
with pytest.raises(ValueError):
pytestpm.register(api2())
assert pytestpm.trace.root.indent == indent
assert saveindent[0] > indent
finally:
undo()
def test_warn_on_deprecated_multicall(self, pytestpm):
warnings = []
class get_warnings:
def pytest_logwarning(self, message):
warnings.append(message)
class Plugin:
def pytest_configure(self, __multicall__):
pass
pytestpm.register(get_warnings())
before = list(warnings)
pytestpm.register(Plugin())
assert len(warnings) == len(before) + 1
assert "deprecated" in warnings[-1]
def test_warn_on_deprecated_addhooks(self, pytestpm):
warnings = []
class get_warnings:
def pytest_logwarning(self, code, fslocation, message, nodeid):
warnings.append(message)
class Plugin:
def pytest_testhook():
pass
pytestpm.register(get_warnings())
before = list(warnings)
pytestpm.addhooks(Plugin())
assert len(warnings) == len(before) + 1
assert "deprecated" in warnings[-1]
def test_namespace_has_default_and_env_plugins(testdir):
p = testdir.makepyfile("""
import pytest
pytest.mark
""")
result = testdir.runpython(p)
assert result.ret == 0
def test_default_markers(testdir):
result = testdir.runpytest("--markers")
result.stdout.fnmatch_lines([
"*tryfirst*first*",
"*trylast*last*",
])
def test_importplugin_issue375(testdir, pytestpm):
"""Don't hide import errors when importing plugins and provide
an easy to debug message.
"""
testdir.syspathinsert(testdir.tmpdir)
testdir.makepyfile(qwe="import aaaa")
with pytest.raises(ImportError) as excinfo:
pytestpm.import_plugin("qwe")
expected = '.*Error importing plugin "qwe": No module named \'?aaaa\'?'
assert py.std.re.match(expected, str(excinfo.value))
class TestPytestPluginManager:
def test_register_imported_modules(self):
pm = PytestPluginManager()
mod = py.std.types.ModuleType("x.y.pytest_hello")
pm.register(mod)
assert pm.is_registered(mod)
l = pm.get_plugins()
assert mod in l
pytest.raises(ValueError, "pm.register(mod)")
pytest.raises(ValueError, lambda: pm.register(mod))
#assert not pm.is_registered(mod2)
assert pm.get_plugins() == l
def test_canonical_import(self, monkeypatch):
mod = py.std.types.ModuleType("pytest_xyz")
monkeypatch.setitem(py.std.sys.modules, 'pytest_xyz', mod)
pm = PytestPluginManager()
pm.import_plugin('pytest_xyz')
assert pm.get_plugin('pytest_xyz') == mod
assert pm.is_registered(mod)
def test_consider_module(self, testdir, pytestpm):
testdir.syspathinsert()
testdir.makepyfile(pytest_p1="#")
testdir.makepyfile(pytest_p2="#")
mod = py.std.types.ModuleType("temp")
mod.pytest_plugins = ["pytest_p1", "pytest_p2"]
pytestpm.consider_module(mod)
assert pytestpm.get_plugin("pytest_p1").__name__ == "pytest_p1"
assert pytestpm.get_plugin("pytest_p2").__name__ == "pytest_p2"
def test_consider_module_import_module(self, testdir):
pytestpm = get_config().pluginmanager
mod = py.std.types.ModuleType("x")
mod.pytest_plugins = "pytest_a"
aplugin = testdir.makepyfile(pytest_a="#")
reprec = testdir.make_hook_recorder(pytestpm)
#syspath.prepend(aplugin.dirpath())
py.std.sys.path.insert(0, str(aplugin.dirpath()))
pytestpm.consider_module(mod)
call = reprec.getcall(pytestpm.hook.pytest_plugin_registered.name)
assert call.plugin.__name__ == "pytest_a"
# check that it is not registered twice
pytestpm.consider_module(mod)
l = reprec.getcalls("pytest_plugin_registered")
assert len(l) == 1
def test_consider_env_fails_to_import(self, monkeypatch, pytestpm):
monkeypatch.setenv('PYTEST_PLUGINS', 'nonexisting', prepend=",")
with pytest.raises(ImportError):
pytestpm.consider_env()
def test_plugin_skip(self, testdir, monkeypatch):
p = testdir.makepyfile(skipping1="""
import pytest
pytest.skip("hello")
""")
p.copy(p.dirpath("skipping2.py"))
monkeypatch.setenv("PYTEST_PLUGINS", "skipping2")
result = testdir.runpytest("-rw", "-p", "skipping1", syspathinsert=True)
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stdout.fnmatch_lines([
"WI1*skipped plugin*skipping1*hello*",
"WI1*skipped plugin*skipping2*hello*",
])
def test_consider_env_plugin_instantiation(self, testdir, monkeypatch, pytestpm):
testdir.syspathinsert()
testdir.makepyfile(xy123="#")
monkeypatch.setitem(os.environ, 'PYTEST_PLUGINS', 'xy123')
l1 = len(pytestpm.get_plugins())
pytestpm.consider_env()
l2 = len(pytestpm.get_plugins())
assert l2 == l1 + 1
assert pytestpm.get_plugin('xy123')
pytestpm.consider_env()
l3 = len(pytestpm.get_plugins())
assert l2 == l3
def test_pluginmanager_ENV_startup(self, testdir, monkeypatch):
testdir.makepyfile(pytest_x500="#")
p = testdir.makepyfile("""
import pytest
def test_hello(pytestconfig):
plugin = pytestconfig.pluginmanager.get_plugin('pytest_x500')
assert plugin is not None
""")
monkeypatch.setenv('PYTEST_PLUGINS', 'pytest_x500', prepend=",")
result = testdir.runpytest(p, syspathinsert=True)
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
def test_import_plugin_importname(self, testdir, pytestpm):
pytest.raises(ImportError, 'pytestpm.import_plugin("qweqwex.y")')
pytest.raises(ImportError, 'pytestpm.import_plugin("pytest_qweqwx.y")')
testdir.syspathinsert()
pluginname = "pytest_hello"
testdir.makepyfile(**{pluginname: ""})
pytestpm.import_plugin("pytest_hello")
len1 = len(pytestpm.get_plugins())
pytestpm.import_plugin("pytest_hello")
len2 = len(pytestpm.get_plugins())
assert len1 == len2
plugin1 = pytestpm.get_plugin("pytest_hello")
assert plugin1.__name__.endswith('pytest_hello')
plugin2 = pytestpm.get_plugin("pytest_hello")
assert plugin2 is plugin1
def test_import_plugin_dotted_name(self, testdir, pytestpm):
pytest.raises(ImportError, 'pytestpm.import_plugin("qweqwex.y")')
pytest.raises(ImportError, 'pytestpm.import_plugin("pytest_qweqwex.y")')
testdir.syspathinsert()
testdir.mkpydir("pkg").join("plug.py").write("x=3")
pluginname = "pkg.plug"
pytestpm.import_plugin(pluginname)
mod = pytestpm.get_plugin("pkg.plug")
assert mod.x == 3
def test_consider_conftest_deps(self, testdir, pytestpm):
mod = testdir.makepyfile("pytest_plugins='xyz'").pyimport()
with pytest.raises(ImportError):
pytestpm.consider_conftest(mod)
class TestPytestPluginManagerBootstrapming:
def test_preparse_args(self, pytestpm):
pytest.raises(ImportError, lambda:
pytestpm.consider_preparse(["xyz", "-p", "hello123"]))
def test_plugin_prevent_register(self, pytestpm):
pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
l1 = pytestpm.get_plugins()
pytestpm.register(42, name="abc")
l2 = pytestpm.get_plugins()
assert len(l2) == len(l1)
assert 42 not in l2
def test_plugin_prevent_register_unregistered_alredy_registered(self, pytestpm):
pytestpm.register(42, name="abc")
l1 = pytestpm.get_plugins()
assert 42 in l1
pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
l2 = pytestpm.get_plugins()
assert 42 not in l2
|
youtube/cobalt
|
third_party/web_platform_tests/tools/pytest/testing/test_pluginmanager.py
|
Python
|
bsd-3-clause
| 12,170
|
from lxml import etree
import datetime, re
import asyncio, aiohttp
NINTENDO_LOGIN_PAGE = "https://id.nintendo.net/oauth/authorize"
SPLATNET_CALLBACK_URL = "https://splatoon.nintendo.net/users/auth/nintendo/callback"
SPLATNET_CLIENT_ID = "12af3d0a3a1f441eb900411bb50a835a"
SPLATNET_SCHEDULE_URL = "https://splatoon.nintendo.net/schedule"
class Rotation(object):
def __init__(self):
self.start = None
self.end = None
self.turf_maps = []
self.ranked_mode = None
self.ranked_maps = []
@property
def is_over(self):
return self.end < datetime.datetime.utcnow()
def __str__(self):
now = datetime.datetime.utcnow()
prefix = ''
if self.start > now:
minutes_delta = int((self.start - now) / datetime.timedelta(minutes=1))
hours = int(minutes_delta / 60)
minutes = minutes_delta % 60
prefix = '**In {0} hours and {1} minutes**:\n'.format(hours, minutes)
else:
prefix = '**Current Rotation**:\n'
fmt = 'Turf War is {0[0]} and {0[1]}\n{1} is {2[0]} and {2[1]}'
return prefix + fmt.format(self.turf_maps, self.ranked_mode, self.ranked_maps)
# based on https://github.com/Wiwiweb/SakuraiBot/blob/master/src/sakuraibot.py
async def get_new_splatnet_cookie(username, password):
parameters = {'client_id': SPLATNET_CLIENT_ID,
'response_type': 'code',
'redirect_uri': SPLATNET_CALLBACK_URL,
'username': username,
'password': password}
async with aiohttp.post(NINTENDO_LOGIN_PAGE, data=parameters) as response:
cookie = response.history[-1].cookies.get('_wag_session')
if cookie is None:
            print(response)
raise Exception("Couldn't retrieve cookie")
return cookie
def parse_splatnet_time(timestr):
# time is given as "MM/DD at H:MM [p|a].m. (PDT|PST)"
# there is a case where it goes over the year, e.g. 12/31 at ... and then 1/1 at ...
# this case is kind of weird though and is currently unexpected
# it could even end up being e.g. 12/31/2015 ... and then 1/1/2016 ...
# we'll never know
    regex = r'(?P<month>\d+)\/(?P<day>\d+)\s*at\s*(?P<hour>\d+)\:(?P<minutes>\d+)\s*(?P<p>a\.m\.|p\.m\.)\s*\((?P<tz>.+)\)'
m = re.match(regex, timestr.strip())
if m is None:
raise RuntimeError('Apparently the timestamp "{}" does not match the regex.'.format(timestr))
matches = m.groupdict()
tz = matches['tz'].strip().upper()
offset = None
if tz == 'PDT':
# EDT is UTC - 4, PDT is UTC - 7, so you need +7 to make it UTC
offset = +7
elif tz == 'PST':
# EST is UTC - 5, PST is UTC - 8, so you need +8 to make it UTC
offset = +8
else:
raise RuntimeError('Unknown timezone found: {}'.format(tz))
pm = matches['p'].replace('.', '') # a.m. -> am
current_time = datetime.datetime.utcnow()
# Kind of hacky.
fmt = "{2}/{0[month]}/{0[day]} {0[hour]}:{0[minutes]} {1}".format(matches, pm, current_time.year)
splatoon_time = datetime.datetime.strptime(fmt, '%Y/%m/%d %I:%M %p') + datetime.timedelta(hours=offset)
# check for new year
if current_time.month == 12 and splatoon_time.month == 1:
        splatoon_time = splatoon_time.replace(year=current_time.year + 1)
return splatoon_time
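# Example (hypothetical input): "06/25 at 8:00 p.m. (PDT)" parses to
# 2016-06-26 03:00 UTC when run during 2016 (8 p.m. PDT + 7 h = 3 a.m. next day).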
async def get_splatnet_schedule(splatnet_cookie):
cookies = {'_wag_session': splatnet_cookie}
"""
This is repeated 3 times:
<span class"stage-schedule"> ... </span> <--- figure out how to parse this
<div class="stage-list">
<div class="match-type">
<span class="icon-regular-match"></span> <--- turf war
</div>
... <span class="map-name"> ... </span>
... <span class="map-name"> ... </span>
</div>
<div class="stage-list">
<div class="match-type">
<span class="icon-earnest-match"></span> <--- ranked
</div>
... <span class="rule-description"> ... </span> <--- Splat Zones, Rainmaker, Tower Control
... <span class="map-name"> ... </span>
... <span class="map-name"> ... </span>
</div>
"""
schedule = []
async with aiohttp.get(SPLATNET_SCHEDULE_URL, cookies=cookies, data={'locale':"en"}) as response:
text = await response.text()
root = etree.fromstring(text, etree.HTMLParser())
stage_schedule_nodes = root.xpath("//*[@class='stage-schedule']")
stage_list_nodes = root.xpath("//*[@class='stage-list']")
if len(stage_schedule_nodes)*2 != len(stage_list_nodes):
raise RuntimeError("SplatNet changed, need to update the parsing!")
for sched_node in stage_schedule_nodes:
r = Rotation()
start_time, end_time = sched_node.text.split("~")
r.start = parse_splatnet_time(start_time)
r.end = parse_splatnet_time(end_time)
tw_list_node = stage_list_nodes.pop(0)
r.turf_maps = tw_list_node.xpath(".//*[@class='map-name']/text()")
ranked_list_node = stage_list_nodes.pop(0)
r.ranked_maps = ranked_list_node.xpath(".//*[@class='map-name']/text()")
r.ranked_mode = ranked_list_node.xpath(".//*[@class='rule-description']/text()")[0]
schedule.append(r)
return schedule
|
plusreed/foxpy
|
plugins/utils/maps.py
|
Python
|
mit
| 5,375
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------
# Copyright (c) 2010-2019 Denis Machard
# This file is part of the extensive automation project
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
# -------------------------------------------------------------------
import sys
import os
try:
import cPickle
except ImportError: # support python 3
import pickle as cPickle
STORAGE_MODE_FILE = "FILE"
STORAGE_MODE_MEM = "MEM"
class TestDataStorage:
"""
Test data storage
"""
def __init__(self, path, storageMode=STORAGE_MODE_MEM):
"""
Constructor for the test data storage
"""
self.__filename = 'storage.dat'
self.__path = path
self.__storageMode = storageMode
self.__storageData = {}
def save_data(self, data):
"""
Save data on the storage
"""
validData = False
if isinstance(data, str):
validData = True
if isinstance(data, list):
validData = True
if isinstance(data, dict):
validData = True
if isinstance(data, tuple):
validData = True
if self.__storageMode == STORAGE_MODE_MEM:
if validData:
self.__storageData = data
storagePath = '%s/%s' % (self.__path, self.__filename)
storagePath = os.path.normpath(storagePath)
try:
if validData:
fd = open(storagePath, 'wb')
fd.write(cPickle.dumps(data))
fd.close()
except Exception as e:
self.error("[save_data] %s" % str(e))
def load_data(self):
"""
Load data from the storage
"""
if self.__storageMode == STORAGE_MODE_MEM:
return self.__storageData
storagePath = '%s/%s' % (self.__path, self.__filename)
storagePath = os.path.normpath(storagePath)
        # check whether the storage.dat file exists
if not os.path.exists(storagePath):
return {}
# read the file and unpickle the content
try:
fd = open(storagePath, "r")
data = fd.read()
fd.close()
return cPickle.loads(data)
except Exception as e:
self.error("[load_data] %s" % str(e))
return None
def reset_data(self):
"""
Reset data from the storage
"""
if self.__storageMode == STORAGE_MODE_MEM:
del self.__storageData
self.__storageData = {}
return True
storagePath = '%s/%s' % (self.__path, self.__filename)
storagePath = os.path.normpath(storagePath)
        # check whether the file exists
if not os.path.exists(storagePath):
return {}
# Empty the file storage.dat
try:
fd = open(storagePath, "wb")
fd.write("")
fd.close()
return True
except Exception as e:
self.error("[reset_data] %s" % str(e))
return False
def error(self, err):
"""
Log error
"""
sys.stderr.write("[%s] %s\n" % (self.__class__.__name__, err))
TDS = None
def instance():
"""
"""
if TDS:
return TDS
def initialize(path):
"""
"""
global TDS
TDS = TestDataStorage(path=path)
def finalize():
"""
"""
global TDS
if TDS:
TDS = None
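# Typical lifecycle (hypothetical):
#   initialize('/tmp/testdata'); instance().save_data({'key': 'value'})
#   data = instance().load_data(); finalize()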
|
dmachard/extensive-testing
|
src/ea/testexecutorlib/TestDataStorage.py
|
Python
|
lgpl-2.1
| 4,179
|
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from tests import support
from tests.support import mock
import dnf.cli.commands.makecache as makecache
import dnf.pycomp
class MakeCacheCommandTest(support.TestCase):
def setUp(self):
self.base = support.MockBase()
self.cli = self.base.mock_cli()
@staticmethod
@mock.patch('dnf.Base.fill_sack', new=mock.MagicMock())
def _do_makecache(cmd):
return cmd.run(['timer'])
def assert_last_info(self, logger, msg):
self.assertEqual(logger.info.mock_calls[-1], mock.call(msg))
@mock.patch('dnf.cli.commands.makecache.logger',
new_callable=support.mock_logger)
@mock.patch('dnf.cli.commands._', dnf.pycomp.NullTranslations().ugettext)
@mock.patch('dnf.util.on_ac_power', return_value=True)
def test_makecache_timer(self, _on_ac_power, logger):
cmd = makecache.MakeCacheCommand(self.cli)
self.base.conf.metadata_timer_sync = 0
self.assertFalse(self._do_makecache(cmd))
self.assert_last_info(logger, u'Metadata timer caching disabled.')
self.base.conf.metadata_timer_sync = 5 # resync after 5 seconds
self.base._persistor.since_last_makecache = mock.Mock(return_value=3)
self.assertFalse(self._do_makecache(cmd))
self.assert_last_info(logger, u'Metadata cache refreshed recently.')
self.base._persistor.since_last_makecache = mock.Mock(return_value=10)
self.base._sack = 'nonempty'
r = support.MockRepo("glimpse", None)
self.base.repos.add(r)
# regular case 1: metadata is already expired:
r.metadata_expire_in = mock.Mock(return_value=(False, 0))
r.sync_strategy = dnf.repo.SYNC_TRY_CACHE
self.assertTrue(self._do_makecache(cmd))
self.assert_last_info(logger, u'Metadata cache created.')
self.assertTrue(r._expired)
r._expired = False
# regular case 2: metadata is cached and will expire later than
# metadata_timer_sync:
r.metadata_expire_in = mock.Mock(return_value=(True, 100))
r.sync_strategy = dnf.repo.SYNC_TRY_CACHE
self.assertTrue(self._do_makecache(cmd))
self.assert_last_info(logger, u'Metadata cache created.')
self.assertFalse(r._expired)
        # regular case 3: metadata is cached but will expire before
# metadata_timer_sync:
r.metadata_expire_in = mock.Mock(return_value=(True, 4))
r.sync_strategy = dnf.repo.SYNC_TRY_CACHE
self.assertTrue(self._do_makecache(cmd))
self.assert_last_info(logger, u'Metadata cache created.')
self.assertTrue(r._expired)
@mock.patch('dnf.cli.commands.makecache.logger',
new_callable=support.mock_logger)
@mock.patch('dnf.cli.commands._', dnf.pycomp.NullTranslations().ugettext)
@mock.patch('dnf.util.on_ac_power', return_value=False)
def test_makecache_timer_battery(self, _on_ac_power, logger):
cmd = makecache.MakeCacheCommand(self.cli)
self.base.conf.metadata_timer_sync = 5
self.assertFalse(self._do_makecache(cmd))
msg = u'Metadata timer caching disabled when running on a battery.'
self.assert_last_info(logger, msg)
@mock.patch('dnf.cli.commands._', dnf.pycomp.NullTranslations().ugettext)
@mock.patch('dnf.util.on_ac_power', return_value=None)
def test_makecache_timer_battery2(self, _on_ac_power):
cmd = makecache.MakeCacheCommand(self.cli)
self.base.conf.metadata_timer_sync = 5
self.assertTrue(self._do_makecache(cmd))
|
kudlav/dnf
|
tests/cli/commands/test_makecache.py
|
Python
|
gpl-2.0
| 4,524
|
from json import dumps, loads
from datetime import datetime
EPOCH = datetime.utcfromtimestamp(0)
ADAYINSECONDS = 24 * 3600
def jlencode(iterable):
if isinstance(iterable, (dict, str, unicode)):
iterable = [iterable]
return u'\n'.join(jsonencode(o) for o in iterable)
def jldecode(lineiterable):
for line in lineiterable:
yield loads(line)
def jsonencode(o):
return dumps(o, default=jsondefault)
def jsondefault(o):
if isinstance(o, datetime):
delta = o - EPOCH
u = delta.microseconds
s = delta.seconds
d = delta.days
millis = (u + (s + d * ADAYINSECONDS) * 1e6) / 1000
return int(millis)
else:
return str(o)
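# Example: jsonencode({'ts': datetime(1970, 1, 2)}) -> '{"ts": 86400000}'
# (one day after the epoch is 86,400,000 ms)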
|
kalessin/python-hubstorage
|
hubstorage/serialization.py
|
Python
|
bsd-3-clause
| 714
|
# This code was originally contributed by Jeffrey Harris.
import datetime
import struct
from six.moves import winreg
from six import text_type
try:
import ctypes
from ctypes import wintypes
except ValueError:
# ValueError is raised on non-Windows systems for some horrible reason.
raise ImportError("Running tzwin on non-Windows system")
from ._common import tzname_in_python2
__all__ = ["tzwin", "tzwinlocal", "tzres"]
ONEWEEK = datetime.timedelta(7)
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
def _settzkeyname():
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try:
winreg.OpenKey(handle, TZKEYNAMENT).Close()
TZKEYNAME = TZKEYNAMENT
except WindowsError:
TZKEYNAME = TZKEYNAME9X
handle.Close()
return TZKEYNAME
TZKEYNAME = _settzkeyname()
class tzres(object):
"""
Class for accessing `tzres.dll`, which contains timezone name related
resources.
    .. versionadded:: 2.5.0
"""
p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char
def __init__(self, tzres_loc='tzres.dll'):
# Load the user32 DLL so we can load strings from tzres
user32 = ctypes.WinDLL('user32')
# Specify the LoadStringW function
user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
wintypes.UINT,
wintypes.LPWSTR,
ctypes.c_int)
self.LoadStringW = user32.LoadStringW
self._tzres = ctypes.WinDLL(tzres_loc)
self.tzres_loc = tzres_loc
def load_name(self, offset):
"""
Load a timezone name from a DLL offset (integer).
>>> from dateutil.tzwin import tzres
>>> tzr = tzres()
>>> print(tzr.load_name(112))
'Eastern Standard Time'
:param offset:
A positive integer value referring to a string from the tzres dll.
        .. note::
            Offsets found in the registry are generally of the form
            `@tzres.dll,-114`. The offset in this case is 114, not -114.
"""
resource = self.p_wchar()
lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
return resource[:nchar]
def name_from_string(self, tzname_str):
"""
Parse strings as returned from the Windows registry into the time zone
name as defined in the registry.
>>> from dateutil.tzwin import tzres
>>> tzr = tzres()
>>> print(tzr.name_from_string('@tzres.dll,-251'))
'Dateline Daylight Time'
>>> print(tzr.name_from_string('Eastern Standard Time'))
'Eastern Standard Time'
:param tzname_str:
A timezone name string as returned from a Windows registry key.
:return:
Returns the localized timezone string from tzres.dll if the string
is of the form `@tzres.dll,-offset`, else returns the input string.
"""
if not tzname_str.startswith('@'):
return tzname_str
name_splt = tzname_str.split(',-')
try:
offset = int(name_splt[1])
except:
raise ValueError("Malformed timezone string.")
return self.load_name(offset)
class tzwinbase(datetime.tzinfo):
"""tzinfo class based on win32's timezones available in the registry."""
def __eq__(self, other):
# Compare on all relevant dimensions, including name.
return (isinstance(other, tzwinbase) and
(self._stdoffset == other._stdoffset and
self._dstoffset == other._dstoffset and
self._stddayofweek == other._stddayofweek and
self._dstdayofweek == other._dstdayofweek and
self._stdweeknumber == other._stdweeknumber and
self._dstweeknumber == other._dstweeknumber and
self._stdhour == other._stdhour and
self._dsthour == other._dsthour and
self._stdminute == other._stdminute and
self._dstminute == other._dstminute and
self._stdname == other._stdname and
self._dstname == other._dstname))
def __ne__(self, other):
return not self.__eq__(other)
def utcoffset(self, dt):
if self._isdst(dt):
return datetime.timedelta(minutes=self._dstoffset)
else:
return datetime.timedelta(minutes=self._stdoffset)
def dst(self, dt):
if self._isdst(dt):
minutes = self._dstoffset - self._stdoffset
return datetime.timedelta(minutes=minutes)
else:
return datetime.timedelta(0)
@tzname_in_python2
def tzname(self, dt):
if self._isdst(dt):
return self._dstname
else:
return self._stdname
@staticmethod
def list():
"""Return a list of all time zones known to the system."""
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
tzkey = winreg.OpenKey(handle, TZKEYNAME)
result = [winreg.EnumKey(tzkey, i)
for i in range(winreg.QueryInfoKey(tzkey)[0])]
tzkey.Close()
handle.Close()
return result
def display(self):
return self._display
def _isdst(self, dt):
if not self._dstmonth:
# dstmonth == 0 signals the zone has no daylight saving time
return False
dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek,
self._dsthour, self._dstminute,
self._dstweeknumber)
dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek,
self._stdhour, self._stdminute,
self._stdweeknumber)
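        # Southern-hemisphere zones start DST late in the year and end it early
        # the next year, so dston > dstoff there and the containment test flips.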
if dston < dstoff:
return dston <= dt.replace(tzinfo=None) < dstoff
else:
return not dstoff <= dt.replace(tzinfo=None) < dston
class tzwin(tzwinbase):
def __init__(self, name):
self._name = name
# multiple contexts only possible in 2.7 and 3.1, we still support 2.6
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
tzkeyname = text_type("{kn}\{name}").format(kn=TZKEYNAME, name=name)
with winreg.OpenKey(handle, tzkeyname) as tzkey:
keydict = valuestodict(tzkey)
self._stdname = keydict["Std"]
self._dstname = keydict["Dlt"]
self._display = keydict["Display"]
# See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
tup = struct.unpack("=3l16h", keydict["TZI"])
self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1
# for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
(self._stdmonth,
self._stddayofweek, # Sunday = 0
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[4:9]
(self._dstmonth,
self._dstdayofweek, # Sunday = 0
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[12:17]
def __repr__(self):
return "tzwin(%s)" % repr(self._name)
def __reduce__(self):
return (self.__class__, (self._name,))
class tzwinlocal(tzwinbase):
def __init__(self):
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
keydict = valuestodict(tzlocalkey)
self._stdname = keydict["StandardName"]
self._dstname = keydict["DaylightName"]
try:
tzkeyname = text_type('{kn}\{sn}').format(kn=TZKEYNAME,
sn=self._stdname)
with winreg.OpenKey(handle, tzkeyname) as tzkey:
_keydict = valuestodict(tzkey)
self._display = _keydict["Display"]
except OSError:
self._display = None
self._stdoffset = -keydict["Bias"]-keydict["StandardBias"]
self._dstoffset = self._stdoffset-keydict["DaylightBias"]
# For reasons unclear, in this particular key, the day of week has been
# moved to the END of the SYSTEMTIME structure.
tup = struct.unpack("=8h", keydict["StandardStart"])
(self._stdmonth,
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[1:5]
self._stddayofweek = tup[7]
tup = struct.unpack("=8h", keydict["DaylightStart"])
(self._dstmonth,
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[1:5]
self._dstdayofweek = tup[7]
def __repr__(self):
return "tzwinlocal()"
def __str__(self):
# str will return the standard name, not the daylight name.
return "tzwinlocal(%s)" % repr(self._stdname)
def __reduce__(self):
return (self.__class__, ())
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
""" dayofweek == 0 means Sunday, whichweek 5 means last instance """
first = datetime.datetime(year, month, 1, hour, minute)
    # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
    # because 7 % 7 = 0
weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
wd = weekdayone + ((whichweek - 1) * ONEWEEK)
if (wd.month != month):
wd -= ONEWEEK
return wd
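# Example: picknthweekday(2016, 11, 0, 2, 0, 1) -> datetime(2016, 11, 6, 2, 0),
# the first Sunday of November 2016 at 02:00.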
def valuestodict(key):
"""Convert a registry key's values to a dictionary."""
dout = {}
size = winreg.QueryInfoKey(key)[1]
tz_res = None
for i in range(size):
key_name, value, dtype = winreg.EnumValue(key, i)
if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN:
# If it's a DWORD (32-bit integer), it's stored as unsigned - convert
# that to a proper signed integer
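            # e.g. a stored DWORD 0xFFFFFE20 (4294966816) becomes -480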
if value & (1 << 31):
value = value - (1 << 32)
elif dtype == winreg.REG_SZ:
# If it's a reference to the tzres DLL, load the actual string
if value.startswith('@tzres'):
tz_res = tz_res or tzres()
value = tz_res.name_from_string(value)
value = value.rstrip('\x00') # Remove trailing nulls
dout[key_name] = value
return dout
|
Maximilian-Reuter/SickRage-1
|
lib/dateutil/tz/win.py
|
Python
|
gpl-3.0
| 10,943
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import job_search_custom_ranking_search
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
def test_search_jobs_custom_ranking(tenant):
jobs = job_search_custom_ranking_search.search_jobs(PROJECT_ID, tenant)
for job in jobs:
assert "projects/" in job
|
googleapis/python-talent
|
samples/snippets/job_search_custom_ranking_search_test.py
|
Python
|
apache-2.0
| 854
|
import unittest
import tests
unittest.TextTestRunner(verbosity=2).run(tests.suite())
|
JasonLai256/pyExpenses
|
run_test.py
|
Python
|
bsd-3-clause
| 86
|
from keras.applications import imagenet_utils
from keras.applications import mobilenet
def dummyPreprocessInput(image):
image -= 127.5
return image
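# e.g. pixel values in [0, 255] are shifted to [-127.5, 127.5]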
def getPreprocessFunction(preprocessType):
if preprocessType == "dummy":
return dummyPreprocessInput
elif preprocessType == "mobilenet":
return mobilenet.preprocess_input
elif preprocessType == "imagenet":
return imagenet_utils.preprocess_input
else:
raise Exception(preprocessType + " not supported")
|
SlipknotTN/Dogs-Vs-Cats-Playground
|
deep_learning/keras/lib/preprocess/preprocess.py
|
Python
|
mit
| 511
|
# Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the story viewer page"""
from __future__ import annotations
import logging
from core import feconf
from core import utils
from core.constants import constants
from core.controllers import acl_decorators
from core.controllers import base
from core.domain import learner_progress_services
from core.domain import question_services
from core.domain import skill_fetchers
from core.domain import story_domain
from core.domain import story_fetchers
from core.domain import story_services
from core.domain import summary_services
from core.domain import topic_fetchers
class StoryPageDataHandler(base.BaseHandler):
"""Manages the data that needs to be displayed to a learner on the
story viewer page.
"""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS = {
'classroom_url_fragment': constants.SCHEMA_FOR_CLASSROOM_URL_FRAGMENTS,
'topic_url_fragment': constants.SCHEMA_FOR_TOPIC_URL_FRAGMENTS,
'story_url_fragment': constants.SCHEMA_FOR_STORY_URL_FRAGMENTS,
}
HANDLER_ARGS_SCHEMAS = {
'GET': {},
}
@acl_decorators.can_access_story_viewer_page
def get(self, story_id):
"""Handles GET requests."""
story = story_fetchers.get_story_by_id(story_id)
topic_id = story.corresponding_topic_id
topic_name = topic_fetchers.get_topic_by_id(topic_id).name
completed_node_ids = [
completed_node.id for completed_node in
story_fetchers.get_completed_nodes_in_story(self.user_id, story_id)]
ordered_node_dicts = [
node.to_dict() for node in story.story_contents.get_ordered_nodes()
]
for node in ordered_node_dicts:
node['completed'] = False
if node['id'] in completed_node_ids:
node['completed'] = True
exp_ids = [
node['exploration_id'] for node in ordered_node_dicts]
exp_summary_dicts = (
summary_services.get_displayable_exp_summary_dicts_matching_ids(
exp_ids, user=self.user))
for ind, node in enumerate(ordered_node_dicts):
node['exp_summary_dict'] = exp_summary_dicts[ind]
self.values.update({
'story_id': story.id,
'story_title': story.title,
'story_description': story.description,
'story_nodes': ordered_node_dicts,
'topic_name': topic_name,
'meta_tag_content': story.meta_tag_content
})
self.render_json(self.values)
class StoryProgressHandler(base.BaseHandler):
"""Marks a story node as completed after completing and returns exp ID of
next chapter (if applicable).
"""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS = {
'classroom_url_fragment': constants.SCHEMA_FOR_CLASSROOM_URL_FRAGMENTS,
'topic_url_fragment': constants.SCHEMA_FOR_TOPIC_URL_FRAGMENTS,
'story_url_fragment': constants.SCHEMA_FOR_STORY_URL_FRAGMENTS,
'node_id': {
'schema': {
'type': 'basestring',
'validators': [{
'id': 'is_regex_matched',
'regex_pattern': ('%s[0-9]+' % story_domain.NODE_ID_PREFIX)
}]
}
}
}
HANDLER_ARGS_SCHEMAS = {
'GET': {},
'POST': {}
}
def _record_node_completion(
self, story_id, node_id, completed_node_ids, ordered_nodes):
"""Records node completion."""
if not constants.ENABLE_NEW_STRUCTURE_VIEWER_UPDATES:
raise self.PageNotFoundException
try:
story_fetchers.get_node_index_by_story_id_and_node_id(
story_id, node_id)
except Exception as e:
raise self.PageNotFoundException(e)
next_exp_ids = []
next_node_id = None
if node_id not in completed_node_ids:
story_services.record_completed_node_in_story_context(
self.user_id, story_id, node_id)
completed_nodes = story_fetchers.get_completed_nodes_in_story(
self.user_id, story_id)
completed_node_ids = [
completed_node.id for completed_node in completed_nodes]
for node in ordered_nodes:
if node.id not in completed_node_ids:
next_exp_ids = [node.exploration_id]
next_node_id = node.id
break
return (next_exp_ids, next_node_id, completed_node_ids)
@acl_decorators.can_access_story_viewer_page
def get(self, story_id, node_id):
"""Handles GET requests."""
(
_, _, classroom_url_fragment, topic_url_fragment,
story_url_fragment, node_id) = self.request.path.split('/')
story = story_fetchers.get_story_by_id(story_id)
completed_nodes = story_fetchers.get_completed_nodes_in_story(
self.user_id, story_id)
ordered_nodes = story.story_contents.get_ordered_nodes()
        # If the user is a returning user who has completed nodes in the past,
        # redirect to the story page so that they can continue from where they
        # left off. Likewise, redirect if the node id is not the first node in
        # the story.
if completed_nodes or node_id != ordered_nodes[0].id:
self.redirect(
'/learn/%s/%s/story/%s' % (
classroom_url_fragment, topic_url_fragment,
story_url_fragment))
return
(next_exp_ids, next_node_id, _) = (
self._record_node_completion(story_id, node_id, [], ordered_nodes))
if next_node_id is None:
self.redirect(
'/learn/%s/%s/story/%s' % (
classroom_url_fragment, topic_url_fragment,
story_url_fragment))
return
redirect_url = '%s/%s' % (
feconf.EXPLORATION_URL_PREFIX, next_exp_ids[0])
redirect_url = utils.set_url_query_parameter(
redirect_url, 'classroom_url_fragment', classroom_url_fragment)
redirect_url = utils.set_url_query_parameter(
redirect_url, 'topic_url_fragment', topic_url_fragment)
redirect_url = utils.set_url_query_parameter(
redirect_url, 'story_url_fragment', story_url_fragment)
redirect_url = utils.set_url_query_parameter(
redirect_url, 'node_id', next_node_id)
self.redirect(redirect_url)
@acl_decorators.can_access_story_viewer_page
def post(self, story_id, node_id):
story = story_fetchers.get_story_by_id(story_id)
if story is None:
logging.error(
'Could not find a story corresponding to '
'%s id.' % story_id)
self.render_json({})
return
topic = topic_fetchers.get_topic_by_id(story.corresponding_topic_id)
completed_nodes = story_fetchers.get_completed_nodes_in_story(
self.user_id, story_id)
completed_node_ids = [
completed_node.id for completed_node in completed_nodes]
ordered_nodes = story.story_contents.get_ordered_nodes()
(next_exp_ids, next_node_id, completed_node_ids) = (
self._record_node_completion(
story_id, node_id, completed_node_ids, ordered_nodes))
ready_for_review_test = False
exp_summaries = (
summary_services.get_displayable_exp_summary_dicts_matching_ids(
next_exp_ids))
# If there are no questions for any of the acquired skills that the
# learner has completed, do not show review tests.
acquired_skills = skill_fetchers.get_multi_skills(
story.get_acquired_skill_ids_for_node_ids(
completed_node_ids
))
acquired_skill_ids = [skill.id for skill in acquired_skills]
questions_available = len(
question_services.get_questions_by_skill_ids(
1, acquired_skill_ids, False)) > 0
learner_completed_story = len(completed_node_ids) == len(ordered_nodes)
learner_at_review_point_in_story = (
len(exp_summaries) != 0 and (
                len(completed_node_ids) %
                constants.NUM_EXPLORATIONS_PER_REVIEW_TEST == 0)
)
if questions_available and (
learner_at_review_point_in_story or learner_completed_story):
ready_for_review_test = True
        # If there is no next_node_id, mark the story as completed; otherwise
        # record that the learner has started the story.
if next_node_id is None:
learner_progress_services.mark_story_as_completed(
self.user_id, story_id)
else:
learner_progress_services.record_story_started(
self.user_id, story.id)
completed_story_ids = (
learner_progress_services.get_all_completed_story_ids(
self.user_id))
story_ids_in_topic = []
for story in topic.canonical_story_references:
story_ids_in_topic.append(story.story_id)
is_topic_completed = set(story_ids_in_topic).intersection(
set(completed_story_ids))
        # If at least one story in the topic is completed, mark the topic as
        # learnt; otherwise record that the learner has started the topic.
if not is_topic_completed:
learner_progress_services.record_topic_started(
self.user_id, topic.id)
else:
learner_progress_services.mark_topic_as_learnt(
self.user_id, topic.id)
return self.render_json({
'summaries': exp_summaries,
'ready_for_review_test': ready_for_review_test,
'next_node_id': next_node_id
})
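# A minimal standalone sketch (not Oppia code) of the review-point rule used
# in StoryProgressHandler.post above: a learner reaches a review point when a
# next exploration exists and the completed-node count is a multiple of
# NUM_EXPLORATIONS_PER_REVIEW_TEST. The interval value of 3 is assumed here
# purely for illustration.
def _at_review_point_sketch(num_completed, num_summaries, interval=3):
    return num_summaries != 0 and num_completed % interval == 0
assert _at_review_point_sketch(3, 1)      # review test after a full interval
assert not _at_review_point_sketch(4, 1)  # mid-interval, no review test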
|
brianrodri/oppia
|
core/controllers/story_viewer.py
|
Python
|
apache-2.0
| 10,500
|
# -*- coding: utf-8 -*-
# Copyright(C) 2015 Baptiste Delpey
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.backend import Module, BackendConfig
from weboob.capabilities.bank import CapBank, AccountNotFound
from weboob.capabilities.base import find_object
from weboob.tools.value import ValueBackendPassword, Value
from .proxy_browser import ProxyBrowser
__all__ = ['BnpcartesentrepriseModule']
class BnpcartesentrepriseModule(Module, CapBank):
NAME = 'bnpcards'
DESCRIPTION = u'BNP Cartes Entreprises'
MAINTAINER = u'Baptiste Delpey'
EMAIL = 'bdelpey@budget-insight.fr'
LICENSE = 'LGPLv3+'
VERSION = '1.6'
CONFIG = BackendConfig(ValueBackendPassword('login', label='Identifiant', masked=False),
ValueBackendPassword('password', label='Code personnel'),
Value('type', label='Profil de connexion', default='1',
choices={'1': 'Titulaire',
'2': 'Gestionnaire'}))
BROWSER = ProxyBrowser
def create_default_browser(self):
return self.create_browser(self.config['type'].get(),
self.config['login'].get(),
self.config['password'].get())
def get_account(self, _id):
return find_object(self.browser.iter_accounts(), id=_id, error=AccountNotFound)
def iter_accounts(self):
for acc in self.browser.iter_accounts():
acc._bisoftcap = {'all': {'softcap_day':5,'day_for_softcap':100}}
yield acc
def iter_coming(self, account):
for tr in self.browser.get_transactions(account):
if tr._coming:
yield tr
def iter_history(self, account):
for tr in self.browser.get_transactions(account):
if not tr._coming:
yield tr
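# Hedged illustration (not part of this module): iter_coming and iter_history
# above split one transaction stream on the private _coming flag. The _FakeTr
# class below is invented for this sketch only.
class _FakeTr(object):
    def __init__(self, label, coming):
        self.label, self._coming = label, coming
_sample = [_FakeTr('card payment', True), _FakeTr('settled debit', False)]
assert [t.label for t in _sample if t._coming] == ['card payment']
assert [t.label for t in _sample if not t._coming] == ['settled debit']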
|
vicnet/weboob
|
modules/bnpcards/module.py
|
Python
|
lgpl-3.0
| 2,575
|
# -*- coding:utf-8 -*-
#
# Copyright © 2015 The Spyder Development Team
# Copyright © 2014 Gonzalo Peña-Castellanos (@goanpeca)
#
# Licensed under the terms of the MIT License
"""
Application entry point.
"""
# Standard library imports
import os
import sys
# Local imports
from conda_manager.utils.qthelpers import qapplication
from conda_manager.widgets.main_window import MainWindow
# Set Windows taskbar icon
if os.name == 'nt':
try:
from ctypes import windll
windll.shell32.SetCurrentProcessExplicitAppUserModelID("conda-manager")
except AttributeError:
pass
def main():
app = qapplication(sys.argv, test_time=45)
window = MainWindow()
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
spyder-ide/conda-manager
|
conda_manager/app/main.py
|
Python
|
mit
| 764
|
# Global Forest Watch API
# Copyright (C) 2013 World Resource Institute
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""This module supports accessing countries data. todo- add loss outside plantations to umd"""
import json
from gfw import cdb
from gfw.forestchange import umd
from gfw import stories
class CountrySql(object):
INDEX = """
SELECT countries.iso, countries.name, countries.enabled,
countries.lat, countries.lng, countries.extent, countries.gva,
countries.gva_percent, countries.employment, countries.indepth,
countries.national_policy_link, countries.policy_links,
countries.national_policy_title, countries.convention_cbd,
countries.convention_unfccc, countries.convention_kyoto,
countries.convention_unccd, countries.convention_itta,
countries.convention_cites, countries.convention_ramsar,
countries.convention_world_heritage, countries.convention_nlbi,
countries.convention_ilo, countries.ministry_link,
countries.external_links, countries.dataset_link,
countries.emissions, countries.carbon_stocks,
countries.country_alt, alerts.count AS alerts_count
FROM gfw2_countries AS countries
LEFT OUTER JOIN (
SELECT COUNT(*) AS count, iso
FROM forma_api
WHERE date >= now() - INTERVAL '{interval}'
GROUP BY iso)
AS alerts ON alerts.iso = countries.iso
ORDER BY countries.name {order}
"""
SHOW = """
SELECT countries.iso, countries.name, countries.enabled,
countries.lat, countries.lng, countries.extent, countries.gva,
countries.gva_percent, countries.employment, countries.indepth,
countries.national_policy_link, countries.policy_links,
countries.national_policy_title, countries.convention_cbd,
countries.convention_unfccc, countries.convention_kyoto,
countries.convention_unccd, countries.convention_itta,
countries.convention_cites, countries.convention_ramsar,
countries.convention_world_heritage, countries.convention_nlbi,
countries.convention_ilo, countries.ministry_link,
countries.external_links, countries.dataset_link,
countries.emissions, countries.carbon_stocks,
countries.country_alt
FROM gfw2_countries AS countries
WHERE countries.iso = UPPER('{iso}')
LIMIT 1
"""
TOPO_JSON = """
SELECT the_geom
FROM forest_cov_glob_v3
WHERE country_code = UPPER('{iso}')
UNION
SELECT the_geom
FROM ne_50m_admin_0_countries
WHERE adm0_a3 = UPPER('{iso}')"""
SUBNAT_BOUNDS = """
SELECT cartodb_id, iso, id_1, name_1, bounds
FROM gadm_1_all
WHERE iso = UPPER('{iso}')
ORDER BY id_1 asc"""
TENURE = """
SELECT tenure_government, tenure_owned, tenure_owned_individuals,
tenure_reserved, GREATEST(tenure_government, tenure_owned,
tenure_owned_individuals, tenure_owned_individuals,
tenure_reserved) AS max
FROM gfw2_countries
WHERE iso = UPPER('{iso}')"""
FORESTS = """
SELECT unnest(array['forest_regenerated', 'forest_primary',
'forest_planted']) AS type, unnest(array[COALESCE(
forest_regenerated, 0), COALESCE(forest_primary, 0), COALESCE(
forest_planted, 0)]) AS percent
FROM gfw2_countries
WHERE iso = UPPER('{iso}')"""
FORMA = """
SELECT date_trunc('month', date) AS date, COUNT(*) AS alerts
FROM forma_api
WHERE iso = UPPER('{iso}')
GROUP BY date_trunc('month', date)
ORDER BY date_trunc('month', date) ASC"""
BOUNDS = """
SELECT bounds
FROM country_mask
WHERE code = UPPER('{iso}')"""
BURNED_FOREST = """
SELECT area_burned_forest, year
FROM burned_forest
WHERE iso = UPPER('{iso}')
ORDER BY year asc"""
REFORESTATION = """
SELECT reforestation_rate
FROM gfw2_countries
WHERE iso = UPPER('{iso}')"""
FOREST_CERTIFICATION = """
SELECT unnest(array['total_area_certified', 'percent_fsc',
'percent_pef','percent_other']) AS type, unnest(array[COALESCE(
total_area_certified, 0), COALESCE(percent_fsc, 0), COALESCE(
percent_pef, 0), COALESCE(percent_other, 0)]) AS value
FROM gfw2_countries
WHERE iso = UPPER('{iso}')"""
LOSS_OUTSIDE_PLANTATION = """
        SELECT loss_outside, perc_loss_outside, iso, threshold, year
FROM loss_outside_plantations
WHERE iso = UPPER('{iso}')
AND threshold = {thresh}
AND year > 2012
ORDER BY year asc"""
def _handler(response):
if response.status_code == 200:
data = json.loads(response.content)
if 'rows' in data:
return data['rows']
else:
return data
else:
raise Exception(response.content)
def _index(args):
    if 'order' not in args:
        args['order'] = ''
    if 'interval' not in args:
        args['interval'] = '12 Months'
query = CountrySql.INDEX.format(**args)
rows = _handler(cdb.execute(query))
return dict(countries=rows)
def _show(args):
query = CountrySql.SHOW.format(**args)
rows = _handler(cdb.execute(query))
return rows[0]
def _getTopoJson(args):
query = CountrySql.TOPO_JSON.format(**args)
rows = _handler(
cdb.execute(query, params=dict(format='topojson')))
return dict(topojson=rows)
def _processSubnatRow(x):
x['bounds'] = json.loads(x['bounds'])
return x
def _getSubnatBounds(args):
query = CountrySql.SUBNAT_BOUNDS.format(**args)
rows = _handler(cdb.execute(query))
results = map(_processSubnatRow, rows)
return dict(subnat_bounds=results)
def _getForma(args):
query = CountrySql.FORMA.format(**args)
return dict(forma=_handler(cdb.execute(query)))
def _getForests(args):
query = CountrySql.FORESTS.format(**args)
return dict(forests=_handler(cdb.execute(query)))
def _getTenure(args):
query = CountrySql.TENURE.format(**args)
return dict(tenure=_handler(cdb.execute(query)))
def _getBurnedForests(args):
query = CountrySql.BURNED_FOREST.format(**args)
return dict(burned_forests=_handler(cdb.execute(query)))
def _getReforestation(args):
query = CountrySql.REFORESTATION.format(**args)
return dict(reforestation=_handler(cdb.execute(query)))
def _getForestCertification(args):
query = CountrySql.FOREST_CERTIFICATION.format(**args)
return dict(forest_certification=_handler(cdb.execute(query)))
def _getLossOutsidePlantations(args):
if 'thresh' not in args:
args['thresh'] = 30
query = CountrySql.LOSS_OUTSIDE_PLANTATION.format(**args)
return dict(loss_outside_plantations=_handler(cdb.execute(query)))
def _getBounds(args):
query = CountrySql.BOUNDS.format(**args)
return dict(bounds=json.loads(_handler(cdb.execute(query))[0]['bounds']))
def _getstory(args):
return dict(story=stories.get_country_story(args))
def _getUmd(args):
action, data = umd.execute(args)
return dict(umd=data['years'])
def _getIfl(args):
args['ifl'] = True
ifl = umd.execute(args)
args['ifl'] = False
return dict(ifl=ifl)
def execute(args):
result = dict(params=args)
if args.get('index'):
result.update(_index(args))
else:
result.update(_show(args))
result.update(_getTopoJson(args))
result.update(_getSubnatBounds(args))
result.update(_getForma(args))
result.update(_getForests(args))
result.update(_getTenure(args))
result.update(_getBurnedForests(args))
result.update(_getReforestation(args))
result.update(_getForestCertification(args))
result.update(_getLossOutsidePlantations(args))
result.update(_getBounds(args))
result.update(_getUmd(args))
result.update(_getIfl(args))
result.update(_getstory(args))
return 'respond', result
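# Hedged sketch (assumption: cdb.execute returns a requests-style response
# object with status_code and content attributes). It shows what _handler
# above extracts from a typical CartoDB payload; _FakeResponse is invented
# purely for this example.
class _FakeResponse(object):
    status_code = 200
    content = json.dumps({'rows': [{'iso': 'BRA', 'name': 'Brazil'}]})
assert _handler(_FakeResponse()) == [{'iso': 'BRA', 'name': 'Brazil'}]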
|
wri/gfw-api
|
gfw/countries/countries.py
|
Python
|
gpl-2.0
| 8,785
|
from __future__ import absolute_import
import imp
import marshal
import os
import re
import sys
import warnings
try:
unicode
except NameError:
unicode = str
if sys.version_info[0] == 2:
from StringIO import StringIO
from StringIO import StringIO as BytesIO
else:
from io import BytesIO, StringIO
def imp_find_module(name, path=None):
"""
same as imp.find_module, but handles dotted names
"""
names = name.split(".")
if path is not None:
if isinstance(path, (str, unicode)):
path = [os.path.realpath(path)]
for name in names:
result = imp.find_module(name, path)
if result[0] is not None:
result[0].close()
path = [result[1]]
return result
def _check_importer_for_path(name, path_item):
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
for path_hook in sys.path_hooks:
try:
importer = path_hook(path_item)
break
except ImportError:
pass
else:
importer = None
sys.path_importer_cache.setdefault(path_item, importer)
if importer is None:
try:
return imp.find_module(name, [path_item])
except ImportError:
return None
return importer.find_module(name)
def imp_walk(name):
"""
yields namepart, tuple_or_importer for each path item
raise ImportError if a name can not be found.
"""
warnings.warn("imp_walk will be removed in a future version", DeprecationWarning)
if name in sys.builtin_module_names:
yield name, (None, None, ("", "", imp.C_BUILTIN))
return
paths = sys.path
res = None
for namepart in name.split("."):
for path_item in paths:
res = _check_importer_for_path(namepart, path_item)
if hasattr(res, "load_module"):
if res.path.endswith(".py") or res.path.endswith(".pyw"):
fp = StringIO(res.get_source(namepart))
res = (fp, res.path, (".py", "rU", imp.PY_SOURCE))
elif res.path.endswith(".pyc") or res.path.endswith(".pyo"):
co = res.get_code(namepart)
fp = BytesIO(imp.get_magic() + b"\0\0\0\0" + marshal.dumps(co))
res = (fp, res.path, (".pyc", "rb", imp.PY_COMPILED))
else:
res = (
None,
res.path,
(os.path.splitext(res.path)[-1], "rb", imp.C_EXTENSION),
)
break
elif isinstance(res, tuple):
break
else:
break
yield namepart, res
paths = [os.path.join(path_item, namepart)]
else:
return
raise ImportError("No module named %s" % (name,))
cookie_re = re.compile(br"coding[:=]\s*([-\w.]+)")
if sys.version_info[0] == 2:
default_encoding = "ascii"
else:
default_encoding = "utf-8"
def guess_encoding(fp):
for _i in range(2):
ln = fp.readline()
m = cookie_re.search(ln)
if m is not None:
return m.group(1).decode("ascii")
return default_encoding
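# Usage sketch, not part of modulegraph itself: guess_encoding reads at most
# two lines looking for a PEP 263 coding cookie and otherwise falls back to
# the interpreter default computed above.
if __name__ == "__main__":
    print(guess_encoding(BytesIO(b"# -*- coding: latin-1 -*-\nx = 1\n")))  # latin-1
    print(guess_encoding(BytesIO(b"x = 1\n")))  # default_encoding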
|
catapult-project/catapult
|
telemetry/third_party/modulegraph/modulegraph/util.py
|
Python
|
bsd-3-clause
| 3,262
|
import json
from corehq.apps.app_manager.tests.util import add_build, patch_default_builds
from corehq.apps.app_manager.util import add_odk_profile_after_build
from dimagi.utils.decorators.memoized import memoized
import os
import codecs
from django.test import TestCase
from corehq.apps.app_manager.models import Application, DetailColumn, import_app, APP_V1, ApplicationBase, Module
from corehq.apps.builds.models import BuildSpec
from corehq.apps.domain.shortcuts import create_domain
class AppManagerTest(TestCase):
with codecs.open(os.path.join(os.path.dirname(__file__), "data", "itext_form.xml"), encoding='utf-8') as f:
xform_str = f.read()
@classmethod
def setUpClass(cls):
cls.build1 = {'version': '1.2.dev', 'build_number': 7106}
cls.build2 = {'version': '2.7.0', 'build_number': 20655}
add_build(**cls.build1)
add_build(**cls.build2)
cls.domain = 'test-domain'
create_domain(cls.domain)
def setUp(self):
self.app = Application.new_app(self.domain, "TestApp", application_version=APP_V1)
for i in range(3):
module = self.app.add_module(Module.new_module("Module%d" % i, "en"))
for j in range(3):
self.app.new_form(module.id, name="Form%s-%s" % (i,j), attachment=self.xform_str, lang="en")
module = self.app.get_module(i)
detail = module.ref_details.short
detail.columns.append(
DetailColumn(header={"en": "test"}, model="case", field="test", format="plain")
)
detail.columns.append(
DetailColumn(header={"en": "age"}, model="case", field="age", format="years-ago")
)
self.app.save()
def test_increment_version(self):
old_version = self.app.version
self.app.save()
self.assertEqual(self.app.version, old_version + 1)
def tearDown(self):
self.app.delete()
def testSetUp(self):
self.assertEqual(len(self.app.modules), 3)
for module in self.app.get_modules():
self.assertEqual(len(module.forms), 3)
def testCreateJadJar(self):
# make sure this doesn't raise an error
self.app.build_spec = BuildSpec(**self.build1)
self.app.create_jadjar()
def testDeleteForm(self):
self.app.delete_form(self.app.modules[0].unique_id,
self.app.modules[0].forms[0].unique_id)
self.assertEqual(len(self.app.modules), 3)
for module, i in zip(self.app.get_modules(), [2,3,3]):
self.assertEqual(len(module.forms), i)
def testDeleteModule(self):
self.app.delete_module(self.app.modules[0].unique_id)
self.assertEqual(len(self.app.modules), 2)
def testSwapModules(self):
m0 = self.app.modules[0].name['en']
m1 = self.app.modules[1].name['en']
self.app.rearrange_modules(0,1)
self.assertEqual(self.app.modules[0].name['en'], m1)
self.assertEqual(self.app.modules[1].name['en'], m0)
@patch_default_builds
def testImportApp(self):
self.assertTrue(self.app._attachments)
new_app = import_app(self.app.id, self.domain)
self.assertEqual(set(new_app._attachments.keys()).intersection(self.app._attachments.keys()), set())
new_forms = list(new_app.get_forms())
old_forms = list(self.app.get_forms())
for new_form, old_form in zip(new_forms, old_forms):
self.assertEqual(new_form.source, old_form.source)
def testAppsBrief(self):
"""Test that ApplicationBase can wrap the
truncated version returned by applications_brief
"""
self.app.save()
apps = ApplicationBase.get_db().view('app_manager/applications_brief',
startkey=[self.domain],
limit=1,
).all()
self.assertEqual(len(apps), 1)
@property
@memoized
def _yesno_source(self):
# this app fixture uses both the (new) '_attachment'
# and the (old) 'contents' conventions, to test that both work
with open(os.path.join(os.path.dirname(__file__), 'data', 'yesno.json')) as f:
return json.load(f)
def _check_has_build_files(self, build):
min_acceptable_paths = (
'CommCare.jar',
'CommCare.jad',
'files/profile.ccpr',
'files/profile.xml',
'files/modules-0/forms-0.xml',
)
for path in min_acceptable_paths:
self.assertTrue(build.fetch_attachment(path))
def _check_legacy_odk_files(self, build):
self.assertTrue(build.copy_of)
with self.assertRaises(AttributeError):
build.odk_profile_created_after_build
path = 'files/profile.ccpr'
build_version = build.version
build.delete_attachment(path)
add_odk_profile_after_build(build)
build.save()
build = Application.get(build.get_id)
self.assertEqual(build.version, build_version)
self.assertTrue(build.fetch_attachment(path))
self.assertEqual(build.odk_profile_created_after_build, True)
def testBuildApp(self):
# do it from a NOT-SAVED app;
# regression test against case where contents gets lazy-put w/o saving
app = Application.wrap(self._yesno_source)
self.assertEqual(app['_id'], None) # i.e. hasn't been saved
app._id = Application.get_db().server.next_uuid()
copy = app.make_build()
copy.save()
self._check_has_build_files(copy)
self._check_legacy_odk_files(copy)
@patch_default_builds
def testBuildImportedApp(self):
app = import_app(self._yesno_source, self.domain)
copy = app.make_build()
copy.save()
self._check_has_build_files(copy)
self._check_legacy_odk_files(copy)
def testRevertToCopy(self):
old_name = 'old name'
new_name = 'new name'
app = Application.wrap(self._yesno_source)
app.name = old_name
app.save()
copy = app.make_build()
copy.save()
self.assertEqual(copy.name, old_name)
app.name = new_name
app.save()
app = Application.get(app.get_id)
self.assertEqual(app.name, new_name)
app = app.make_reversion_to_copy(copy)
app.save()
self.assertEqual(app.name, old_name)
def testUserReg(self):
"regression test for not catching ResourceNotFound"
self.app.show_user_registration = True
list(self.app.get_forms())
def test_jad_settings(self):
self.app.build_spec = BuildSpec(version='2.2.0', build_number=1)
self.assertIn('Build-Number', self.app.jad_settings)
self.app.build_spec = BuildSpec(version='2.8.0', build_number=1)
self.assertNotIn('Build-Number', self.app.jad_settings)
|
puttarajubr/commcare-hq
|
corehq/apps/app_manager/tests/test_app_manager.py
|
Python
|
bsd-3-clause
| 6,881
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 18 10:18:12 2017
@author: u64125
"""
import os
os.chdir(r'C:\mtpywin\mtpy') # change this path to the path where mtpy is installed
import os.path as op
from mtpy.modeling.modem import Model
from mtpy.modeling.modem import Data
from mtpy.modeling.modem import Covariance
from mtpy.core.edi import Edi
from mtpy.utils.calculator import get_period_list
import numpy as np
# path to save to
workdir = r'C:\test\ModEM'
# path where edi files are located
edipath = r'C:\mtpywin\mtpy\examples\data\edi_files_2'
## period list (by default won't include periods outside the range of the edi file) ###
## comment/uncomment your desired method ######################################
###############################################################################
## example to specify start, stop and total number of periods
#start_period = 0.001
#stop_period = 1000
#n_periods = 25
#period_list = np.logspace(np.log10(start_period),
# np.log10(stop_period),
# n_periods)
# example to specify a number of periods per decade
start_period = 0.002
stop_period = 2000
periods_per_decade = 4
period_list = get_period_list(start_period,stop_period,periods_per_decade,
include_outside_range=True)
## an example to use the periods from a particular edi file
#edifile_periods = op.join(edipath,'Synth00.edi')
#eobj = Edi(edifile_periods)
#period_list = 1./eobj.freq
###############################################################################
# list of edi files, search for all files ending with '.edi'
edi_list = [op.join(edipath,ff) for ff in os.listdir(edipath) if (ff.endswith('.edi'))]
# make the save path if it doesn't exist
if not op.exists(workdir):
os.mkdir(workdir)
do = Data(edi_list=edi_list,
inv_mode = '1',
save_path=workdir,
period_list=period_list,
period_buffer = 2, # factor to stretch interpolation by. For example: if period_buffer=2
# then interpolated data points will only be included if they are
# within a factor of 2 of a true data point
error_type_z=np.array([['floor_percent','floor_egbert'], # error type, options are 'egbert', 'percent', 'mean_od', 'eigen', 'median', 'off_diagonals'
['floor_egbert','percent']]), # add floor to apply it as an error floor
# can supply a 2 x 2 array for each component or a single value
error_value_z=np.array([[20.,5.], # error floor value in percent
[5.,20.]]), # can supply a 2 x 2 array for each component or a single value
error_type_tipper = 'floor_abs', # type of error to set in tipper,
# floor_abs is an absolute value set as a floor
error_value_tipper =.03,
model_epsg=28354 # model epsg, currently set to utm zone 54.
# See http://spatialreference.org/ to find the epsg code for your projection
)
do.write_data_file()
# set elevations to zero as we need to ensure the stations are on the topography
do.data_array['elev'] = 0.
do.write_data_file(fill=False)
# create model file
mo = Model(station_locations=do.station_locations,
cell_size_east=8000,
cell_size_north=8000,
pad_north=7, # number of padding cells in each of the north and south directions
pad_east=7,# number of east and west padding cells
pad_z=6, # number of vertical padding cells
pad_stretch_v=1.6, # factor to increase by in padding cells (vertical)
pad_stretch_h=1.4, # factor to increase by in padding cells (horizontal)
pad_num=3, # number of constant-width cells to add to outside of model before padding cells start
# this number is currently multiplied by 1.5 internally
n_air_layers = 10, #number of air layers, set to 0 to incorporate bathymetry only
res_model=100, # halfspace resistivity value for reference model
n_layers=100, # total number of z layers, including air
z1_layer=10, # first layer thickness
pad_method='stretch', # method for calculating padding
z_mesh_method='new',
z_target_depth=120000 # depth to bottom of core model (padding after this depth)
)
mo.make_mesh()
mo.write_model_file(save_path=workdir)
# add topography to res model
# if the number of air layers is zero - bathymetry only will be added.
# if the number of air layers is nonzero - topography will be added, discretised into that number of cells
mo.add_topography_to_model2(r'C:\mtpywin\mtpy\examples\data\AussieContinent_etopo1.asc')
mo.write_model_file(save_path=workdir)
# update data elevations
do.project_stations_on_topography(mo)
# show the mesh
mo.plot_sealevel_resistivity()
co = Covariance()
co.smoothing_east = 0.4
co.smoothing_north = 0.4
co.smoothing_z = 0.4
co.write_covariance_file(model_fn=mo.model_fn)
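# Hedged aside (not mtpy code): the periods-per-decade spacing requested above
# can be reproduced approximately with plain numpy. get_period_list may clip
# or extend the range differently, so treat this only as a spacing check.
_n_decades = np.log10(stop_period) - np.log10(start_period)
_approx_periods = np.logspace(np.log10(start_period), np.log10(stop_period),
                              int(_n_decades * periods_per_decade) + 1)
print(_approx_periods[:3])  # first few of 25 log-spaced periods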
|
MTgeophysics/mtpy
|
examples/scripts/ModEM_build_inputfiles.py
|
Python
|
gpl-3.0
| 5,315
|
import socket
import threading
bind_ip = ""
bind_port = 60007
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((bind_ip, bind_port))
server.listen(5)
print("[*] Listening on %s:%d" % (bind_ip, bind_port))
def handle_client(client_socket):
request = client_socket.recv(1024).decode()
print("[*] Received: %s" % request)
send_data = "ACK!"
client_socket.send(send_data.encode())
print(client_socket.getpeername())
client_socket.close()
while True:
client, addr = server.accept()
print("[*] Accepted connect from: %s:%d" % (addr[0], addr[1]))
client_handler = threading.Thread(target=handle_client, args=(client,))
client_handler.start()
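# Companion client sketch (assumed, not part of the original script). The
# accept loop above never returns, so this function is unreachable here; it
# is meant to be saved as a separate file and run while the server listens.
def run_test_client(host="127.0.0.1", port=60007):
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.connect((host, port))
    client.send("hello server".encode())
    print("[*] Server replied: %s" % client.recv(4096).decode())
    client.close()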
|
xieyajie/BackHatPython
|
backhatpython02/server-tcp.py
|
Python
|
apache-2.0
| 707
|
from urllib.parse import urlparse
import warnings
from django.contrib.sitemaps import Sitemap, views
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from codalib.bagatom import TIME_FORMAT_STRING
from coda_mdstore.models import Bag
try:
MOST_RECENT_BAGGING_DATE = Bag.objects.latest(
'bagging_date'
).bagging_date.strftime(TIME_FORMAT_STRING)
except Exception:
MOST_RECENT_BAGGING_DATE = '2012-12-12T00:00:00Z'
def index(
request,
sitemaps,
template_name='sitemap_index.xml',
content_type='application/xml',
sitemap_url_name='resourcelist',
mimetype=None
):
"""
This method is overloaded from django.contrib.sitemaps.views.
    We need this overload so that we can change the default method of
    pagination display in the sitemap index. It's a bit hacky, but it works.
"""
if mimetype:
warnings.warn(
"The mimetype keyword argument is deprecated, use "
"content_type instead", DeprecationWarning, stacklevel=2
)
content_type = mimetype
req_protocol = 'https' if request.is_secure() else 'http'
req_site = get_current_site(request)
sites = []
for section, site in sitemaps.items():
if callable(site):
site = site()
protocol = req_protocol if site.protocol is None else site.protocol
sitemap_url = reverse(
sitemap_url_name, kwargs={'section': section})
absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
sites.append(absolute_url)
for page in range(2, site.paginator.num_pages + 1):
# we want to change how the pagination is displayed
sites.append(
'%s-%03d.xml' % (absolute_url.replace('-001.xml', ''), page)
)
return TemplateResponse(
request,
template_name,
{
'sitemaps': sites,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def sitemap(request, sitemaps, section=None,
template_name='sitemap.xml', content_type='application/xml'):
"""
This method is overloaded from django.contrib.sitemaps.views.
    We need this overload so that we can handle the URLs served up by the
    overloaded "index" method above.
"""
req_site = get_current_site(request)
# since we no longer give ?p arguments,
# we want the page to be the 'section'
page = section
# now, the 'section' is really the key of the sitemaps dict seen below
section = '001'
maps = [sitemaps[section]]
urls = []
for site in maps:
try:
if callable(site):
site = site()
u = site.get_urls(page=page, site=req_site)
urls.extend(u)
except EmptyPage:
raise Http404("Page %s empty" % page)
except PageNotAnInteger:
raise Http404("No page \'%s\'" % page)
for u in urls:
bag_name = urlparse(u['location']).path.replace('/bag/', '')
bag = get_object_or_404(Bag, name=bag_name)
u.setdefault('oxum', '%s.%s' % (bag.size, bag.files))
return TemplateResponse(
request,
template_name,
{
'urlset': urls,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def changelist(request, sitemaps, section=None,
template_name='changelist.xml', content_type='application/xml'):
most_recent_bags = Bag.objects.order_by('-bagging_date', '-name').values(
'name',
'size',
'files',
'bagging_date'
)[:10000]
for b in most_recent_bags:
b['bagging_date'] = b['bagging_date'].strftime(TIME_FORMAT_STRING)
return TemplateResponse(
request,
template_name,
{
'urlset': reversed(most_recent_bags),
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def capabilitylist(
request,
template_name='mdstore/capabilitylist.xml',
content_type='application/xml'
):
return TemplateResponse(
request,
template_name,
{
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
# overload the stock sitemap pagination stuff with our own methods
setattr(views, 'index', index)
setattr(views, 'sitemap', sitemap)
setattr(Sitemap, 'limit', 5000)
class BaseSitemap(Sitemap):
lastmod = None
protocol = 'http'
def items(self):
# return the list of all the bags sorted by bagging_date
return Bag.objects.order_by('bagging_date', 'name').values('name')
def location(self, obj):
# if we just return the object it will give a unicode value tuple
return "/bag/%s" % obj['name']
sitemaps = {
'001': BaseSitemap,
}
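# Hedged illustration of the pagination naming used in index() above: later
# pages are derived by stripping the trailing -001.xml from the base sitemap
# URL and re-appending a zero-padded page number. The URL is invented.
if __name__ == '__main__':
    _base = 'http://example.org/resourcelist-001.xml'
    for _page in range(2, 4):
        print('%s-%03d.xml' % (_base.replace('-001.xml', ''), _page))
    # http://example.org/resourcelist-002.xml, then -003.xml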
|
unt-libraries/coda
|
coda/coda_mdstore/resourcesync.py
|
Python
|
bsd-3-clause
| 5,178
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dataset_builder."""
import os
import numpy as np
import tensorflow as tf
from google.protobuf import text_format
from google3.testing.pybase import parameterized
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from lstm_object_detection import seq_dataset_builder
from lstm_object_detection.protos import pipeline_pb2 as internal_pipeline_pb2
from object_detection.builders import preprocessor_builder
from object_detection.core import standard_fields as fields
from object_detection.protos import input_reader_pb2
from object_detection.protos import pipeline_pb2
from object_detection.protos import preprocessor_pb2
class DatasetBuilderTest(parameterized.TestCase):
def _create_tf_record(self):
path = os.path.join(self.get_temp_dir(), 'tfrecord')
writer = tf.python_io.TFRecordWriter(path)
image_tensor = np.random.randint(255, size=(16, 16, 3)).astype(np.uint8)
with self.test_session():
encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).eval()
sequence_example = example_pb2.SequenceExample(
context=feature_pb2.Features(
feature={
'image/format':
feature_pb2.Feature(
bytes_list=feature_pb2.BytesList(
value=['jpeg'.encode('utf-8')])),
'image/height':
feature_pb2.Feature(
int64_list=feature_pb2.Int64List(value=[16])),
'image/width':
feature_pb2.Feature(
int64_list=feature_pb2.Int64List(value=[16])),
}),
feature_lists=feature_pb2.FeatureLists(
feature_list={
'image/encoded':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
bytes_list=feature_pb2.BytesList(
value=[encoded_jpeg])),
]),
'image/object/bbox/xmin':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=[0.0])),
]),
'image/object/bbox/xmax':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=[1.0]))
]),
'image/object/bbox/ymin':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=[0.0])),
]),
'image/object/bbox/ymax':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=[1.0]))
]),
'image/object/class/label':
feature_pb2.FeatureList(feature=[
feature_pb2.Feature(
int64_list=feature_pb2.Int64List(value=[2]))
]),
}))
writer.write(sequence_example.SerializeToString())
writer.close()
return path
def _get_model_configs_from_proto(self):
"""Creates a model text proto for testing.
Returns:
A dictionary of model configs.
"""
model_text_proto = """
[object_detection.protos.lstm_model] {
train_unroll_length: 4
eval_unroll_length: 4
}
model {
ssd {
feature_extractor {
type: 'lstm_mobilenet_v1_fpn'
conv_hyperparams {
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
}
}
negative_class_weight: 2.0
box_coder {
faster_rcnn_box_coder {
}
}
matcher {
argmax_matcher {
}
}
similarity_calculator {
iou_similarity {
}
}
anchor_generator {
ssd_anchor_generator {
aspect_ratios: 1.0
}
}
image_resizer {
fixed_shape_resizer {
height: 32
width: 32
}
}
box_predictor {
convolutional_box_predictor {
conv_hyperparams {
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
}
}
}
normalize_loc_loss_by_codesize: true
loss {
classification_loss {
weighted_softmax {
}
}
localization_loss {
weighted_smooth_l1 {
}
}
}
}
}"""
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
text_format.Merge(model_text_proto, pipeline_config)
configs = {}
configs['model'] = pipeline_config.model
configs['lstm_model'] = pipeline_config.Extensions[
internal_pipeline_pb2.lstm_model]
return configs
def _get_data_augmentation_preprocessor_proto(self):
preprocessor_text_proto = """
random_horizontal_flip {
}
"""
preprocessor_proto = preprocessor_pb2.PreprocessingStep()
text_format.Merge(preprocessor_text_proto, preprocessor_proto)
return preprocessor_proto
def _create_training_dict(self, tensor_dict):
image_dict = {}
all_dict = {}
all_dict['batch'] = tensor_dict.pop('batch')
for i, _ in enumerate(tensor_dict[fields.InputDataFields.image]):
for key, val in tensor_dict.items():
image_dict[key] = val[i]
image_dict[fields.InputDataFields.image] = tf.to_float(
tf.expand_dims(image_dict[fields.InputDataFields.image], 0))
suffix = str(i)
for key, val in image_dict.items():
all_dict[key + suffix] = val
return all_dict
def _get_input_proto(self, input_reader):
return """
external_input_reader {
[lstm_object_detection.input_readers.GoogleInputReader.google_input_reader] {
%s: {
input_path: '{0}'
data_type: TF_SEQUENCE_EXAMPLE
video_length: 4
}
}
}
""" % input_reader
@parameterized.named_parameters(('tf_record', 'tf_record_video_input_reader'))
def test_video_input_reader(self, video_input_type):
input_reader_proto = input_reader_pb2.InputReader()
text_format.Merge(
self._get_input_proto(video_input_type), input_reader_proto)
configs = self._get_model_configs_from_proto()
tensor_dict = seq_dataset_builder.build(
input_reader_proto,
configs['model'],
configs['lstm_model'],
unroll_length=1)
all_dict = self._create_training_dict(tensor_dict)
self.assertEqual((1, 32, 32, 3), all_dict['image0'].shape)
self.assertEqual(4, all_dict['groundtruth_boxes0'].shape[1])
def test_build_with_data_augmentation(self):
input_reader_proto = input_reader_pb2.InputReader()
text_format.Merge(
self._get_input_proto('tf_record_video_input_reader'),
input_reader_proto)
configs = self._get_model_configs_from_proto()
data_augmentation_options = [
preprocessor_builder.build(
self._get_data_augmentation_preprocessor_proto())
]
tensor_dict = seq_dataset_builder.build(
input_reader_proto,
configs['model'],
configs['lstm_model'],
unroll_length=1,
data_augmentation_options=data_augmentation_options)
all_dict = self._create_training_dict(tensor_dict)
self.assertEqual((1, 32, 32, 3), all_dict['image0'].shape)
self.assertEqual(4, all_dict['groundtruth_boxes0'].shape[1])
def test_raises_error_without_input_paths(self):
input_reader_text_proto = """
shuffle: false
num_readers: 1
load_instance_masks: true
"""
input_reader_proto = input_reader_pb2.InputReader()
text_format.Merge(input_reader_text_proto, input_reader_proto)
configs = self._get_model_configs_from_proto()
with self.assertRaises(ValueError):
_ = seq_dataset_builder.build(
input_reader_proto,
configs['model'],
configs['lstm_model'],
unroll_length=1)
if __name__ == '__main__':
tf.test.main()
|
cshallue/models
|
research/lstm_object_detection/seq_dataset_builder_test.py
|
Python
|
apache-2.0
| 9,297
|
from p2pool.bitcoin import networks
PARENT = networks.nets['hawaiicoin']
SHARE_PERIOD = 10 # seconds
CHAIN_LENGTH = 12*60*60//10 # shares
REAL_CHAIN_LENGTH = 12*60*60//10 # shares
TARGET_LOOKBEHIND = 20 # shares
SPREAD = 30 # blocks
IDENTIFIER = '1f7c84911f8491c8'.decode('hex')
PREFIX = 'c8c81f1f94949191'.decode('hex')
P2P_PORT = 8934
MIN_TARGET = 0
MAX_TARGET = 2**256//2**20 - 1
PERSIST = False
WORKER_PORT = 9834
BOOTSTRAP_ADDRS = 'p2pool-us.coin-project.org p2pool-eu.coin-project.org p2pool-eu.gotgeeks.com p2pool-us.gotgeeks.com rav3n.dtdns.net doge.dtdns.net pool.hostv.pl p2pool.org p2pool.gotgeeks.com p2pool.dtdns.net solidpool.org taken.pl'.split(' ')
ANNOUNCE_CHANNEL = '#p2pool-alt'
VERSION_CHECK = lambda v: True
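# Sanity arithmetic (illustrative only): with SHARE_PERIOD = 10 seconds,
# CHAIN_LENGTH = 12*60*60//10 = 4320 shares, i.e. a 12-hour share chain.
assert CHAIN_LENGTH == 12 * 60 * 60 // SHARE_PERIOD == 4320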
|
alexandrcoin/p2pooldoge-all
|
p2pool/networks/hawaiicoin.py
|
Python
|
gpl-3.0
| 730
|
import configparser
import ast
class Config(configparser.RawConfigParser):
_CONFIG_FILE=""
def load(self,file):
self._CONFIG_FILE = file
self.read(file)
def save(self):
with open(self._CONFIG_FILE, 'w') as configfile:
self.write(configfile)
def get(self,setting):
return(configparser.RawConfigParser.get(self,'settings',setting))
def get_servers(self):
d1 = ast.literal_eval(configparser.RawConfigParser.get(self,'settings', 'servers'))
return(d1)
def set_servers(self,servers):
configparser.RawConfigParser.set(self, 'settings', 'servers',str(servers))
def set(self,setting,value):
configparser.RawConfigParser.set(self,'settings',setting,value)
config = Config()
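# Usage sketch (section and server names are invented for illustration): the
# servers option round-trips through str() on write and ast.literal_eval on
# read.
if __name__ == '__main__':
    config.add_section('settings')
    config.set_servers(['cao', 'cbro'])
    assert config.get_servers() == ['cao', 'cbro']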
|
oschwab/dcss_stats
|
dcss_stats/core/_config.py
|
Python
|
unlicense
| 778
|
# encoding: utf-8
"""
configuration.py
Created by Thomas Mangin on 2011-11-29.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
# NOTE: reloading mid-program not possible
import os
import sys
import logging
import pwd
import math
import socket
import struct
_application = None
_config = None
_defaults = None
class ConfigurationError (Exception):
pass
_syslog_name_value = {
'CRITICAL' : logging.CRITICAL,
'ERROR' : logging.ERROR,
'WARNING' : logging.WARNING,
'INFO' : logging.INFO,
'DEBUG' : logging.DEBUG,
}
_syslog_value_name = {
logging.CRITICAL : 'CRITICAL',
logging.ERROR : 'ERROR',
logging.WARNING : 'WARNING',
logging.INFO : 'INFO',
logging.DEBUG : 'DEBUG',
}
class NoneDict (dict):
def __getitem__ (self,name):
return None
nonedict = NoneDict()
home = os.path.normpath(sys.argv[0]) if sys.argv[0].startswith('/') else os.path.normpath(os.path.join(os.getcwd(),sys.argv[0]))
class value (object):
@staticmethod
def nop (_):
return _
@staticmethod
def syslog (log):
if log not in _syslog_name_value:
raise TypeError('invalid log level %s' % log)
return _syslog_name_value[log]
@staticmethod
def root (path):
roots = home.split(os.sep)
location = []
for index in range(len(roots)-1,-1,-1):
if roots[index] in ('lib','bin'):
if index:
location = roots[:index]
break
root = os.path.join(*location)
paths = [
os.path.normpath(os.path.join(os.path.join(os.sep,root,path))),
os.path.normpath(os.path.expanduser(value.unquote(path))),
os.path.normpath(os.path.join('/',path)),
os.path.normpath(os.path.join('/','usr',path)),
]
return paths
@staticmethod
def integer (_):
value = int(_)
if value <= 0:
raise TypeError('the value must be positive')
return value
@staticmethod
def lowunquote (_):
return _.strip().strip('\'"').lower()
@staticmethod
def unquote (_):
return _.strip().strip('\'"')
@staticmethod
def boolean (_):
return _.lower() in ('1','yes','on','enable','true')
@staticmethod
def list (_):
return _.split()
@staticmethod
def ports (_):
try:
return [int(x) for x in _.split()]
except ValueError:
			raise TypeError('ports must be a space-separated list of integers')
@staticmethod
def methods (_):
return _.upper().split()
@staticmethod
def user (_):
try:
pwd.getpwnam(_)
# uid = answer[2]
except KeyError:
raise TypeError('user %s is not found on this system' % _)
return _
@staticmethod
def folder(path):
paths = value.root(path)
options = [path for path in paths if os.path.exists(path)]
		if not options: raise TypeError('%s does not exist' % path)
		first = options[0]
		if not first: raise TypeError('%s does not exist' % first)
return first
@staticmethod
def conf(path):
first = value.folder(path)
if not os.path.isfile(first): raise TypeError('%s is not a file' % path)
return first
@staticmethod
def resolver(path):
global _application
paths = value.root('etc/%s/dns/resolv.conf' % _application)
paths.append(os.path.normpath(os.path.join('/','etc','resolv.conf')))
paths.append(os.path.normpath(os.path.join('/','var','run','resolv.conf')))
for resolver in paths:
if os.path.exists(resolver):
with open(resolver) as r:
if 'nameserver' in (line.strip().split(None,1)[0].lower() for line in r.readlines() if line.strip()):
return resolver
raise TypeError('resolv.conf can not be found (are you using DHCP without any network setup ?)')
@staticmethod
def exe (path):
argv = path.split(' ',1)
program = value.conf(argv.pop(0))
if not os.access(program, os.X_OK):
raise TypeError('%s is not an executable' % program)
return program if not argv else '%s %s' % (program,argv[0])
@staticmethod
def services (string):
try:
services = []
for service in value.unquote(string).split():
host,port = service.split(':')
services.append((host,int(port)))
return services
except ValueError:
			raise TypeError('services must be a space-separated list of host:port pairs')
@staticmethod
def ranges (string):
try:
ranges = []
for service in value.unquote(string).split():
network,netmask = service.split('/')
if ':' in network:
high,low = struct.unpack('!QQ',socket.inet_pton(socket.AF_INET6,network))
start = (high << 64) + low
end = start + pow(2,128-int(netmask)) - 1
ranges.append((6,start,end))
else:
start = struct.unpack('!L',socket.inet_pton(socket.AF_INET,network))[0]
end = start + pow(2,32-int(netmask)) - 1
ranges.append((4,start,end))
return ranges
except ValueError:
raise TypeError('Can not parse the data as IP range')
@staticmethod
def redirector (name):
if name == 'url' or name.startswith('icap://'):
return name
		raise TypeError('invalid redirector protocol %s, options are url or icap://<server>' % name)
class string (object):
@staticmethod
def nop (_):
return _
@staticmethod
def syslog (log):
if log not in _syslog_value_name:
raise TypeError('invalid log level %s' % log)
return _syslog_value_name[log]
@staticmethod
def quote (_):
return "'%s'" % str(_)
@staticmethod
def lower (_):
return str(_).lower()
@staticmethod
def path (path):
split = sys.argv[0].split('lib/%s' % _application)
if len(split) > 1:
prefix = os.sep.join(split[:1])
if prefix and path.startswith(prefix):
path = path[len(prefix):]
home = os.path.expanduser('~')
if path.startswith(home):
return "'~%s'" % path[len(home):]
return "'%s'" % path
@staticmethod
def list (_):
return "'%s'" % ' '.join((str(x) for x in _))
@staticmethod
def services (_):
l = ' '.join(('%s:%d' % (host,port) for host,port in _))
return "'%s'" % l
@staticmethod
def ranges (_):
def convert ():
for (proto,start,end) in _:
bits = int(math.log(end-start+1,2))
if proto == 4:
network = socket.inet_ntop(socket.AF_INET,struct.pack('!L',start))
yield '%s/%d' % (network,32-bits)
else:
high = struct.pack('!Q',start >> 64)
low = struct.pack('!Q',start & 0xFFFFFFFF)
network = socket.inet_ntop(socket.AF_INET6,high+low)
yield '%s/%d' % (network,128-bits)
return "'%s'" % ' '.join(convert())
import ConfigParser
class Store (dict):
def __getitem__ (self,key):
return dict.__getitem__(self,key.replace('_','-'))
def __setitem__ (self,key,value):
return dict.__setitem__(self,key.replace('_','-'),value)
def __getattr__ (self,key):
return dict.__getitem__(self,key.replace('_','-'))
def __setattr__ (self,key,value):
return dict.__setitem__(self,key.replace('_','-'),value)
def _configuration (conf):
location = os.path.join(os.sep,*os.path.join(home.split(os.sep)))
while location and location != '/':
location, directory = os.path.split(location)
if directory in ('lib','bin'):
break
_conf_paths = []
if conf:
_conf_paths.append(os.path.abspath(os.path.normpath(conf)))
if location:
_conf_paths.append(os.path.normpath(os.path.join(location,'etc',_application,'%s.conf' % _application)))
_conf_paths.append(os.path.normpath(os.path.join('/','etc',_application,'%s.conf' % _application)))
_conf_paths.append(os.path.normpath(os.path.join('/','usr','etc',_application,'%s.conf' % _application)))
configuration = Store()
ini = ConfigParser.ConfigParser()
ini_files = [path for path in _conf_paths if os.path.exists(path)]
if ini_files:
ini.read(ini_files[0])
for section in _defaults:
default = _defaults[section]
for option in default:
convert = default[option][0]
try:
proxy_section = '%s.%s' % (_application,section)
env_name = '%s.%s' % (proxy_section,option)
rep_name = env_name.replace('.','_')
if env_name in os.environ:
conf = os.environ.get(env_name)
elif rep_name in os.environ:
conf = os.environ.get(rep_name)
else:
try:
# raise and set the default
conf = value.unquote(ini.get(section,option,nonedict))
except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
# raise and set the default
conf = value.unquote(ini.get(proxy_section,option,nonedict))
# name without an = or : in the configuration and no value
if conf is None:
conf = default[option][2]
except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
conf = default[option][2]
try:
configuration.setdefault(section,Store())[option] = convert(conf)
except TypeError,error:
raise ConfigurationError('invalid value for %s.%s : %s (%s)' % (section,option,conf,str(error)))
return configuration
def load (application=None,defaults=None,conf=None):
global _application
global _defaults
global _config
if _config:
return _config
if conf is None:
raise RuntimeError('You can not have an import using load() before main() initialised it')
_application = application
_defaults = defaults
_config = _configuration(conf)
return _config
def default ():
for section in sorted(_defaults):
for option in sorted(_defaults[section]):
values = _defaults[section][option]
default = "'%s'" % values[2] if values[1] in (string.list,string.path,string.quote) else values[2]
yield '%s.%s.%s %s: %s. default (%s)' % (_application,section,option,' '*(20-len(section)-len(option)),values[3],default)
def ini (diff=False):
for section in sorted(_config):
if section in ('proxy','debug'):
continue
header = '\n[%s]' % section
for k in sorted(_config[section]):
v = _config[section][k]
if diff and _defaults[section][k][0](_defaults[section][k][2]) == v:
continue
if header:
print header
header = ''
print '%s = %s' % (k,_defaults[section][k][1](v))
def env (diff=False):
print
for section,values in _config.items():
if section in ('proxy','debug'):
continue
for k,v in values.items():
if diff and _defaults[section][k][0](_defaults[section][k][2]) == v:
continue
if _defaults[section][k][1] == string.quote:
print "%s.%s.%s='%s'" % (_application,section,k,v)
continue
print "%s.%s.%s=%s" % (_application,section,k,_defaults[section][k][1](v))
|
david-farrar/exaproxy
|
lib/exaproxy/configuration.py
|
Python
|
bsd-2-clause
| 10,172
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports
#+---------------------------------------------------------------------------+
import unittest
#+---------------------------------------------------------------------------+
#| Local application imports
#+---------------------------------------------------------------------------+
def getSuite():
typeSuite = unittest.TestSuite()
return typeSuite
|
lootr/netzob
|
netzob/test/src/test_netzob/test_Common/suite_Type.py
|
Python
|
gpl-3.0
| 2,410
|
# -*- coding: utf-8 -*-
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Teon Brooks <teon.brooks@gmail.com>
# Stefan Appelhoff <stefan.appelhoff@mailbox.org>
# Joan Massich <mailsik@gmail.com>
#
# License: BSD (3-clause)
import heapq
from collections import Counter
import datetime
import os.path as op
import numpy as np
from ..utils import logger, warn, Bunch, _validate_type
from .constants import FIFF, _coord_frame_named
from .tree import dir_tree_find
from .tag import read_tag
from .write import (start_file, end_file, write_dig_points)
from ..transforms import (apply_trans, Transform,
get_ras_to_neuromag_trans, combine_transforms,
invert_transform, _to_const, _str_to_frame,
_coord_frame_name)
from .. import __version__
_dig_kind_dict = {
'cardinal': FIFF.FIFFV_POINT_CARDINAL,
'hpi': FIFF.FIFFV_POINT_HPI,
'eeg': FIFF.FIFFV_POINT_EEG,
'extra': FIFF.FIFFV_POINT_EXTRA,
}
_dig_kind_ints = tuple(sorted(_dig_kind_dict.values()))
_dig_kind_proper = {'cardinal': 'Cardinal',
'hpi': 'HPI',
'eeg': 'EEG',
'extra': 'Extra',
'unknown': 'Unknown'}
_dig_kind_rev = {val: key for key, val in _dig_kind_dict.items()}
_cardinal_kind_rev = {1: 'LPA', 2: 'Nasion', 3: 'RPA', 4: 'Inion'}
def _format_dig_points(dig, enforce_order=False):
"""Format the dig points nicely."""
if enforce_order and dig is not None:
# reorder points based on type:
# Fiducials/HPI, EEG, extra (headshape)
fids_digpoints = []
hpi_digpoints = []
eeg_digpoints = []
extra_digpoints = []
head_digpoints = []
# use a heap to enforce order on FIDS, EEG, Extra
for idx, digpoint in enumerate(dig):
ident = digpoint['ident']
kind = digpoint['kind']
# push onto heap based on 'ident' (for the order) for
# each of the possible DigPoint 'kind's
# keep track of 'idx' in case of any clashes in
# the 'ident' variable, which can occur when
# user passes in DigMontage + DigMontage
if kind == FIFF.FIFFV_POINT_CARDINAL:
heapq.heappush(fids_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_HPI:
heapq.heappush(hpi_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_EEG:
heapq.heappush(eeg_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_EXTRA:
heapq.heappush(extra_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_HEAD:
heapq.heappush(head_digpoints, (ident, idx, digpoint))
# now recreate dig based on sorted order
        # sorting the (ident, idx, digpoint) tuples orders by ident first
        fids_digpoints.sort()
        hpi_digpoints.sort()
        eeg_digpoints.sort()
        extra_digpoints.sort()
        head_digpoints.sort()
        new_dig = []
        # rebuild dig in the documented order: fiducials, HPI, EEG, extra, head
        for idx, d in enumerate(fids_digpoints + hpi_digpoints +
                                eeg_digpoints + extra_digpoints +
                                head_digpoints):
new_dig.append(d[-1])
dig = new_dig
return [DigPoint(d) for d in dig] if dig is not None else dig
def _get_dig_eeg(dig):
return [d for d in dig if d['kind'] == FIFF.FIFFV_POINT_EEG]
def _count_points_by_type(dig):
"""Get the number of points of each type."""
occurrences = Counter([d['kind'] for d in dig])
return dict(
fid=occurrences[FIFF.FIFFV_POINT_CARDINAL],
hpi=occurrences[FIFF.FIFFV_POINT_HPI],
eeg=occurrences[FIFF.FIFFV_POINT_EEG],
extra=occurrences[FIFF.FIFFV_POINT_EXTRA],
)
_dig_keys = {'kind', 'ident', 'r', 'coord_frame'}
class DigPoint(dict):
"""Container for a digitization point.
This is a simple subclass of the standard dict type designed to provide
a readable string representation.
Parameters
----------
kind : int
The kind of channel,
e.g. ``FIFFV_POINT_EEG``, ``FIFFV_POINT_CARDINAL``.
r : array, shape (3,)
        3D position in meters, given in the coordinate frame ``coord_frame``.
ident : int
Number specifying the identity of the point.
e.g. ``FIFFV_POINT_NASION`` if kind is ``FIFFV_POINT_CARDINAL``,
or 42 if kind is ``FIFFV_POINT_EEG``.
coord_frame : int
The coordinate frame used, e.g. ``FIFFV_COORD_HEAD``.
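
    Examples
    --------
    A minimal illustrative construction (the coordinates are made up):

    >>> import numpy as np
    >>> p = DigPoint(kind=FIFF.FIFFV_POINT_CARDINAL,
    ...              ident=FIFF.FIFFV_POINT_NASION,
    ...              r=np.array([0., 0.09, 0.]),
    ...              coord_frame=FIFF.FIFFV_COORD_HEAD)
    >>> p['ident'] == FIFF.FIFFV_POINT_NASION
    True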
"""
def __repr__(self): # noqa: D105
if self['kind'] == FIFF.FIFFV_POINT_CARDINAL:
id_ = _cardinal_kind_rev.get(self['ident'], 'Unknown cardinal')
else:
id_ = _dig_kind_proper[
_dig_kind_rev.get(self['kind'], 'unknown')]
id_ = ('%s #%s' % (id_, self['ident']))
id_ = id_.rjust(10)
cf = _coord_frame_name(self['coord_frame'])
pos = ('(%0.1f, %0.1f, %0.1f) mm' % tuple(1000 * self['r'])).ljust(25)
return ('<DigPoint | %s : %s : %s frame>' % (id_, pos, cf))
# speed up info copy by only deep copying the mutable item
def __deepcopy__(self, memodict):
"""Make a deepcopy."""
return DigPoint(
kind=self['kind'], r=self['r'].copy(),
ident=self['ident'], coord_frame=self['coord_frame'])
def __eq__(self, other): # noqa: D105
"""Compare two DigPoints.
Two digpoints are equal if they are the same kind, share the same
coordinate frame and position.
"""
my_keys = ['kind', 'ident', 'coord_frame']
if set(self.keys()) != set(other.keys()):
return False
elif any(self[_] != other[_] for _ in my_keys):
return False
else:
return np.allclose(self['r'], other['r'])
def _read_dig_fif(fid, meas_info):
"""Read digitizer data from a FIFF file."""
isotrak = dir_tree_find(meas_info, FIFF.FIFFB_ISOTRAK)
dig = None
if len(isotrak) == 0:
logger.info('Isotrak not found')
elif len(isotrak) > 1:
warn('Multiple Isotrak found')
else:
isotrak = isotrak[0]
coord_frame = FIFF.FIFFV_COORD_HEAD
dig = []
for k in range(isotrak['nent']):
kind = isotrak['directory'][k].kind
pos = isotrak['directory'][k].pos
if kind == FIFF.FIFF_DIG_POINT:
tag = read_tag(fid, pos)
dig.append(tag.data)
elif kind == FIFF.FIFF_MNE_COORD_FRAME:
tag = read_tag(fid, pos)
coord_frame = _coord_frame_named.get(int(tag.data))
for d in dig:
d['coord_frame'] = coord_frame
return _format_dig_points(dig)
def write_dig(fname, pts, coord_frame=None):
"""Write digitization data to a FIF file.
Parameters
----------
fname : str
Destination file name.
pts : iterator of dict
Iterator through digitizer points. Each point is a dictionary with
the keys 'kind', 'ident' and 'r'.
coord_frame : int | str | None
If all the points have the same coordinate frame, specify the type
here. Can be None (default) if the points could have varying
coordinate frames.
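
    Examples
    --------
    A hedged sketch (``info`` stands for a measurement info dict that
    carries digitization points):

    >>> write_dig('dig.fif', info['dig'], coord_frame='head')  # doctest: +SKIP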
"""
if coord_frame is not None:
coord_frame = _to_const(coord_frame)
pts_frames = {pt.get('coord_frame', coord_frame) for pt in pts}
bad_frames = pts_frames - {coord_frame}
if len(bad_frames) > 0:
raise ValueError(
'Points have coord_frame entries that are incompatible with '
'coord_frame=%i: %s.' % (coord_frame, str(tuple(bad_frames))))
with start_file(fname) as fid:
write_dig_points(fid, pts, block=True, coord_frame=coord_frame)
end_file(fid)
_cardinal_ident_mapping = {
FIFF.FIFFV_POINT_NASION: 'nasion',
FIFF.FIFFV_POINT_LPA: 'lpa',
FIFF.FIFFV_POINT_RPA: 'rpa',
}
# XXX:
# This does something really similar to _read_dig_montage_fif but:
# - does not check coord_frame
# - does not do any operation that implies assumptions with the names
def _get_data_as_dict_from_dig(dig):
"""Obtain coordinate data from a Dig.
Parameters
----------
dig : list of dicts
A container of DigPoints to be added to the info['dig'].
Returns
-------
    coords : Bunch
        The fiducial, HPI, headshape and EEG channel positions found inside
        dig, together with their common coordinate frame.
"""
# Split up the dig points by category
hsp, hpi, elp = list(), list(), list()
fids, dig_ch_pos_location = dict(), list()
for d in dig:
if d['kind'] == FIFF.FIFFV_POINT_CARDINAL:
fids[_cardinal_ident_mapping[d['ident']]] = d['r']
elif d['kind'] == FIFF.FIFFV_POINT_HPI:
hpi.append(d['r'])
elp.append(d['r'])
# XXX: point_names.append('HPI%03d' % d['ident'])
elif d['kind'] == FIFF.FIFFV_POINT_EXTRA:
hsp.append(d['r'])
elif d['kind'] == FIFF.FIFFV_POINT_EEG:
# XXX: dig_ch_pos['EEG%03d' % d['ident']] = d['r']
if d['ident'] != 0: # ref channel
dig_ch_pos_location.append(d['r'])
dig_coord_frames = set([d['coord_frame'] for d in dig])
assert len(dig_coord_frames) == 1, \
'Only single coordinate frame in dig is supported' # XXX
return Bunch(
nasion=fids.get('nasion', None),
lpa=fids.get('lpa', None),
rpa=fids.get('rpa', None),
hsp=np.array(hsp) if len(hsp) else None,
hpi=np.array(hpi) if len(hpi) else None,
elp=np.array(elp) if len(elp) else None,
dig_ch_pos_location=dig_ch_pos_location,
coord_frame=dig_coord_frames.pop(),
)
def _get_fid_coords(dig, raise_error=True):
fid_coords = Bunch(nasion=None, lpa=None, rpa=None)
fid_coord_frames = dict()
for d in dig:
if d['kind'] == FIFF.FIFFV_POINT_CARDINAL:
key = _cardinal_ident_mapping[d['ident']]
fid_coords[key] = d['r']
fid_coord_frames[key] = d['coord_frame']
if len(fid_coord_frames) > 0 and raise_error:
if set(fid_coord_frames.keys()) != set(['nasion', 'lpa', 'rpa']):
raise ValueError("Some fiducial points are missing (got %s)." %
fid_coords.keys())
if len(set(fid_coord_frames.values())) > 1:
raise ValueError(
'All fiducial points must be in the same coordinate system '
                '(got %s)' % sorted(set(fid_coord_frames.values()))
)
coord_frame = fid_coord_frames.popitem()[1] if fid_coord_frames else None
return fid_coords, coord_frame
def _write_dig_points(fname, dig_points):
"""Write points to text file.
Parameters
----------
fname : str
Path to the file to write. The kind of file to write is determined
based on the extension: '.txt' for tab separated text file.
dig_points : numpy.ndarray, shape (n_points, 3)
Points.
"""
_, ext = op.splitext(fname)
dig_points = np.asarray(dig_points)
if (dig_points.ndim != 2) or (dig_points.shape[1] != 3):
err = ("Points must be of shape (n_points, 3), "
"not %s" % (dig_points.shape,))
raise ValueError(err)
if ext == '.txt':
with open(fname, 'wb') as fid:
version = __version__
now = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
fid.write(b'%% Ascii 3D points file created by mne-python version'
b' %s at %s\n' % (version.encode(), now.encode()))
fid.write(b'%% %d 3D points, x y z per line\n' % len(dig_points))
np.savetxt(fid, dig_points, delimiter='\t', newline='\n')
else:
msg = "Unrecognized extension: %r. Need '.txt'." % ext
raise ValueError(msg)
def _coord_frame_const(coord_frame):
if not isinstance(coord_frame, str) or coord_frame not in _str_to_frame:
raise ValueError('coord_frame must be one of %s, got %s'
% (sorted(_str_to_frame.keys()), coord_frame))
return _str_to_frame[coord_frame]
def _make_dig_points(nasion=None, lpa=None, rpa=None, hpi=None,
extra_points=None, dig_ch_pos=None,
coord_frame='head'):
"""Construct digitizer info for the info.
Parameters
----------
nasion : array-like | numpy.ndarray, shape (3,) | None
Point designated as the nasion point.
lpa : array-like | numpy.ndarray, shape (3,) | None
Point designated as the left auricular point.
rpa : array-like | numpy.ndarray, shape (3,) | None
Point designated as the right auricular point.
hpi : array-like | numpy.ndarray, shape (n_points, 3) | None
Points designated as head position indicator points.
extra_points : array-like | numpy.ndarray, shape (n_points, 3)
Points designed as the headshape points.
dig_ch_pos : dict
Dict of EEG channel positions.
coord_frame : str
The coordinate frame of the points. Usually this is "unknown"
for native digitizer space. Defaults to "head".
Returns
-------
dig : list of dicts
A container of DigPoints to be added to the info['dig'].
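
    Examples
    --------
    A minimal illustrative call (the fiducial coordinates are made up):

    >>> dig = _make_dig_points(nasion=[0., 0.09, 0.],
    ...                        lpa=[-0.08, 0., 0.],
    ...                        rpa=[0.08, 0., 0.])
    >>> len(dig)
    3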
"""
coord_frame = _coord_frame_const(coord_frame)
dig = []
if lpa is not None:
lpa = np.asarray(lpa)
if lpa.shape != (3,):
raise ValueError('LPA should have the shape (3,) instead of %s'
% (lpa.shape,))
dig.append({'r': lpa, 'ident': FIFF.FIFFV_POINT_LPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if nasion is not None:
nasion = np.asarray(nasion)
if nasion.shape != (3,):
raise ValueError('Nasion should have the shape (3,) instead of %s'
% (nasion.shape,))
dig.append({'r': nasion, 'ident': FIFF.FIFFV_POINT_NASION,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if rpa is not None:
rpa = np.asarray(rpa)
if rpa.shape != (3,):
raise ValueError('RPA should have the shape (3,) instead of %s'
% (rpa.shape,))
dig.append({'r': rpa, 'ident': FIFF.FIFFV_POINT_RPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if hpi is not None:
hpi = np.asarray(hpi)
if hpi.ndim != 2 or hpi.shape[1] != 3:
raise ValueError('HPI should have the shape (n_points, 3) instead '
'of %s' % (hpi.shape,))
for idx, point in enumerate(hpi):
dig.append({'r': point, 'ident': idx + 1,
'kind': FIFF.FIFFV_POINT_HPI,
'coord_frame': coord_frame})
if extra_points is not None:
extra_points = np.asarray(extra_points)
if extra_points.shape[1] != 3:
raise ValueError('Points should have the shape (n_points, 3) '
'instead of %s' % (extra_points.shape,))
for idx, point in enumerate(extra_points):
dig.append({'r': point, 'ident': idx + 1,
'kind': FIFF.FIFFV_POINT_EXTRA,
'coord_frame': coord_frame})
if dig_ch_pos is not None:
try: # use the last 3 as int if possible (e.g., EEG001->1)
idents = []
for key in dig_ch_pos:
_validate_type(key, str, 'dig_ch_pos')
idents.append(int(key[-3:]))
except ValueError: # and if any conversion fails, simply use arange
idents = np.arange(1, len(dig_ch_pos) + 1)
for key, ident in zip(dig_ch_pos, idents):
dig.append({'r': dig_ch_pos[key], 'ident': int(ident),
'kind': FIFF.FIFFV_POINT_EEG,
'coord_frame': coord_frame})
return _format_dig_points(dig)
def _call_make_dig_points(nasion, lpa, rpa, hpi, extra, convert=True):
if convert:
neuromag_trans = get_ras_to_neuromag_trans(nasion, lpa, rpa)
nasion = apply_trans(neuromag_trans, nasion)
lpa = apply_trans(neuromag_trans, lpa)
rpa = apply_trans(neuromag_trans, rpa)
if hpi is not None:
hpi = apply_trans(neuromag_trans, hpi)
extra = apply_trans(neuromag_trans, extra).astype(np.float32)
else:
neuromag_trans = None
ctf_head_t = Transform(fro='ctf_head', to='head', trans=neuromag_trans)
info_dig = _make_dig_points(nasion=nasion,
lpa=lpa,
rpa=rpa,
hpi=hpi,
extra_points=extra)
return info_dig, ctf_head_t
##############################################################################
# From artemis123 (we have modified the function a bit)
def _artemis123_read_pos(nas, lpa, rpa, hpi, extra):
# move into MNE head coords
dig_points, _ = _call_make_dig_points(nas, lpa, rpa, hpi, extra)
return dig_points
##############################################################################
# From bti
def _make_bti_dig_points(nasion, lpa, rpa, hpi, extra,
convert=False, use_hpi=False,
bti_dev_t=False, dev_ctf_t=False):
_hpi = hpi if use_hpi else None
info_dig, ctf_head_t = _call_make_dig_points(nasion, lpa, rpa, _hpi, extra,
convert)
if convert:
t = combine_transforms(invert_transform(bti_dev_t), dev_ctf_t,
'meg', 'ctf_head')
dev_head_t = combine_transforms(t, ctf_head_t, 'meg', 'head')
else:
dev_head_t = Transform('meg', 'head', trans=None)
return info_dig, dev_head_t, ctf_head_t # ctf_head_t should not be needed
|
olafhauk/mne-python
|
mne/io/_digitization.py
|
Python
|
bsd-3-clause
| 18,017
|
from typing import List, Pattern
from recognizers_text.utilities import RegExpUtility
from recognizers_text.extractor import Extractor
from recognizers_number.number.french.extractors import FrenchIntegerExtractor
from ...resources.french_date_time import FrenchDateTime
from ..extractors import DateTimeExtractor
from ..base_timeperiod import TimePeriodExtractorConfiguration, MatchedIndex
from ..base_time import BaseTimeExtractor
from .time_extractor_config import FrenchTimeExtractorConfiguration
from .base_configs import FrenchDateTimeUtilityConfiguration
class FrenchTimePeriodExtractorConfiguration(TimePeriodExtractorConfiguration):
@property
def simple_cases_regex(self) -> List[Pattern]:
return self._simple_cases_regex
@property
def till_regex(self) -> Pattern:
return self._till_regex
@property
def time_of_day_regex(self) -> Pattern:
return self._time_of_day_regex
@property
def general_ending_regex(self) -> Pattern:
return self._general_ending_regex
@property
def single_time_extractor(self) -> DateTimeExtractor:
return self._single_time_extractor
@property
def integer_extractor(self) -> Extractor:
return self._integer_extractor
def __init__(self):
self._single_time_extractor = BaseTimeExtractor(FrenchTimeExtractorConfiguration())
self._integer_extractor = FrenchIntegerExtractor()
self.utility_configuration = FrenchDateTimeUtilityConfiguration()
self._simple_cases_regex: List[Pattern] = [
RegExpUtility.get_safe_reg_exp(FrenchDateTime.PureNumFromTo),
RegExpUtility.get_safe_reg_exp(FrenchDateTime.PureNumBetweenAnd),
RegExpUtility.get_safe_reg_exp(FrenchDateTime.PmRegex),
RegExpUtility.get_safe_reg_exp(FrenchDateTime.AmRegex)
]
self._till_regex: Pattern = RegExpUtility.get_safe_reg_exp(FrenchDateTime.TillRegex)
self._time_of_day_regex: Pattern = RegExpUtility.get_safe_reg_exp(FrenchDateTime.TimeOfDayRegex)
self._general_ending_regex: Pattern = RegExpUtility.get_safe_reg_exp(FrenchDateTime.GeneralEndingRegex)
self.from_regex = RegExpUtility.get_safe_reg_exp(FrenchDateTime.FromRegex2)
self.connector_and_regex = RegExpUtility.get_safe_reg_exp(FrenchDateTime.ConnectorAndRegex)
self.before_regex = RegExpUtility.get_safe_reg_exp(FrenchDateTime.BeforeRegex2)
def get_from_token_index(self, source: str) -> MatchedIndex:
match = self.from_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
def get_between_token_index(self, source: str) -> MatchedIndex:
match = self.before_regex.search(source)
if match:
return MatchedIndex(True, match.start())
return MatchedIndex(False, -1)
    def has_connector_token(self, source: str) -> bool:
        # unlike the two index lookups above, this method is annotated to
        # return a plain bool, so do not wrap the result in MatchedIndex
        return self.connector_and_regex.search(source) is not None
|
matthewshim-ms/Recognizers-Text
|
Python/libraries/recognizers-date-time/recognizers_date_time/date_time/french/timeperiod_extractor_config.py
|
Python
|
mit
| 3,112
|
#!/usr/bin/env python
# coding=utf-8
"""
Configuration for vcr samples.
"""
import logging
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.auth.bce_credentials import BceCredentials
HOST = 'http://vcr.bj.baidubce.com'
AK = 'Fill AK here'
SK = 'Fill SK here'
logger = logging.getLogger('baidubce.services.vcr.vcrclient')
fh = logging.FileHandler('sample.log')
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
logger.addHandler(fh)
config = BceClientConfiguration(credentials=BceCredentials(AK, SK), endpoint=HOST)
|
baidubce/bce-sdk-python
|
sample/vcr/vcr_sample_conf.py
|
Python
|
apache-2.0
| 675
|
import PyQt5.QtWidgets as QtWidgets
import PyQt5.QtGui as QtGui
class Blink(QtWidgets.QGraphicsRectItem):
"""
Represents a shp file diagram background
"""
ZVALUE = -1000
def __init__(self, rect, parent=None):
super(Blink, self).__init__(rect, parent)
brush = QtGui.QBrush(QtGui.QColor(232, 232, 232))
self.setBrush(brush)
self.setZValue(self.ZVALUE)
|
GeoMop/GeoMop
|
src/LayerEditor/ui/gitems/blink.py
|
Python
|
gpl-3.0
| 423
|
"""
Example of creating a radar chart (a.k.a. a spider or star chart) [1]_.
Although this example allows a frame of either 'circle' or 'polygon', polygon
frames don't have proper gridlines (the lines are circles instead of polygons).
It's possible to get a polygon grid by setting GRIDLINE_INTERPOLATION_STEPS in
matplotlib.axis to the desired number of vertices, but the orientation of the
polygon is not aligned with the radial axes.
.. [1] http://en.wikipedia.org/wiki/Radar_chart
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.path import Path
from matplotlib.spines import Spine
from matplotlib.projections.polar import PolarAxes
from matplotlib.projections import register_projection
def radar_factory(num_vars, frame='circle'):
"""Create a radar chart with `num_vars` axes.
This function creates a RadarAxes projection and registers it.
Parameters
----------
num_vars : int
Number of variables for radar chart.
frame : {'circle' | 'polygon'}
Shape of frame surrounding axes.
"""
# calculate evenly-spaced axis angles
theta = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
# rotate theta such that the first axis is at the top
theta += np.pi/2
def draw_poly_patch(self):
verts = unit_poly_verts(theta)
return plt.Polygon(verts, closed=True, edgecolor='k')
def draw_circle_patch(self):
# unit circle centered on (0.5, 0.5)
return plt.Circle((0.5, 0.5), 0.5)
patch_dict = {'polygon': draw_poly_patch, 'circle': draw_circle_patch}
if frame not in patch_dict:
raise ValueError('unknown value for `frame`: %s' % frame)
class RadarAxes(PolarAxes):
name = 'radar'
# use 1 line segment to connect specified points
RESOLUTION = 1
# define draw_frame method
draw_patch = patch_dict[frame]
def fill(self, *args, **kwargs):
"""Override fill so that line is closed by default"""
closed = kwargs.pop('closed', True)
return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)
def plot(self, *args, **kwargs):
"""Override plot so that line is closed by default"""
lines = super(RadarAxes, self).plot(*args, **kwargs)
for line in lines:
self._close_line(line)
def _close_line(self, line):
x, y = line.get_data()
# FIXME: markers at x[0], y[0] get doubled-up
if x[0] != x[-1]:
x = np.concatenate((x, [x[0]]))
y = np.concatenate((y, [y[0]]))
line.set_data(x, y)
def set_varlabels(self, labels):
self.set_thetagrids(np.degrees(theta), labels)
def _gen_axes_patch(self):
return self.draw_patch()
def _gen_axes_spines(self):
if frame == 'circle':
return PolarAxes._gen_axes_spines(self)
# The following is a hack to get the spines (i.e. the axes frame)
# to draw correctly for a polygon frame.
# spine_type must be 'left', 'right', 'top', 'bottom', or `circle`.
spine_type = 'circle'
verts = unit_poly_verts(theta)
# close off polygon by repeating first vertex
verts.append(verts[0])
path = Path(verts)
spine = Spine(self, spine_type, path)
spine.set_transform(self.transAxes)
return {'polar': spine}
register_projection(RadarAxes)
return theta
def unit_poly_verts(theta):
"""Return vertices of polygon for subplot axes.
This polygon is circumscribed by a unit circle centered at (0.5, 0.5)
"""
x0, y0, r = [0.5] * 3
verts = [(r*np.cos(t) + x0, r*np.sin(t) + y0) for t in theta]
return verts
def example_data():
# The following data is from the Denver Aerosol Sources and Health study.
# See doi:10.1016/j.atmosenv.2008.12.017
#
# The data are pollution source profile estimates for five modeled
# pollution sources (e.g., cars, wood-burning, etc) that emit 7-9 chemical
# species. The radar charts are experimented with here to see if we can
# nicely visualize how the modeled source profiles change across four
# scenarios:
# 1) No gas-phase species present, just seven particulate counts on
# Sulfate
# Nitrate
# Elemental Carbon (EC)
# Organic Carbon fraction 1 (OC)
# Organic Carbon fraction 2 (OC2)
# Organic Carbon fraction 3 (OC3)
# Pyrolized Organic Carbon (OP)
    #  2) Inclusion of the gas-phase species carbon monoxide (CO)
    #  3) Inclusion of the gas-phase species ozone (O3).
    #  4) Inclusion of both gas-phase species (CO and O3).
data = [
['Sulfate', 'Nitrate', 'EC', 'OC1', 'OC2', 'OC3', 'OP', 'CO', 'O3'],
('Basecase', [
[0.88, 0.01, 0.03, 0.03, 0.00, 0.06, 0.01, 0.00, 0.00],
[0.07, 0.95, 0.04, 0.05, 0.00, 0.02, 0.01, 0.00, 0.00],
[0.01, 0.02, 0.85, 0.19, 0.05, 0.10, 0.00, 0.00, 0.00],
[0.02, 0.01, 0.07, 0.01, 0.21, 0.12, 0.98, 0.00, 0.00],
[0.01, 0.01, 0.02, 0.71, 0.74, 0.70, 0.00, 0.00, 0.00]]),
('With CO', [
[0.88, 0.02, 0.02, 0.02, 0.00, 0.05, 0.00, 0.05, 0.00],
[0.08, 0.94, 0.04, 0.02, 0.00, 0.01, 0.12, 0.04, 0.00],
[0.01, 0.01, 0.79, 0.10, 0.00, 0.05, 0.00, 0.31, 0.00],
[0.00, 0.02, 0.03, 0.38, 0.31, 0.31, 0.00, 0.59, 0.00],
[0.02, 0.02, 0.11, 0.47, 0.69, 0.58, 0.88, 0.00, 0.00]]),
('With O3', [
[0.89, 0.01, 0.07, 0.00, 0.00, 0.05, 0.00, 0.00, 0.03],
[0.07, 0.95, 0.05, 0.04, 0.00, 0.02, 0.12, 0.00, 0.00],
[0.01, 0.02, 0.86, 0.27, 0.16, 0.19, 0.00, 0.00, 0.00],
[0.01, 0.03, 0.00, 0.32, 0.29, 0.27, 0.00, 0.00, 0.95],
[0.02, 0.00, 0.03, 0.37, 0.56, 0.47, 0.87, 0.00, 0.00]]),
('CO & O3', [
[0.87, 0.01, 0.08, 0.00, 0.00, 0.04, 0.00, 0.00, 0.01],
[0.09, 0.95, 0.02, 0.03, 0.00, 0.01, 0.13, 0.06, 0.00],
[0.01, 0.02, 0.71, 0.24, 0.13, 0.16, 0.00, 0.50, 0.00],
[0.01, 0.03, 0.00, 0.28, 0.24, 0.23, 0.00, 0.44, 0.88],
[0.02, 0.00, 0.18, 0.45, 0.64, 0.55, 0.86, 0.00, 0.16]])
]
return data
if __name__ == '__main__':
N = 9
theta = radar_factory(N, frame='polygon')
data = example_data()
spoke_labels = data.pop(0)
fig = plt.figure(figsize=(9, 9))
fig.subplots_adjust(wspace=0.25, hspace=0.20, top=0.85, bottom=0.05)
colors = ['b', 'r', 'g', 'm', 'y']
# Plot the four cases from the example data on separate axes
for n, (title, case_data) in enumerate(data):
ax = fig.add_subplot(2, 2, n + 1, projection='radar')
plt.rgrids([0.2, 0.4, 0.6, 0.8])
ax.set_title(title, weight='bold', size='medium', position=(0.5, 1.1),
horizontalalignment='center', verticalalignment='center')
for d, color in zip(case_data, colors):
ax.plot(theta, d, color=color)
ax.fill(theta, d, facecolor=color, alpha=0.25)
ax.set_varlabels(spoke_labels)
# add legend relative to top-left plot
plt.subplot(2, 2, 1)
labels = ('Factor 1', 'Factor 2', 'Factor 3', 'Factor 4', 'Factor 5')
legend = plt.legend(labels, loc=(0.9, .95), labelspacing=0.1)
plt.setp(legend.get_texts(), fontsize='small')
plt.figtext(0.5, 0.965, '5-Factor Solution Profiles Across Four Scenarios',
ha='center', color='black', weight='bold', size='large')
plt.show()
|
bundgus/python-playground
|
matplotlib-playground/examples/api/radar_chart.py
|
Python
|
mit
| 7,632
|
"""
These functions are used protectors to implement :ref:`scoping <auth-scopes>`.
"""
import re
#: A special scope item that implicitly encapsulates all other scope items
ANY = {"$^&#THISISGARBAGE#*@&@#$*@$&DFDF#&#@&@&##*&@DHJGDJH#@&*^@#*+crud"}
def normalize_scope_items(scopes, default_mode="r", raise_err=True):
"""
Return a set of scope items that have been normalized.
A normalized set of scope items is one where every item
is in the format:
.. productionlist:: normalized_scope
norm_scope : `scope_name`+`permission`
    Input scope items are assumed to carry the 'r' permission by default.
    For example, the scope item ``user`` will normalize to ``user+r``.
Input scope items that contain more than one permission are
expanded to multiple scope items. For example the scope item
``user+ud`` is expanded to (``user+u``, ``user+d``).
Note that permissions are atomic, and none implies another.
For example, ``user+u`` will expand to ``user+u`` and NOT
(``user+r``, ``user+u``).
:param scopes: A list of :ref:`scope items <auth-scopes>`.
:param default_mode: The permission that should be assumed if one is
omitted.
:param raise_err: If ``True``, malformed scopes will raise a
:class:`ValueError`. Otherwise they are omitted.
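
    Example (illustrative)::

        >>> sorted(normalize_scope_items(["user"]))
        ['user+r']
        >>> sorted(normalize_scope_items(["user+ud"]))
        ['user+d', 'user+u']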
"""
normalized = set()
    rep = re.compile(r'''^                 # Regex matches the entire string
(?P<item> # Capture group for a scope item
(?: # One or more of any of these:
[^\W\d_] # alpha numerics or underscore
|
[!#-*,-\[\]-~] # Most special ascii characters
# except: '+' and '\'
)+
)
(?: # The permissions section
                            \+             # Preceded by a '+'
(?P<permissions> # followed by one or more of
[crud]+ # 'c' 'r' 'u' or 'd'
)
)? # Permissions are optional.
$''', re.UNICODE | re.VERBOSE)
for item in scopes:
match = rep.fullmatch(item)
if match is not None:
item = match.group("item")
permissions = match.group("permissions") or default_mode
for p in permissions:
normalized.add("{item}+{p}".format(**locals()))
elif raise_err:
raise ValueError(item)
return normalized
def check_encapsulates(root, child, sep="/"):
"""
Check that one scope item encapsulates of another.
A :token:`scope <auth-scopes>` item encapsulates when it is a super-scope
of the other, and when its permissions are a superset of the other's
permissions.
This is used to implement sub-scopes, where permissions granted on
a broad scope can be used to imply permissions for a sub-scope. By default,
sub-scopes are denoted by a preceeding '/'.
For example, a scope permission if ``user+r`` is granted to an agent, then
that agent is also implied to have been granted ``user/emails+r``,
``user/friends+r`` and so on.
:param root: A super-scope
:param child: A potential sub-scope
:param sep: The separator that is used to denote sub-scopes.
"""
if root == ANY:
return True
root_fragment, root_permissions = root.split("+")
child_fragment, child_permissions = child.split("+")
if sep is None:
# In this case, disable checking for branched scope items, but enable
# checking for scope items with a subset of the permissions.
rep = re.compile("^{0}$".format(re.escape(root_fragment)), re.U)
else:
root_fragment = root_fragment[:-1] \
if root_fragment.endswith(sep) \
else root_fragment
        # Use a regular expression to verify that the child fragment is indeed
        # a sub-scope of the root fragment: it must either equal the root
        # exactly, or continue past the root with a separator.
        rep = re.compile("^({0})$|({0}){1}".format(
            re.escape(root_fragment), re.escape(sep)), re.U)
root_permissions = set(root_permissions)
child_permissions = set(child_permissions)
if not root_permissions.issuperset(child_permissions):
return False
elif not rep.match(child_fragment):
return False
else:
return True
def find_encapsulating_scope(scope, scopes, sep="/"):
    """Return the first item in ``scopes`` that encapsulates ``scope``."""
    for scp in scopes:
        if check_encapsulates(scp, scope, sep):
            return scp
    return None
def compress_scope_items(scopes, default_mode="r"):
"""
Return a set of equivalent scope items that may
be smaller in size.
Input scope items must be a normalized set of scope
items.
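
    Example (illustrative)::

        >>> sorted(compress_scope_items({"user+r", "user/emails+r"}))
        ['user']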
"""
item_hash = {}
compressed = set()
default_permissions = set(default_mode)
# Catalog which permissions have been collected for each scope item
# fragment
for item in scopes:
fragment, p = item.split("+")
item_hash.setdefault(fragment, set())
item_hash[fragment].update(p)
# Rebuild the set of scopes from catalog, dropping
# fragments that are covered by shorter fragments
# in the catalog
for fragment in item_hash:
permissions = item_hash[fragment]
parts = fragment.split("/")
for i in range(len(parts)):
frag = "/".join(parts[:i])
if item_hash.get(frag, set()).issuperset(permissions):
break
else:
if permissions == default_permissions:
compressed.add(fragment)
else:
compressed.add("+".join((fragment, "".join(permissions))))
return compressed
|
geniphi/findig
|
findig/tools/protector/scopeutil.py
|
Python
|
mit
| 5,779
|
import os
import sys
import shutil
import logging
import codecs
import inspect
import re
from string import Template
from runner import check_output
_log = logging.getLogger('utils')
def safe_makedirs(path):
try:
os.makedirs(path)
except OSError, e:
# Ignore if it exists
if e.errno != 17:
raise e
def load_env(path):
_log.info("Loading environment from [%s]", path)
env = {}
with open(path, 'rt') as envFile:
for line in envFile:
name, val = line.strip().split('=', 1)
env[name.strip()] = val.strip()
_log.debug("Loaded environment [%s]", env)
return env
def load_processes(path):
_log.info("Loading processes from [%s]", path)
procs = {}
with open(path, 'rt') as procFile:
for line in procFile:
name, cmd = line.strip().split(':', 1)
procs[name.strip()] = cmd.strip()
_log.debug("Loaded processes [%s]", procs)
return procs
def load_extension(path):
_log.debug("Loading extension from [%s]", path)
init = os.path.join(path, '__init__.py')
if not os.path.exists(init):
with open(init, 'w'):
pass # just create an empty file
try:
sys.path.append(os.path.dirname(path))
extn = __import__('%s.extension' % os.path.basename(path),
fromlist=['extension'])
finally:
sys.path.remove(os.path.dirname(path))
return extn
def process_extension(path, ctx, to_call, success, args=None, ignore=False):
_log.debug('Processing extension from [%s] with method [%s]',
path, to_call)
if not args:
args = [ctx]
extn = load_extension(path)
try:
if hasattr(extn, to_call):
success(getattr(extn, to_call)(*args))
except Exception:
if ignore:
_log.exception("Error with extension [%s]" % path)
else:
raise
def process_extensions(ctx, to_call, success, args=None, ignore=False):
for path in ctx['EXTENSIONS']:
process_extension(path, ctx, to_call, success, args, ignore)
def rewrite_with_template(template, cfgPath, ctx):
with codecs.open(cfgPath, encoding='utf-8') as fin:
data = fin.read()
with codecs.open(cfgPath, encoding='utf-8', mode='wt') as out:
out.write(template(data).safe_substitute(ctx))
def rewrite_cfgs(toPath, ctx, delim='#'):
class RewriteTemplate(Template):
delimiter = delim
if os.path.isdir(toPath):
_log.info("Rewriting configuration under [%s]", toPath)
for root, dirs, files in os.walk(toPath):
for f in files:
cfgPath = os.path.join(root, f)
_log.debug("Rewriting [%s]", cfgPath)
rewrite_with_template(RewriteTemplate, cfgPath, ctx)
else:
_log.info("Rewriting configuration file [%s]", toPath)
rewrite_with_template(RewriteTemplate, toPath, ctx)
def find_git_url(bp_dir):
if os.path.exists(os.path.join(bp_dir, '.git')):
try:
url = check_output(['git', '--git-dir=%s/.git' % bp_dir,
'config', '--get', 'remote.origin.url'])
commit = check_output(['git', '--git-dir=%s/.git' % bp_dir,
'rev-parse', '--short', 'HEAD'])
if url and commit:
return "%s#%s" % (url.strip(), commit.strip())
except OSError:
_log.debug("Git does not seem to be installed / available",
exc_info=True)
class FormattedDictWrapper(object):
def __init__(self, obj):
self.obj = obj
def unwrap(self):
return self.obj
def __str__(self):
return self.obj.__str__()
def __repr__(self):
return self.obj.__repr__()
def wrap(obj):
return FormattedDictWrapper(obj)
class FormattedDict(dict):
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
def format(self, val):
if hasattr(val, 'format'):
val = val.format(**self)
newVal = val.format(**self)
while val != newVal:
val = newVal
newVal = newVal.format(**self)
return val
return val.unwrap() if hasattr(val, 'unwrap') else val
def __getitem__(self, key):
return self.format(dict.__getitem__(self, key))
def get(self, *args, **kwargs):
if kwargs.get('format', True):
return self.format(dict.get(self, *args))
else:
tmp = dict.get(self, *args)
return tmp.unwrap() if hasattr(tmp, 'unwrap') else tmp
def __setitem__(self, key, val):
if _log.isEnabledFor(logging.DEBUG):
frame = inspect.currentframe()
caller = inspect.getouterframes(frame, 2)
info = caller[1]
_log.debug('line #%s in %s, "%s" is setting [%s] = [%s]',
info[2], info[1], info[3], key, val)
dict.__setitem__(self, key, val)
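# A minimal illustrative sketch of FormattedDict (the keys below are made up):
# values may reference other keys and are expanded recursively on access.
#
#   ctx = FormattedDict({'HOME': '/app', 'LOG_DIR': '{HOME}/logs'})
#   ctx['LOG_DIR']                    # -> '/app/logs'
#   ctx.get('LOG_DIR', format=False)  # -> '{HOME}/logs' (raw value)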
class ConfigFileEditor(object):
def __init__(self, cfgPath):
with open(cfgPath, 'rt') as cfg:
self._lines = cfg.readlines()
def find_lines_matching(self, regex):
if hasattr(regex, 'strip'):
regex = re.compile(regex)
if not hasattr(regex, 'match'):
raise ValueError("must be str or RegexObject")
return [line.strip() for line in self._lines if regex.match(line)]
def update_lines(self, regex, repl):
if hasattr(regex, 'strip'):
regex = re.compile(regex)
if not hasattr(regex, 'match'):
raise ValueError("must be str or RegexObject")
self._lines = [regex.sub(repl, line) for line in self._lines]
def append_lines(self, lines):
self._lines.extend(lines)
def insert_after(self, regex, lines):
if hasattr(regex, 'strip'):
regex = re.compile(regex)
if not hasattr(regex, 'match'):
raise ValueError("must be str or RegexObject")
for i, line in enumerate(self._lines):
if regex.match(line):
for j, item in enumerate(["%s\n" % l for l in lines]):
self._lines.insert((i + j + 1), item)
break
def save(self, cfgPath):
with open(cfgPath, 'wt') as cfg:
cfg.writelines(self._lines)
def unique(seq):
"""Return only the unique items in the given list, but preserve order"""
# http://stackoverflow.com/a/480227
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
# This is copytree from PyPy 2.7 source code.
# https://bitbucket.org/pypy/pypy/src/9d88b4875d6e/lib-python/2.7/shutil.py
# Modifying this so that it doesn't care about an initial directory existing
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
    WindowsError
except NameError:
    # WindowsError only exists on Windows; define it as None elsewhere so
    # the isinstance() check inside copytree below is safe on other platforms
    WindowsError = None
def copytree(src, dst, symlinks=False, ignore=None):
"""Recursively copy a directory tree using copy2().
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
XXX Consider this example code rather than the ultimate tool.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
try:
os.makedirs(dst)
except OSError, e:
if e.errno != 17: # File exists
raise e
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
# Will raise a SpecialFileError for unsupported file types
shutil.copy2(srcname, dstname)
# catch the Error from the recursive copytree so that we can
# continue with other files
except shutil.Error, err:
errors.extend(err.args[0])
except EnvironmentError, why:
errors.append((srcname, dstname, str(why)))
try:
shutil.copystat(src, dst)
except OSError, why:
if WindowsError is not None and isinstance(why, WindowsError):
# Copying file access times may fail on Windows
pass
else:
            # append a single (src, dst, reason) tuple, matching the shape
            # of the other error entries collected above
            errors.append((src, dst, str(why)))
if errors:
raise shutil.Error, errors
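# Illustrative use of the `ignore` callable (a sketch; shutil.ignore_patterns
# builds a suitable callable from glob patterns):
#
#   copytree('src_dir', 'dst_dir',
#            ignore=shutil.ignore_patterns('*.pyc', '.git'))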
|
lloydbadger/test
|
lib/build_pack_utils/utils.py
|
Python
|
apache-2.0
| 10,306
|
import magic
import pytest
import requests
import requests_mock
from flaky import flaky
from canteens import cafenero
from canteens.canteen import FISH, MEAT, VEGAN, VEGGIE
@pytest.fixture(scope='module')
def pdf_file():
html = cafenero.download_website()
link = cafenero.extract_dropbox_link(html)
tmpdir_of_pdf = cafenero.get_pdf(link)
return tmpdir_of_pdf
@flaky
def test_download_website__with_live_site():
html = cafenero.download_website()
assert 'Speisekarte als' in html
def test_download_website__with_connect_timeout():
with requests_mock.Mocker() as m:
with pytest.raises(requests.exceptions.ConnectTimeout):
m.get(requests_mock.ANY, exc=requests.exceptions.ConnectTimeout)
cafenero.download_website()
def test_extract_dropbox_link():
html = cafenero.download_website()
link = cafenero.extract_dropbox_link(html)
assert 'dropbox' in link
assert 'speisekarte.pdf' in link
@flaky
def test_get_pdf(pdf_file):
filetype = magic.from_file('%s/cafenero.pdf' % pdf_file, mime=True)
assert filetype == 'application/pdf'
def test_get_pdf__with_connect_timeout():
with requests_mock.Mocker() as m:
with pytest.raises(requests.exceptions.ConnectTimeout):
m.get(requests_mock.ANY, exc=requests.exceptions.ConnectTimeout)
html = cafenero.download_website()
link = cafenero.extract_dropbox_link(html)
cafenero.get_pdf(link)
@flaky
def test_pdf_to_text(pdf_file):
menu = cafenero.pdf_to_text(pdf_file)
assert menu != ''
def test_text_to_menu_list():
text = 'cafeneroindervolkswagenuniversitätsbibliothekberlin\nmittagstisch – freitag den 06. oktober 2017 von ' \
'12:00 bis 20:00 uhr\nkleine portion suppe 2,80 € --- kleine portion nudeln 3,60 €\n' \
'pellkartoffeln mit hausgemachtem kräuterquark, leinoel und salat 4,40 €\n\n\nrussische ' \
'gemüse-bortschtsch mit\ndill, schmand, lauchzwiebeln + biomehrkornbrot 3,80 € ' \
' vegetarisch\n\n\n\nspaghetti bolognese\nmit frühlingszwiebeln + petersilie + ' \
'parmesankäse 4,80 € rindfleisch\n\n\n\nspaghetti mit ' \
'tomaten,\nartischocken, lauchzwiebeln+ (parmesan) 4,80 € ' \
'vegetarisch (vegan)\n\n\n\nspaghetti mit salbei-olivenoel und (parmesankäse) 4,80 € ' \
' vegetarisch (vegan)\n\n\n\nkartoffel-rosenkohl-quiche\nmit gorgonzola ' \
'und gemischtem salat vegetarisch 6,00 €\nfilet vom schwarzen heilbutt ' \
'mit chili-koriander-dip\nauf basmatireis mit zucchini-kirschtomaten-gemüse ' \
' fisch 6,50 €\n\x0c'
expected_items = [
'mittagstisch – freitag den 06. oktober 2017 von 12:00 bis 20:00 uhr',
'\n',
'kleine portion suppe 2,80€ vegetarisch',
'kleine portion nudeln 3,60€ vegetarisch',
'pellkartoffeln mit hausgemachtem kräuterquark, leinoel und salat 4,40€ vegetarisch',
'russische gemüse-bortschtsch mit dill, schmand, lauchzwiebeln + biomehrkornbrot 3,80€ vegetarisch',
'spaghetti bolognese mit frühlingszwiebeln + petersilie + parmesankäse 4,80€ rindfleisch',
'spaghetti mit tomaten, artischocken, lauchzwiebeln+ (parmesan) 4,80€ vegetarisch (vegan)',
'spaghetti mit salbei-olivenoel und (parmesankäse) 4,80€ vegetarisch (vegan)',
'kartoffel-rosenkohl-quiche mit gorgonzola und gemischtem salat vegetarisch 6,00€',
'filet vom schwarzen heilbutt mit chili-koriander-dip auf basmatireis mit zucchini-kirschtomaten-gemüse '
'fisch 6,50€'
]
menu = cafenero.text_to_menu_list(text)
assert menu == expected_items
def test_annotate_menu():
menu = [
'mittagstisch – freitag den 06. oktober 2017 von 12:00 bis 20:00 uhr',
'\n',
'kleine portion suppe 2,80€ vegetarisch',
'kleine portion nudeln 3,60€ vegetarisch',
'pellkartoffeln mit hausgemachtem kräuterquark, leinoel und salat 4,40€ vegetarisch',
'russische gemüse-bortschtsch mit dill, schmand, lauchzwiebeln + biomehrkornbrot 3,80€ vegetarisch',
'spaghetti bolognese mit frühlingszwiebeln + petersilie + parmesankäse 4,80€ rindfleisch',
'spaghetti mit tomaten, artischocken, lauchzwiebeln+ (parmesan) 4,80€ vegetarisch (vegan)',
'spaghetti mit salbei-olivenoel und (parmesankäse) 4,80€ vegetarisch (vegan)',
'kartoffel-rosenkohl-quiche mit gorgonzola und gemischtem salat vegetarisch 6,00€',
'filet vom schwarzen heilbutt mit chili-koriander-dip auf basmatireis mit zucchini-kirschtomaten-gemüse '
'fisch 6,50€'
]
expected_annotations = 'mittagstisch – freitag den 06. oktober 2017 von 12:00 bis 20:00 uhr\n\n' \
'%s kleine portion suppe 2,80€\n' \
'%s kleine portion nudeln 3,60€\n' \
'%s pellkartoffeln mit hausgemachtem kräuterquark, leinoel und salat 4,40€\n' \
'%s russische gemüse-bortschtsch mit dill, schmand, lauchzwiebeln + biomehrkornbrot ' \
'3,80€\n' \
'%s spaghetti bolognese mit frühlingszwiebeln + petersilie + parmesankäse 4,80€\n' \
'%s spaghetti mit tomaten, artischocken, lauchzwiebeln+ (parmesan) 4,80€\n' \
'%s spaghetti mit salbei-olivenoel und (parmesankäse) 4,80€\n' \
'%s kartoffel-rosenkohl-quiche mit gorgonzola und gemischtem salat 6,00€\n' \
'%s filet vom schwarzen heilbutt mit chili-koriander-dip auf basmatireis mit ' \
'zucchini-kirschtomaten-gemüse 6,50€' % (VEGGIE, VEGGIE, VEGGIE, VEGGIE, MEAT, VEGAN, VEGAN, VEGGIE, FISH)
annotated_menu = cafenero.annotate_menu(menu)
assert annotated_menu == expected_annotations
@flaky
def test_main():
menu = cafenero.main()
assert 'mittagstisch' in menu
|
ekeih/OmNomNom
|
tests/test_cafenero.py
|
Python
|
agpl-3.0
| 6,361
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('patients', '0007_auto_20150915_1546'),
]
operations = [
migrations.AlterField(
model_name='patient',
name='maiden_name',
field=models.CharField(
max_length=100,
null=True,
verbose_name='Maiden name (if applicable)',
blank=True),
),
]
|
muccg/rdrf
|
rdrf/registry/patients/migrations/0008_auto_20150916_1518.py
|
Python
|
agpl-3.0
| 503
|
# smartmirror.py
# requirements
# requests, feedparser, traceback, Pillow
from Tkinter import *
import locale
import threading
import time
import requests
import json
import traceback
import feedparser
from PIL import Image, ImageTk
from contextlib import contextmanager
LOCALE_LOCK = threading.Lock()
ui_locale = '' # e.g. 'fr_FR' for French, '' as default
time_format = 12 # 12 or 24
date_format = "%b %d, %Y" # check python doc for strftime() for options
news_country_code = 'us'
weather_api_token = '<TOKEN>' # create account at https://darksky.net/dev/
weather_lang = 'en' # see https://darksky.net/dev/docs/forecast for full list of language parameters values
weather_unit = 'us' # see https://darksky.net/dev/docs/forecast for full list of unit parameters values
latitude = None # Set this if IP location lookup does not work for you (must be a string)
longitude = None # Set this if IP location lookup does not work for you (must be a string)
xlarge_text_size = 94
large_text_size = 48
medium_text_size = 28
small_text_size = 18
@contextmanager
def setlocale(name): #thread proof function to work with locale
with LOCALE_LOCK:
saved = locale.setlocale(locale.LC_ALL)
try:
yield locale.setlocale(locale.LC_ALL, name)
finally:
locale.setlocale(locale.LC_ALL, saved)
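# Illustrative use of the thread-safe locale guard above (assumes the
# 'fr_FR' locale is installed on the system; the previous locale is
# restored when the block exits):
#
#   with setlocale('fr_FR'):
#       print time.strftime('%A') # weekday name in French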
# maps darksky.net weather icon codes to local image assets;
# icon reading is not impacted by the 'lang' parameter
icon_lookup = {
'clear-day': "assets/Sun.png", # clear sky day
'wind': "assets/Wind.png", #wind
'cloudy': "assets/Cloud.png", # cloudy day
'partly-cloudy-day': "assets/PartlySunny.png", # partly cloudy day
'rain': "assets/Rain.png", # rain day
'snow': "assets/Snow.png", # snow day
'snow-thin': "assets/Snow.png", # sleet day
'fog': "assets/Haze.png", # fog day
'clear-night': "assets/Moon.png", # clear sky night
'partly-cloudy-night': "assets/PartlyMoon.png", # scattered clouds night
'thunderstorm': "assets/Storm.png", # thunderstorm
'tornado': "assests/Tornado.png", # tornado
'hail': "assests/Hail.png" # hail
}
class Clock(Frame):
def __init__(self, parent, *args, **kwargs):
Frame.__init__(self, parent, bg='black')
# initialize time label
self.time1 = ''
self.timeLbl = Label(self, font=('Helvetica', large_text_size), fg="white", bg="black")
self.timeLbl.pack(side=TOP, anchor=E)
# initialize day of week
self.day_of_week1 = ''
self.dayOWLbl = Label(self, text=self.day_of_week1, font=('Helvetica', small_text_size), fg="white", bg="black")
self.dayOWLbl.pack(side=TOP, anchor=E)
# initialize date label
self.date1 = ''
self.dateLbl = Label(self, text=self.date1, font=('Helvetica', small_text_size), fg="white", bg="black")
self.dateLbl.pack(side=TOP, anchor=E)
self.tick()
def tick(self):
with setlocale(ui_locale):
if time_format == 12:
time2 = time.strftime('%I:%M %p') #hour in 12h format
else:
time2 = time.strftime('%H:%M') #hour in 24h format
day_of_week2 = time.strftime('%A')
date2 = time.strftime(date_format)
            # if time string has changed, update it
            if time2 != self.time1:
                self.time1 = time2
                self.timeLbl.config(text=time2)
            if day_of_week2 != self.day_of_week1:
                self.day_of_week1 = day_of_week2
                self.dayOWLbl.config(text=day_of_week2)
            if date2 != self.date1:
                self.date1 = date2
                self.dateLbl.config(text=date2)
            # reschedule so the clock keeps ticking (200 ms keeps the
            # display smooth)
            self.timeLbl.after(200, self.tick)
|
ansgmlen/ai-mirror
|
python/sample.py
|
Python
|
mit
| 3,396
|
# Copyright 2019 Roberto Fichera - Level Prime Srl
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import base64
from odoo import _, models
from odoo.exceptions import ValidationError
class WizardExportFatturapa(models.TransientModel):
_inherit = "wizard.export.fatturapa"
def updateAttachment(self, attach, fatturapa):
attach_str = fatturapa.to_xml(self.env)
attach.write(
{
"datas": base64.encodebytes(attach_str),
"state": "ready",
}
)
def exportFatturaPARegenerate(self):
invoice_obj = self.env["account.move"]
attachments = self.env["fatturapa.attachment.out"]
# Browse active invoice
active_id = invoice_obj.browse(self._context.get("active_id"))
if not active_id:
raise ValidationError(_("The method can be called with a valid active_id"))
# Search all the invoices belonging the same xml file
invoice_ids = invoice_obj.search(
[
(
"fatturapa_attachment_out_id",
"=",
active_id.fatturapa_attachment_out_id.id,
)
]
).ids
attach = active_id.fatturapa_attachment_out_id
if not attach:
raise ValidationError(
_(
"The invoice cannot be regenerated because doesn't have a "
"e-invoice attachment associated to it"
)
)
partner = active_id.partner_id
context_partner = self.env.context.copy()
context_partner.update({"lang": partner.lang})
fatturapa, number = self.exportInvoiceXML(
partner, invoice_ids, attach=attach, context=context_partner
)
self.updateAttachment(attach, fatturapa)
attachments |= attach
action = {
"name": "Re-Export Electronic Invoice",
"res_model": "fatturapa.attachment.out",
"type": "ir.actions.act_window",
}
if len(attachments) == 1:
action["view_mode"] = "form"
action["res_id"] = attachments[0].id
else:
action["view_mode"] = "tree,form"
action["domain"] = [("id", "in", attachments.ids)]
return action
|
OCA/l10n-italy
|
l10n_it_fatturapa_out/wizard/wizard_export_fatturapa_regenerate.py
|
Python
|
agpl-3.0
| 2,357
|
import string
import regex
from collections import deque
from flanker.mime.message.headers import encodedword, parametrized
from flanker.mime.message.headers.wrappers import ContentType, WithParams
from flanker.mime.message.errors import DecodingError
from flanker.utils import to_unicode, is_pure_ascii
MAX_LINE_LENGTH = 10000
def normalize(header):
return string.capwords(header.lower(), '-')
def parse_stream(stream):
"""Reads the incoming stream and returns list of tuples"""
out = deque()
for header in unfold(split(stream)):
out.append(parse_header(header))
return out
def parse_header(header):
""" Accepts a raw header with name, colons and newlines
    and returns its parsed value
"""
name, val = split2(header)
if not is_pure_ascii(name):
raise DecodingError("Non-ascii header name")
return name, parse_header_value(name, encodedword.unfold(val))
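# Illustrative behaviour of parse_header (a sketch with a plain ascii header;
# encoded-word and parametrized values go through parse_header_value below):
#
#   parse_header("Subject: Hello world")  # -> ('Subject', 'Hello world')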
def parse_header_value(name, val):
if not is_pure_ascii(val):
if parametrized.is_parametrized(name, val):
raise DecodingError("Unsupported value in content- header")
return to_unicode(val)
else:
if parametrized.is_parametrized(name, val):
val, params = parametrized.decode(val)
if name == 'Content-Type':
main, sub = parametrized.fix_content_type(val)
return ContentType(main, sub, params)
else:
return WithParams(val, params)
else:
return val
def is_empty(line):
return line in ('\r\n', '\r', '\n')
RE_HEADER = regex.compile(r'^(From |[\041-\071\073-\176]+:|[\t ])')
def split(fp):
"""Read lines with headers until the start of body"""
lines = deque()
for line in fp:
if len(line) > MAX_LINE_LENGTH:
raise DecodingError(
"Line is too long: {0}".format(len(line)))
if is_empty(line):
break
        # tricky case: if it's not a header and not an empty line, it
        # usually means the user forgot to separate the headers from the
        # body, so "unread" this line here, i.e. treat it as part of the body
if not RE_HEADER.match(line):
fp.seek(fp.tell() - len(line))
break
lines.append(line)
return lines
def unfold(lines):
headers = deque()
for line in lines:
# ignore unix from
if line.startswith("From "):
continue
# this is continuation
elif line[0] in ' \t':
extend(headers, line)
else:
headers.append(line)
new_headers = deque()
for h in headers:
if isinstance(h, deque):
new_headers.append("".join(h).rstrip("\r\n"))
else:
new_headers.append(h.rstrip("\r\n"))
return new_headers
def extend(headers, line):
try:
header = headers.pop()
except IndexError:
# this means that we got invalid header
# ignore it
return
if isinstance(header, deque):
header.append(line)
headers.append(header)
else:
headers.append(deque((header, line)))
def split2(header):
pair = header.split(":", 1)
if len(pair) == 2:
return normalize(pair[0].rstrip()), pair[1].lstrip()
else:
return (None, None)
|
glyph/flanker
|
flanker/mime/message/headers/parsing.py
|
Python
|
apache-2.0
| 3,323
|
from typing import List, Optional, Union
from great_expectations import DataContext
from great_expectations.core.batch import Batch, BatchRequest
from great_expectations.execution_engine.execution_engine import MetricDomainTypes
from great_expectations.rule_based_profiler.domain_builder import DomainBuilder
from great_expectations.rule_based_profiler.types import (
Domain,
ParameterContainer,
SemanticDomainTypes,
)
from great_expectations.validator.metric_configuration import MetricConfiguration
class MyCustomSemanticTypeColumnDomainBuilder(DomainBuilder):
"""
This custom DomainBuilder defines and filters for "user_id" semantic type fields
"""
def __init__(
self,
data_context: DataContext,
batch_list: Optional[List[Batch]] = None,
batch_request: Optional[Union[BatchRequest, dict]] = None,
semantic_types: Optional[
Union[str, SemanticDomainTypes, List[Union[str, SemanticDomainTypes]]]
] = None,
column_name_suffixes: Optional[List[str]] = None,
):
super().__init__(
data_context=data_context,
batch_list=batch_list,
batch_request=batch_request,
)
if semantic_types is None:
semantic_types = ["user_id"]
self._semantic_types = semantic_types
if column_name_suffixes is None:
column_name_suffixes = [
"_id",
]
self._column_name_suffixes = column_name_suffixes
@property
def domain_type(self) -> Union[str, MetricDomainTypes]:
return MetricDomainTypes.COLUMN
@property
def semantic_types(
self,
) -> Optional[
Union[str, SemanticDomainTypes, List[Union[str, SemanticDomainTypes]]]
]:
return self._semantic_types
@property
def column_name_suffixes(self) -> Optional[List[str]]:
return self._column_name_suffixes
def _get_domains(
self,
variables: Optional[ParameterContainer] = None,
) -> List[Domain]:
"""
Find the semantic column type for each column and return all domains matching the specified type or types.
"""
batch_ids: List[str] = self.get_batch_ids(variables=variables)
table_column_names: List[str] = self.get_validator(
variables=variables
).get_metric(
metric=MetricConfiguration(
metric_name="table.columns",
metric_domain_kwargs={
"batch_id": batch_ids[-1], # active_batch_id
},
metric_value_kwargs=None,
metric_dependencies=None,
)
)
        # Keep only the columns whose names end with one of the configured
        # suffixes (default: "_id").
candidate_column_names: List[str] = list(
filter(
lambda candidate_column_name: candidate_column_name.endswith(
tuple(self.column_name_suffixes)
),
table_column_names,
)
)
column_name: str
domains: List[Domain] = [
Domain(
domain_type=MetricDomainTypes.COLUMN,
domain_kwargs={
"column": column_name,
},
)
for column_name in candidate_column_names
]
return domains
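# A hedged usage sketch (the names `context` and `batch_request` below are
# hypothetical; a real setup supplies a DataContext and a BatchRequest that
# point at actual data):
#
#   builder = MyCustomSemanticTypeColumnDomainBuilder(
#       data_context=context,
#       batch_request=batch_request,
#       column_name_suffixes=["_id"],
#   )
#   domains = builder.get_domains(variables=None)  # one Domain per "*_id" column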
|
great-expectations/great_expectations
|
tests/test_fixtures/rule_based_profiler/plugins/my_custom_semantic_type_column_domain_builder.py
|
Python
|
apache-2.0
| 3,360
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities used to capture Python idioms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Undefined(object):
"""Represents an undefined symbol in Python.
This is used to reify undefined symbols, which is required to use the
functional form of loops.
Example:
while n > 0:
n = n - 1
s = n
return s # Runtime error if n == 0
This is valid Python code and will not result in an error as long as n
is positive. The use of this class is to stay as close to Python semantics
as possible for staged code of this nature.
Converted version of the above showing the possible usage of this class:
s = Undefined('s')
init_state = (s,)
s = while_loop(cond, body, init_state)
return s # s is an instance of Undefined if the loop never runs
Attributes:
symbol_name: Text, identifier for the undefined symbol
"""
__slots__ = ('symbol_name',)
def __init__(self, symbol_name):
self.symbol_name = symbol_name
def __repr__(self):
return self.symbol_name
def __getattribute__(self, name):
try:
# If it's an existing attribute, return it.
return object.__getattribute__(self, name)
except AttributeError:
# Otherwise return Undefined.
return self
def __getitem__(self, i):
return self
def is_undefined(value):
"""Checks whether Autograph has determined that a given value is undefined.
This only works in places where Autograph reifies undefined symbols. Note that
if this function is passed a truly undefined symbol the call-site will raise
NameError.
Args:
value: value to test for undefinedness
Returns:
Boolean, whether the input value is undefined.
"""
return isinstance(value, Undefined)
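# Illustrative behaviour (a sketch):
#
#   s = Undefined('s')
#   is_undefined(s)       # -> True
#   is_undefined(s.x[0])  # -> True; attribute/item access also yields Undefined
#   is_undefined(0)       # -> False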
# TODO(mdan): Refactor as a RetVal object, aggregating the value and do_return.
class UndefinedReturnValue(object):
"""Represents a default return value from a function (None in Python)."""
pass
def retval(value):
"""Returns the actual value that a return statement should produce."""
if isinstance(value, UndefinedReturnValue):
return None
return value
def is_undefined_return(value):
"""Checks whether `value` is the default return value."""
return isinstance(value, UndefinedReturnValue)
|
gunan/tensorflow
|
tensorflow/python/autograph/operators/special_values.py
|
Python
|
apache-2.0
| 3,006
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
class Dream(models.Model):
user = models.ForeignKey(User)
date = models.DateField()
title = models.CharField(max_length=100)
content = models.TextField()
feedback = models.TextField(null=True, blank=True)
pubtime = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return u'%d %s %s %s' % (self.id, self.title, self.content, self.feedback)
|
kwailamchan/programming-languages
|
python/django/elf/dailydream/dailydream/dreams/models.py
|
Python
|
mit
| 512
|
#!/usr/bin/python3
"""
Cleans-up Sphinx-only constructs (ie from README.rst),
so that *PyPi* can format it properly.
To check for remaining errors, install ``sphinx`` and run::
python setup.py --long-description | sed -file 'this_file.sed' | rst2html.py --halt=warning
"""
import re
import sys, io
def yield_sphinx_only_markup(lines):
"""
    :param lines: an iterable of text lines (e.g. an open file or ``sys.stdin``)
    :return: the same lines with Sphinx-only constructs rewritten to plain reST
"""
substs = [
## Selected Sphinx-only Roles.
#
(r':abbr:`([^`]+)`', r'\1'),
(r':ref:`([^`]+)`', r'`\1`_'),
(r':term:`([^`]+)`', r'**\1**'),
(r':dfn:`([^`]+)`', r'**\1**'),
(r':(samp|guilabel|menuselection):`([^`]+)`', r'``\2``'),
## Sphinx-only roles:
# :foo:`bar` --> foo(``bar``)
# :a:foo:`bar` XXX afoo(``bar``)
#
#(r'(:(\w+))?:(\w+):`([^`]*)`', r'\2\3(``\4``)'),
(r':(\w+):`([^`]*)`', r'\1(``\2``)'),
## Sphinx-only Directives.
#
(r'\.\. doctest', r'code-block'),
(r'\.\. plot::', r'.. '),
(r'\.\. seealso', r'info'),
(r'\.\. glossary', r'rubric'),
(r'\.\. figure::', r'.. '),
## Other
#
(r'\|version\|', r'x.x.x'),
]
regex_subs = [ (re.compile(regex, re.IGNORECASE), sub) for (regex, sub) in substs ]
def clean_line(line):
try:
for (regex, sub) in regex_subs:
line = regex.sub(sub, line)
except Exception as ex:
            print("ERROR: regex(%s), sub(%s), line(%s)" % (regex, sub, line))
raise ex
return line
for line in lines:
yield clean_line(line)
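# Illustrative usage sketch (added for exposition); the sample lines are
# hypothetical.
if __name__ == '__main__':
    sample = [':ref:`intro`', '.. plot::', '|version|']
    for cleaned in yield_sphinx_only_markup(sample):
        print(cleaned)  # -> '`intro`_', '.. ', 'x.x.x'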
|
iSTB/python-schemata
|
rst_cleaner.py
|
Python
|
mit
| 1,813
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import os
import sys
this_dir = os.path.dirname(os.path.abspath(__file__))
trunk_dir = os.path.split(this_dir)[0]
sys.path.insert(0,trunk_dir)
from ikol.config import Config
from ikol.directory import Directorio
config = Config()
def moveToDest(listfiles,dircache):
    # Move the matching files to the destination directory
D = Directorio(dircache)
dest = os.path.join(config.getFinalDir(),os.path.basename(dircache))
for i in listfiles:
if D.FileinDir(i):
i = D.FileinDir(i)
            # Rename the file to filter out non-ASCII characters
i = D.CleanName(i)
try:
                print "Moving " + D.path + " to " + dest
D.moveToDest(i,dest)
except Exception, e:
                #TODO: do something about this ascii decode error in print
print e
else:
            print "Error while searching for file " + i
if __name__ == '__main__':
lst = os.listdir(config.getCacheDir())
for i in lst:
print i
if os.path.isdir(os.path.join(config.getCacheDir(),i)):
files = os.listdir(os.path.join(config.getCacheDir(),i))
print files
moveToDest(files,os.path.join(config.getCacheDir(),i))
|
lokiteitor/ikol
|
test/moveTest.py
|
Python
|
gpl-2.0
| 1,301
|
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run tests using docker images in Google Container Registry per matrix."""
from __future__ import print_function
import argparse
import atexit
import json
import multiprocessing
import os
import re
import subprocess
import sys
import uuid
# Language Runtime Matrix
import client_matrix
python_util_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../run_tests/python_utils'))
sys.path.append(python_util_dir)
import dockerjob
import jobset
import report_utils
import upload_test_results
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
list(
set(
client_matrix.get_release_tag_name(info)
for lang in client_matrix.LANG_RELEASE_MATRIX.values()
for info in lang)))
_TEST_TIMEOUT = 30
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument(
'--gcr_path',
default='gcr.io/grpc-testing',
help='Path of docker images in Google Container Registry')
argp.add_argument(
'--release',
default='all',
choices=['all', 'master'] + _RELEASES,
help='Release tags to test. When testing all '
'releases defined in client_matrix.py, use "all".')
argp.add_argument(
'-l',
'--language',
choices=['all'] + sorted(_LANGUAGES),
nargs='+',
default=['all'],
help='Languages to test')
argp.add_argument(
'--keep',
action='store_true',
help='keep the created local images after finishing the tests.')
argp.add_argument(
'--report_file', default='report.xml', help='The result file to create.')
argp.add_argument(
'--allow_flakes',
default=False,
action='store_const',
const=True,
help=('Allow flaky tests to show as passing (re-runs failed '
'tests up to five times)'))
argp.add_argument(
'--bq_result_table',
default='',
type=str,
nargs='?',
help='Upload test results to a specified BQ table.')
argp.add_argument(
'--server_host',
default='74.125.206.210',
type=str,
nargs='?',
help='The gateway to backend services.')
args = argp.parse_args()
print(str(args))
def find_all_images_for_lang(lang):
"""Find docker images for a language across releases and runtimes.
    Returns a dictionary of lists of (<tag>, <image-full-path>) tuples, keyed by runtime.
"""
# Find all defined releases.
if args.release == 'all':
releases = ['master'] + client_matrix.get_release_tags(lang)
else:
# Look for a particular release.
if args.release not in ['master'
] + client_matrix.get_release_tags(lang):
jobset.message(
'SKIPPED',
'%s for %s is not defined' % (args.release, lang),
do_newline=True)
return {}
releases = [args.release]
# Images tuples keyed by runtime.
images = {}
for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
image_path = '%s/grpc_interop_%s' % (args.gcr_path, runtime)
output = subprocess.check_output([
'gcloud', 'beta', 'container', 'images', 'list-tags',
'--format=json', image_path
])
docker_image_list = json.loads(output)
# All images should have a single tag or no tag.
# TODO(adelez): Remove tagless images.
tags = [i['tags'][0] for i in docker_image_list if i['tags']]
jobset.message(
'START',
'Found images for %s: %s' % (image_path, tags),
do_newline=True)
skipped = len(docker_image_list) - len(tags)
jobset.message(
'SKIPPED',
'Skipped images (no-tag/unknown-tag): %d' % skipped,
do_newline=True)
# Filter tags based on the releases.
images[runtime] = [(tag, '%s:%s' % (image_path, tag))
for tag in tags
if tag in releases]
return images
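# Illustrative shape (added for exposition, hypothetical runtime and tag) of
# the value returned by find_all_images_for_lang('go'):
#   {'go1.8': [('v1.9.0', 'gcr.io/grpc-testing/grpc_interop_go1.8:v1.9.0')]}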
# Loads test cases (a list of JobSpec) from the testcase file for a given lang/runtime/release.
def find_test_cases(lang, runtime, release, suite_name):
"""Returns the list of test cases from testcase files per lang/release."""
file_tmpl = os.path.join(os.path.dirname(__file__), 'testcases/%s__%s')
testcase_release = release
filename_prefix = lang
if lang == 'csharp':
filename_prefix = runtime
if not os.path.exists(file_tmpl % (filename_prefix, release)):
testcase_release = 'master'
testcases = file_tmpl % (filename_prefix, testcase_release)
job_spec_list = []
try:
with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
for line in f.readlines():
if line.startswith('docker run'):
m = re.search('--test_case=(.*)"', line)
shortname = m.group(1) if m else 'unknown_test'
m = re.search(
'--server_host_override=(.*).sandbox.googleapis.com',
line)
server = m.group(1) if m else 'unknown_server'
# If server_host arg is not None, replace the original
# server_host with the one provided or append to the end of
# the command if server_host does not appear originally.
if args.server_host:
if line.find('--server_host=') > -1:
line = re.sub('--server_host=[^ ]*',
'--server_host=%s' % args.server_host,
line)
else:
line = '%s --server_host=%s"' % (line[:-1],
args.server_host)
print(line)
spec = jobset.JobSpec(
cmdline=line,
shortname='%s:%s:%s:%s' % (suite_name, lang, server,
shortname),
timeout_seconds=_TEST_TIMEOUT,
shell=True,
flake_retries=5 if args.allow_flakes else 0)
job_spec_list.append(spec)
jobset.message(
'START',
'Loaded %s tests from %s' % (len(job_spec_list), testcases),
do_newline=True)
except IOError as err:
jobset.message('FAILED', err, do_newline=True)
return job_spec_list
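# Illustrative testcase line (added for exposition, hypothetical values) of
# the kind find_test_cases parses: a 'docker run' command carrying
# --test_case=<name>" and --server_host_override=<server>.sandbox.googleapis.com.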
_xml_report_tree = report_utils.new_junit_xml_tree()
def run_tests_for_lang(lang, runtime, images):
"""Find and run all test cases for a language.
    images is a list of (<release-tag>, <image-full-path>) tuples.
"""
total_num_failures = 0
for image_tuple in images:
release, image = image_tuple
jobset.message('START', 'Testing %s' % image, do_newline=True)
# Download the docker image before running each test case.
subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
suite_name = '%s__%s_%s' % (lang, runtime, release)
job_spec_list = find_test_cases(lang, runtime, release, suite_name)
if not job_spec_list:
jobset.message(
'FAILED', 'No test cases were found.', do_newline=True)
return 1
num_failures, resultset = jobset.run(
job_spec_list,
newline_on_success=True,
add_env={'docker_image': image},
maxjobs=args.jobs)
if args.bq_result_table and resultset:
upload_test_results.upload_interop_results_to_bq(
resultset, args.bq_result_table, args)
if num_failures:
jobset.message('FAILED', 'Some tests failed', do_newline=True)
total_num_failures += num_failures
else:
jobset.message('SUCCESS', 'All tests passed', do_newline=True)
report_utils.append_junit_xml_results(_xml_report_tree, resultset,
'grpc_interop_matrix', suite_name,
str(uuid.uuid4()))
if not args.keep:
cleanup(image)
return total_num_failures
def cleanup(image):
jobset.message('START', 'Cleanup docker image %s' % image, do_newline=True)
dockerjob.remove_image(image, skip_nonexistent=True)
languages = args.language if args.language != ['all'] else _LANGUAGES
total_num_failures = 0
for lang in languages:
docker_images = find_all_images_for_lang(lang)
for runtime in sorted(docker_images.keys()):
total_num_failures += run_tests_for_lang(lang, runtime,
docker_images[runtime])
report_utils.create_xml_report_file(_xml_report_tree, args.report_file)
if total_num_failures:
sys.exit(1)
sys.exit(0)
|
murgatroid99/grpc
|
tools/interop_matrix/run_interop_matrix_tests.py
|
Python
|
apache-2.0
| 9,599
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright(c) 2013 Krister Hedfors
#
# Known bugs:
# Debian Squeeze, apt-get:ed python-gevent
# * some DNS queries for nonexistent hosts
# are not cancelled by -w, but take a long time to complete.
#
__all__ = ['XInfoTool']
import sys
import os
import codecs
import urlparse
from xnet.tools import Tool
from xnet.net.ipv4 import PortRangeIterator
from xnet.net.http import UrlIterator
#from xnet.debug import pdb
import xnet.packages.urllib3
import xnet.packages.urllib3.exceptions
class XInfoException(Exception):
pass
class XInfoTool(Tool):
__toolname__ = 'xinfo'
__itemname__ = 'url'
__description__ = 'get information from various external sources'
cmdline_options = [
('', '--tag-lines', 'prefix each line with current url',
dict(dest='tag_lines', action='store_true')),
]
@classmethod
def print_source(cls):
cls._print_source(__file__)
MAX_REDIRECTS = 8
def __init__(self, *args, **kw):
super(XInfoTool, self).__init__(*args, **kw)
if not hasattr(self, '_http'):
self._http = self.__class__._http = self._get_connection()
raise Exception('Work in progress')
def _get_connection(self):
return xnet.packages.urllib3.PoolManager()
def _request(self, *args, **kw):
result = None
done = False
while not done:
try:
result = self._http.request(*args, **kw)
except xnet.packages.urllib3.exceptions.ClosedPoolError:
pass # urllib3 seems to take care of this
else:
done = True
return result
    def __parse__(self, url, iterator):
url = url.strip()
result = {'url': url}
return result
def _get_host(self, url):
return urlparse.urlparse(url).netloc
def __action__(self, parse_result):
result = {}
_url = self._url = parse_result['url']
_host = self.options.host or self._get_host(_url)
_method = self.options.method or 'GET'
_response = None
_body_unicode = None
_code = None
_msg = None
_body = ''
#
#proxies = {}
#
request_kwargs = {}
request_kwargs['redirect'] = False
import pdb; pdb.set_trace() ### XXX BREAKPOINT
if self.options.host:
request_kwargs['headers'] = {'Host': self.options.host}
_host = self.options.host
read_response = not self.options.code \
and not self.options.server
if self.options.format and '{body}' in self.options.format:
read_response = True
done = False
redirect_count = 0
#
# loop to handle redirects
#
while not done:
try:
_response = self._request(_method, _url, **request_kwargs)
except xnet.tools.WaitTimeout:
raise
except xnet.packages.urllib3.exceptions.MaxRetryError, e:
if e.reason and e.reason.strerror:
self.set_error(e.reason.strerror, oneword=True)
else:
self.set_error(e)
return None
except:
import traceback
                traceback.print_exc()
e = str(sys.exc_info())
self.set_error(e)
self.stderr = True
return None
_code = _response.status
_msg = _response.reason
            if _code not in [301, 302, 303, 307]:
done = True
else:
if redirect_count == self.MAX_REDIRECTS:
self.set_error('redirect-limit-reached')
return None
redirect_count += 1
location = _response.getheaders().get('location', None)
if location is None or location.strip() == '':
self.set_error('redirect-empty-location')
return None
ourl = urlparse.urlparse(_url)
lurl = urlparse.urlparse(location)
if (lurl.scheme == '' or lurl.scheme == ourl.scheme) \
and (lurl.netloc == '' or lurl.netloc == ourl.netloc) \
and lurl.path == ourl.path:
self.set_error('redirect-loop')
return None
if not self._explicitly_allowed_redirect(ourl, lurl):
if lurl.netloc and lurl.netloc.lower() != ourl.netloc.lower():
msg = 'redirect-host-mismatch {0}'.format(location)
self.set_error(msg)
return None
if ourl.scheme.lower() == 'http' and lurl.scheme.lower() == 'https':
#
# Allow http to https but nothing else.
#
pass
elif lurl.scheme and lurl.scheme.lower() != ourl.scheme.lower():
                        msg = 'redirect-scheme-mismatch {0}'.format(location)
self.set_error(msg)
return None
#
# Create new url by updating path, query and fragment.
#
scheme, netloc, path, query, fragment = \
urlparse.urlsplit(_url)
parts = (
lurl.scheme or scheme,
lurl.netloc or netloc,
lurl.path,
lurl.query,
lurl.fragment
)
_url = urlparse.urlunsplit(parts)
if read_response:
_body = _response.data
content_type = _response.headers.get('content-type', None)
charset = None
if content_type:
w = content_type.split(';')
content_type = w[0]
for s in w[1:]:
s = s.strip()
if s.startswith('charset='):
charset = s.split('charset=', 1)[1]
if charset:
_body_unicode = codecs.decode(_body, charset, 'ignore')
else:
_body_unicode = codecs.decode(_body, 'Latin-1', 'ignore')
#_body_unicode = unicode(_body)
if read_response and self.options.tag_lines:
tag = self._url + ':'
_body = '\n' + tag + _body.replace('\n', '\n' + tag)
result['host'] = _host
result['url'] = _url
result['response'] = _response
result['code'] = _code
result['msg'] = _msg
result['body'] = _body
result['body_unicode'] = _body_unicode
return result
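    # Worked example (added for exposition): a 301 from http://a/x with
    # 'Location: /y' keeps the original scheme and netloc, takes path, query
    # and fragment from the redirect, and so retries http://a/y.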
def __format__(self, line, parse_result, action_result):
options = self.options
url = action_result['url']
response = action_result['response']
code = action_result['code']
msg = action_result['msg']
output = ''
#
if options.server:
server = response.headers.get('server', None)
output = '{0} {1}'.format(url, server)
elif options.code:
if options.verbose:
output = '{0} {1} {2}'.format(url, code, msg)
else:
output = '{0} {1}'.format(url, code)
elif options.xpath:
try:
import lxml.etree
except ImportError:
import time
errmsg = '\n'
errmsg += '*** XNET USAGE INFORMATION ***\n'
errmsg += '\n'
errmsg += 'ImportError raised, lxml.etree might be missing.\n'
errmsg += 'For Debian, try:\n'
errmsg += ' $ sudo apt-get install python-lxml\n'
errmsg += 'For OSX, try:\n'
errmsg += ' $ sudo port install py-lxml\n'
print errmsg
time.sleep(3)
raise
htmlparser = lxml.etree.HTMLParser()
tree = lxml.etree.fromstring(action_result['body_unicode'], htmlparser)
for xpath in self.options.xpath:
xpath_result = tree.xpath(xpath)
success = False
for elem in xpath_result:
success = True
if type(elem) is lxml.etree._ElementStringResult:
elem = str(elem)
if not type(elem) is str:
elem = lxml.etree.tostring(elem)
output += elem
if len(output) and output[-1] != '\n':
output += '\n'
if self.options.xpath_strict and not success:
import tempfile
(fd, name) = tempfile.mkstemp(prefix='webget-xpath-fail-')
os.write(fd, action_result['body'])
os.close(fd)
msg = 'Error: XPath not matched: {0}'.format(xpath)
msg += ', body saved to {0}'.format(name)
sys.exit(msg)
output += self.options.xpath_delimiter or ' '
else:
output = action_result['body']
return output
@classmethod
def __format_help__(cls):
output = '''
Format variables for %s:
Default format depend on commandline arguments.
''' % cls.__toolname__
return output
def __timeout__(self):
return 'timeout {0}\n'.format(self._url)
def main():
import xnet.tools.run
xnet.tools.run.run(XInfoTool)
if __name__ == "__main__":
main()
#doctest.testmod()
#unittest.main()
|
kristerhedfors/xnet
|
xnet/tools/xinfo.py
|
Python
|
bsd-3-clause
| 9,752
|
__author__ = 'phillip'
from abc import ABCMeta, abstractmethod
class MailReceiver:
__metaclass__ = ABCMeta
@abstractmethod
def connect(self, config):
pass
@abstractmethod
def can_create_folder(self):
"""True if this mail access protocol supports the creation of folders on the server
"""
pass
@abstractmethod
    def change_folder(self, path):
        """Returns True if the change to the folder path was successful. If path starts with a / it is an absolute path
"""
pass
@abstractmethod
def create_folder(self, name):
"""Creates a folder in the current directory. can_create_folder must be true. Returns True on success.
"""
pass
@abstractmethod
def delete_folder(self, name):
""" Deletes the folder name
"""
pass
@abstractmethod
def list_folders(self):
"""Returns the names of all subfolders
"""
pass
@abstractmethod
def get_number_of_mails(self):
"""Returns the number of mails in this folder
"""
pass
@abstractmethod
def get_mail(self, n):
"""Returns the mail number n
"""
pass
@abstractmethod
def get_header(self, n):
"""Returns the header of the mail number n
"""
pass
@abstractmethod
def get_mailbox_size(self):
"""Returns the size of the mailbox
"""
pass
@abstractmethod
def get_total_mails(self):
"""Returns the number of all mails in the mailbox
"""
pass
@abstractmethod
def delete_mail(self, n):
"""Returns True if mail n was successfully marked for deletion
"""
pass
@abstractmethod
def quit(self):
pass
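# Illustrative sketch (added for exposition, not part of the original module):
# a partial POP3-backed receiver showing how the interface is meant to be
# implemented. A real subclass must implement every abstract method before it
# can be instantiated; the rest are omitted here for brevity.
import poplib
class Pop3Receiver(MailReceiver):
    def connect(self, config):
        self._conn = poplib.POP3(config['host'])
        self._conn.user(config['user'])
        self._conn.pass_(config['password'])
    def can_create_folder(self):
        # POP3 exposes no server-side folders
        return False
    def get_number_of_mails(self):
        return self._conn.stat()[0]
    def get_mailbox_size(self):
        return self._conn.stat()[1]
    def quit(self):
        self._conn.quit()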
|
grafgustav/accessmail
|
src/Service/MailReceiver.py
|
Python
|
mit
| 1,806
|
import os
from rsqueakvm import display, wrapper
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.model.display import W_DisplayBitmap
from rsqueakvm.model.pointers import W_PointersObject
from rsqueakvm.model.variable import W_WordsObject
from rsqueakvm.primitives import expose_primitive, assert_class, index1_0
from rsqueakvm.primitives.constants import *
from rpython.rlib import jit
# ___________________________________________________________________________
# File primitives (150-169)
# (XXX they are obsolete in Squeak and done with a plugin)
@expose_primitive(FILE_CLOSE, unwrap_spec=[object, int])
def func(interp, s_frame, w_rcvr, fd):
try:
os.close(fd)
except OSError:
raise PrimitiveFailedError()
return w_rcvr
@expose_primitive(FILE_OPEN, unwrap_spec=[object, str, object])
def func(interp, s_frame, w_rcvr, filename, w_writeable_flag):
if w_writeable_flag.is_same_object(interp.space.w_true):
mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
else:
mode = os.O_RDONLY
try:
fd = os.open(filename, mode, 0666)
except OSError:
raise PrimitiveFailedError()
return interp.space.wrap_int(fd)
@expose_primitive(FILE_WRITE, unwrap_spec=[object, int, str, int, int])
def func(interp, s_frame, w_rcvr, fd, src, start, count):
start = start - 1
end = start + count
if end < 0 or start < 0:
raise PrimitiveFailedError()
try:
os.write(fd, src[start:end])
except OSError:
raise PrimitiveFailedError()
return w_rcvr
@expose_primitive(DIRECTORY_DELIMITOR, unwrap_spec=[object])
def func(interp, s_frame, _):
return interp.space.wrap_char(os.path.sep)
# ___________________________________________________________________________
# I/O Primitives
@expose_primitive(MOUSE_POINT, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
x, y = interp.space.display().mouse_point()
w_point = W_PointersObject(interp.space, interp.space.w_Point, 2)
w_point.store(interp.space, 0, interp.space.wrap_int(x))
w_point.store(interp.space, 1, interp.space.wrap_int(y))
return w_point
@expose_primitive(GET_NEXT_EVENT, unwrap_spec=[object, object])
@jit.unroll_safe
@jit.look_inside
def func(interp, s_frame, w_rcvr, w_into):
if not interp.evented:
raise PrimitiveFailedError()
try:
ary = interp.space.display().get_next_event(time=interp.event_time_now())
except display.SqueakInterrupt, e:
w_interrupt_sema = interp.space.w_interrupt_semaphore()
if w_interrupt_sema is not interp.space.w_nil:
assert_class(interp, w_interrupt_sema, interp.space.w_Semaphore)
wrapper.SemaphoreWrapper(interp.space, w_interrupt_sema).signal(s_frame)
else:
raise e
else:
for i in range(8):
w_into.store(interp.space, i, interp.space.wrap_int(ary[i]))
# XXX - hack
if (ary[0] == display.WindowEventMetricChange and
ary[4] > 0 and ary[5] > 0):
if interp.image:
interp.image.lastWindowSize = ((ary[4] & 0xffff) << 16) | (ary[5] & 0xffff)
return w_rcvr
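# Worked example (added for exposition): a metric-change event with width=800
# and height=600 packs lastWindowSize as (800 << 16) | 600 == 0x03200258;
# SCREEN_SIZE below unpacks it with the same shifts.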
@expose_primitive(BITBLT_COPY_BITS, clean_stack=False, no_result=True,
compiled_method=True)
def func(interp, s_frame, argcount, w_method):
w_name = interp.space.wrap_string("primitiveCopyBits")
signature = ("BitBltPlugin", "primitiveCopyBits")
from rsqueakvm.plugins.simulation import simulationPlugin
return simulationPlugin.simulate(w_name, signature, interp, s_frame, argcount, w_method)
@expose_primitive(SNAPSHOT, clean_stack=False, no_result=True)
def func(interp, s_frame, argcount):
s_frame.pop_n(argcount)
s_frame.push(interp.space.w_true)
# leaving true on the frame as return value for resuming image
from rsqueakvm.squeakimage import SpurImageWriter
from rsqueakvm.constants import SYSTEM_ATTRIBUTE_IMAGE_NAME_INDEX
filename = interp.space.get_system_attribute(SYSTEM_ATTRIBUTE_IMAGE_NAME_INDEX)
SpurImageWriter(interp, filename).trace_image(s_frame)
s_frame.pop()
s_frame.push(interp.space.w_false) # the non-resuming image gets false
@expose_primitive(BE_CURSOR)
def func(interp, s_frame, argcount):
if not (0 <= argcount <= 1):
raise PrimitiveFailedError()
w_rcvr = s_frame.peek(argcount)
if interp.space.headless.is_set():
# we don't do any cursoration if we're headless
return w_rcvr
mask_words = None
if argcount == 1:
w_mask = s_frame.peek(0)
if isinstance(w_mask, W_WordsObject):
mask_words = w_mask.words
elif isinstance(w_mask, W_PointersObject):
# mask is a form object
w_contents = w_mask.fetch(interp.space, 0)
if isinstance(w_contents, W_WordsObject):
mask_words = w_contents.words
else:
raise PrimitiveFailedError
else:
raise PrimitiveFailedError()
w_bitmap = w_rcvr.fetch(interp.space, 0)
if not isinstance(w_bitmap, W_WordsObject):
raise PrimitiveFailedError()
width = interp.space.unwrap_int(w_rcvr.fetch(interp.space, 1))
height = interp.space.unwrap_int(w_rcvr.fetch(interp.space, 2))
depth = interp.space.unwrap_int(w_rcvr.fetch(interp.space, 3))
hotpt = wrapper.PointWrapper(interp.space, w_rcvr.fetch(interp.space, 4))
offx = hotpt.x()
offy = hotpt.y()
if not (width == 16 and height == 16 and depth == 1 and
offx >= -16 and offy >= -16 and
offx <= 0 and offy <= 0):
raise PrimitiveFailedError
offx = -offx
offy = -offy
display.SDLCursor.set(w_bitmap.words, width, height, offx, offy,
mask_words=mask_words)
# Don't fail if the Cursor could not be set.
# It is surely annoying but no reason to not continue.
s_frame.pop_n(argcount + 1)
return w_rcvr
@expose_primitive(BE_DISPLAY, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
if interp.space.headless.is_set():
s_frame.exitFromHeadlessExecution()
if not isinstance(w_rcvr, W_PointersObject) or w_rcvr.size() < 4:
raise PrimitiveFailedError
old_display = interp.space.w_display()
if isinstance(old_display, W_DisplayBitmap):
old_display.relinquish_display()
interp.space.set_w_display(w_rcvr)
form = wrapper.FormWrapper(interp.space, w_rcvr)
form.take_over_display()
w_display_bitmap = form.get_display_bitmap()
w_display_bitmap.take_over_display()
w_display_bitmap.flush_to_screen()
if interp.image:
interp.image.lastWindowSize = (form.width() << 16) + form.height()
return w_rcvr
@jit.look_inside_iff(lambda space, start, stop, repOff, w_rcvr, w_replacement: (
jit.isconstant(stop) and jit.isconstant(start) and
jit.isconstant(repOff) and (stop - start < 13))) # heuristic
def _replace_from_to(space, start, stop, repOff, w_rcvr, w_replacement):
for i0 in range(start, stop + 1):
w_rcvr.atput0(space, i0, w_replacement.at0(space, repOff + i0))
@expose_primitive(REPLACE_FROM_TO, unwrap_spec=[object, index1_0, index1_0, object, index1_0])
def func(interp, s_frame, w_rcvr, start, stop, w_replacement, repStart):
"""replaceFrom: start to: stop with: replacement startingAt: repStart
Primitive. This destructively replaces elements from start to stop in the
receiver starting at index, repStart, in the collection, replacement. Answer
the receiver. Range checks are performed in the primitive only. Essential
for Pharo Candle Symbols.
| index repOff |
repOff := repStart - start.
index := start - 1.
[(index := index + 1) <= stop]
whileTrue: [self at: index put: (replacement at: repOff + index)]"""
if (start < 0 or start - 1 > stop or repStart < 0):
raise PrimitiveFailedError()
if (w_rcvr.varsize() <= stop or w_replacement.varsize() <= repStart + (stop - start)):
raise PrimitiveFailedError()
repOff = repStart - start
_replace_from_to(interp.space, start, stop, repOff, w_rcvr, w_replacement)
return w_rcvr
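# Worked example (added for exposition): Smalltalk's
# 'replaceFrom: 2 to: 4 with: r startingAt: 1' arrives here zero-based as
# start=1, stop=3, repStart=0, so repOff = -1 and receiver[1..3] is filled
# from r[0..2].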
@expose_primitive(SCREEN_SIZE, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
w_res = interp.space.w_Point.as_class_get_shadow(interp.space).new()
point = wrapper.PointWrapper(interp.space, w_res)
display = interp.space.display()
if display.width == 0:
# We need to have the indirection via interp.image, because when the image
# is saved, the display form size is always reduced to 240@120.
if not interp.image:
raise PrimitiveFailedError
display.width = (interp.image.lastWindowSize >> 16) & 0xffff
display.height = interp.image.lastWindowSize & 0xffff
point.store_x(display.width)
point.store_y(display.height)
return w_res
@expose_primitive(MOUSE_BUTTONS, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
btn = interp.space.display().mouse_button()
return interp.space.wrap_int(btn)
@expose_primitive(KBD_NEXT, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
code = interp.space.display().next_keycode()
if code & 0xFF == 0:
return interp.space.w_nil
else:
return interp.space.wrap_int(code)
@expose_primitive(KBD_PEEK, unwrap_spec=[object])
def func(interp, s_frame, w_rcvr):
code = interp.space.display().peek_keycode()
# TODO: how do old images handle CmdDot? See INTERRUPT_SEMAPHORE?
if code & 0xFF == 0:
return interp.space.w_nil
else:
return interp.space.wrap_int(code)
|
HPI-SWA-Lab/RSqueak
|
rsqueakvm/primitives/input_output.py
|
Python
|
bsd-3-clause
| 9,607
|
#Scratch_n_sketch fan demo app
from libs.board import *
s = scratch_n_sketch()
#auto connect scratch_n_sketch
s.connect()
s.backGroundColor(0, 0, 0)
s.textBackColor(0, 0, 0)
s.penColor(255, 255, 0)
s.setFont(Font.terminal)
for i in range(1000):
#get sensor data
s.getSensorData()
console(s.A1)
wait(50)
#check the distance sensor value
if s.A1 >= 90:
#set buzzer on
s.digitalWrite(s.Do1, On)
s.drawText("INTRUDER ...", 40, 140)
wait(50)
#set rgb led to red, with 10% brightness
s.rgbLed(255, 0, 0, 10)
else:
        #set buzzer off
s.digitalWrite(s.Do1, Off)
s.drawText("SCANNING ...", 40, 140)
#wait 50ms
wait(50)
#set rgb led to green, with 10% brightness
s.rgbLed(0, 255, 0, 10)
wait(100)
#disconnect scratch_n_sketch
s.disconnect()
|
warefab/scratch-n-sketch
|
python/digital_out.py
|
Python
|
mit
| 866
|
"""Add adminrealm to policies
Revision ID: 4d9178fa8336
Revises: e5cbeb7c177
Create Date: 2015-06-15 13:58:35.377862
"""
# revision identifiers, used by Alembic.
revision = '4d9178fa8336'
down_revision = 'e5cbeb7c177'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.exc import OperationalError, ProgrammingError, InternalError
def upgrade():
try:
op.add_column('policy', sa.Column('adminrealm',
sa.Unicode(length=256),
nullable=True))
except (OperationalError, ProgrammingError, InternalError) as exx:
if "duplicate column name" in str(exx.orig).lower():
print("Good. Column adminrealm already exists.")
else:
print(exx)
except Exception as exx:
print("Could not add the column 'adminrealm' to table policy")
print(exx)
def downgrade():
op.drop_column('policy', 'adminrealm')
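# Usage sketch (added for exposition): this revision is applied like any
# other Alembic migration, e.g. `alembic upgrade 4d9178fa8336` from the
# migrations directory.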
|
privacyidea/privacyidea
|
migrations/versions/4d9178fa8336_.py
|
Python
|
agpl-3.0
| 963
|
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import chigger
reader = chigger.exodus.ExodusReader('../../input/mug_blocks_out.e')
mug = chigger.exodus.ExodusResult(reader, block=None, variable='convected')
window = chigger.RenderWindow(mug, size=[300,300], test=True)
window.write('none.png')
window.start()
|
nuclear-wizard/moose
|
python/chigger/tests/exodus/blocks/none.py
|
Python
|
lgpl-2.1
| 629
|
# This code allows one to use SWIG-wrapped objects from weave. It is
# specific to SWIG-1.3 and above, where things are different. The code is
# basically all copied out from the SWIG wrapper code, but it has been
# hand-edited for brevity.
#
# Prabhu Ramachandran <prabhu_r@users.sf.net>
from __future__ import absolute_import, print_function
######################################################################
# This is for SWIG-1.3.x where x < 22.
# Essentially, SWIG_RUNTIME_VERSION was not yet used.
swigptr2_code_v0 = """
#include "Python.h"
/*************************************************************** -*- c -*-
* python/precommon.swg
*
* Rename all exported symbols from common.swg, to avoid symbol
* clashes if multiple interpreters are included
*
************************************************************************/
#define SWIG_TypeCheck SWIG_Python_TypeCheck
#define SWIG_TypeCast SWIG_Python_TypeCast
#define SWIG_TypeName SWIG_Python_TypeName
#define SWIG_TypeQuery SWIG_Python_TypeQuery
#define SWIG_PackData SWIG_Python_PackData
#define SWIG_UnpackData SWIG_Python_UnpackData
/***********************************************************************
* common.swg
*
* This file contains generic SWIG runtime support for pointer
* type checking as well as a few commonly used macros to control
* external linkage.
*
* Author : David Beazley (beazley@cs.uchicago.edu)
*
* Copyright (c) 1999-2000, The University of Chicago
*
* This file may be freely redistributed without license or fee provided
* this copyright message remains intact.
************************************************************************/
#include <string.h>
#if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)
# if defined(_MSC_VER) || defined(__GNUC__)
# if defined(STATIC_LINKED)
# define SWIGEXPORT(a) a
# define SWIGIMPORT(a) extern a
# else
# define SWIGEXPORT(a) __declspec(dllexport) a
# define SWIGIMPORT(a) extern a
# endif
# else
# if defined(__BORLANDC__)
# define SWIGEXPORT(a) a _export
# define SWIGIMPORT(a) a _export
# else
# define SWIGEXPORT(a) a
# define SWIGIMPORT(a) a
# endif
# endif
#else
# define SWIGEXPORT(a) a
# define SWIGIMPORT(a) a
#endif
#ifdef SWIG_GLOBAL
# define SWIGRUNTIME(a) SWIGEXPORT(a)
#else
# define SWIGRUNTIME(a) static a
#endif
#ifdef __cplusplus
extern "C" {
#endif
typedef void *(*swig_converter_func)(void *);
typedef struct swig_type_info *(*swig_dycast_func)(void **);
typedef struct swig_type_info {
const char *name;
swig_converter_func converter;
const char *str;
void *clientdata;
swig_dycast_func dcast;
struct swig_type_info *next;
struct swig_type_info *prev;
} swig_type_info;
#ifdef SWIG_NOINCLUDE
SWIGIMPORT(swig_type_info *) SWIG_TypeCheck(char *c, swig_type_info *);
SWIGIMPORT(void *) SWIG_TypeCast(swig_type_info *, void *);
SWIGIMPORT(const char *) SWIG_TypeName(const swig_type_info *);
SWIGIMPORT(swig_type_info *) SWIG_TypeQuery(const char *);
SWIGIMPORT(char *) SWIG_PackData(char *, void *, int);
SWIGIMPORT(char *) SWIG_UnpackData(char *, void *, int);
#else
static swig_type_info *swig_type_list = 0;
/* Check the typename */
SWIGRUNTIME(swig_type_info *)
SWIG_TypeCheck(char *c, swig_type_info *ty) {
swig_type_info *s;
if (!ty) return 0; /* Void pointer */
s = ty->next; /* First element always just a name */
do {
if (strcmp(s->name,c) == 0) {
if (s == ty->next) return s;
/* Move s to the top of the linked list */
s->prev->next = s->next;
if (s->next) {
s->next->prev = s->prev;
}
/* Insert s as second element in the list */
s->next = ty->next;
if (ty->next) ty->next->prev = s;
ty->next = s;
s->prev = ty;
return s;
}
s = s->next;
} while (s && (s != ty->next));
return 0;
}
/* Cast a pointer up an inheritance hierarchy */
SWIGRUNTIME(void *)
SWIG_TypeCast(swig_type_info *ty, void *ptr) {
if ((!ty) || (!ty->converter)) return ptr;
return (*ty->converter)(ptr);
}
/* Return the name associated with this type */
SWIGRUNTIME(const char *)
SWIG_TypeName(const swig_type_info *ty) {
return ty->name;
}
/*
Compare two type names skipping the space characters, therefore
"char*" == "char *" and "Class<int>" == "Class<int >", etc.
Return 0 when the two name types are equivalent, as in
strncmp, but skipping ' '.
*/
static int
SWIG_TypeNameComp(const char *f1, const char *l1,
const char *f2, const char *l2) {
for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {
while ((*f1 == ' ') && (f1 != l1)) ++f1;
while ((*f2 == ' ') && (f2 != l2)) ++f2;
if (*f1 != *f2) return *f1 - *f2;
}
return (l1 - f1) - (l2 - f2);
}
/*
Check type equivalence in a name list like <name1>|<name2>|...
*/
static int
SWIG_TypeEquiv(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;
if (*ne) ++ne;
}
return equiv;
}
/* Search for a swig_type_info structure */
SWIGRUNTIME(swig_type_info *)
SWIG_TypeQuery(const char *name) {
swig_type_info *ty = swig_type_list;
while (ty) {
if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;
if (ty->name && (strcmp(name,ty->name) == 0)) return ty;
ty = ty->prev;
}
return 0;
}
/* Pack binary data into a string */
SWIGRUNTIME(char *)
SWIG_PackData(char *c, void *ptr, int sz) {
static char hex[17] = "0123456789abcdef";
int i;
unsigned char *u = (unsigned char *) ptr;
register unsigned char uu;
for (i = 0; i < sz; i++,u++) {
uu = *u;
*(c++) = hex[(uu & 0xf0) >> 4];
*(c++) = hex[uu & 0xf];
}
return c;
}
/* Unpack binary data from a string */
SWIGRUNTIME(char *)
SWIG_UnpackData(char *c, void *ptr, int sz) {
register unsigned char uu = 0;
register int d;
unsigned char *u = (unsigned char *) ptr;
int i;
for (i = 0; i < sz; i++, u++) {
d = *(c++);
if ((d >= '0') && (d <= '9'))
uu = ((d - '0') << 4);
else if ((d >= 'a') && (d <= 'f'))
uu = ((d - ('a'-10)) << 4);
d = *(c++);
if ((d >= '0') && (d <= '9'))
uu |= (d - '0');
else if ((d >= 'a') && (d <= 'f'))
uu |= (d - ('a'-10));
*u = uu;
}
return c;
}
#endif
#ifdef __cplusplus
}
#endif
/***********************************************************************
* python.swg
*
* This file contains the runtime support for Python modules
* and includes code for managing global variables and pointer
* type checking.
*
* Author : David Beazley (beazley@cs.uchicago.edu)
************************************************************************/
#include "Python.h"
#ifdef __cplusplus
extern "C" {
#endif
#define SWIG_PY_INT 1
#define SWIG_PY_FLOAT 2
#define SWIG_PY_STRING 3
#define SWIG_PY_POINTER 4
#define SWIG_PY_BINARY 5
/* Flags for pointer conversion */
#define SWIG_POINTER_EXCEPTION 0x1
#define SWIG_POINTER_DISOWN 0x2
/* Exception handling in wrappers */
#define SWIG_fail goto fail
/* Constant information structure */
typedef struct swig_const_info {
int type;
char *name;
long lvalue;
double dvalue;
void *pvalue;
swig_type_info **ptype;
} swig_const_info;
/* Common SWIG API */
#define SWIG_ConvertPtr(obj, pp, type, flags) \
SWIG_Python_ConvertPtr(obj, pp, type, flags)
#define SWIG_NewPointerObj(p, type, flags) \
SWIG_Python_NewPointerObj(p, type, flags)
#define SWIG_MustGetPtr(p, type, argnum, flags) \
SWIG_Python_MustGetPtr(p, type, argnum, flags)
typedef double (*py_objasdbl_conv)(PyObject *obj);
#ifdef SWIG_NOINCLUDE
SWIGIMPORT(int) SWIG_Python_ConvertPtr(PyObject *, void **, swig_type_info *, int);
SWIGIMPORT(PyObject *) SWIG_Python_NewPointerObj(void *, swig_type_info *,int own);
SWIGIMPORT(void *) SWIG_Python_MustGetPtr(PyObject *, swig_type_info *, int, int);
#else
/* Convert a pointer value */
SWIGRUNTIME(int)
SWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {
swig_type_info *tc;
char *c = 0;
static PyObject *SWIG_this = 0;
int newref = 0;
PyObject *pyobj = 0;
if (!obj) return 0;
if (obj == Py_None) {
*ptr = 0;
return 0;
}
#ifdef SWIG_COBJECT_TYPES
if (!(PyCObject_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PyCObject_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
*ptr = PyCObject_AsVoidPtr(obj);
c = (char *) PyCObject_GetDesc(obj);
if (newref) Py_DECREF(obj);
goto cobject;
#else
if (!(PyString_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PyString_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
c = PyString_AsString(obj);
/* Pointer values must start with leading underscore */
if (*c != '_') {
*ptr = (void *) 0;
if (strcmp(c,"NULL") == 0) {
if (newref) { Py_DECREF(obj); }
return 0;
} else {
if (newref) { Py_DECREF(obj); }
goto type_error;
}
}
c++;
c = SWIG_UnpackData(c,ptr,sizeof(void *));
if (newref) { Py_DECREF(obj); }
#endif
#ifdef SWIG_COBJECT_TYPES
cobject:
#endif
if (ty) {
tc = SWIG_TypeCheck(c,ty);
if (!tc) goto type_error;
*ptr = SWIG_TypeCast(tc,(void*) *ptr);
}
if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {
PyObject *zero = PyInt_FromLong(0);
PyObject_SetAttrString(pyobj,(char*)"thisown",zero);
Py_DECREF(zero);
}
return 0;
type_error:
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
if (ty && c) {
PyErr_Format(PyExc_TypeError,
"Type error. Got %s, expected %s",
c, ty->name);
} else {
PyErr_SetString(PyExc_TypeError,"Expected a pointer");
}
}
return -1;
}
/* Convert a pointer value, signal an exception on a type mismatch */
SWIGRUNTIME(void *)
SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {
void *result;
SWIG_Python_ConvertPtr(obj, &result, ty, flags | SWIG_POINTER_EXCEPTION);
return result;
}
/* Create a new pointer object */
SWIGRUNTIME(PyObject *)
SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {
PyObject *robj;
if (!ptr) {
Py_INCREF(Py_None);
return Py_None;
}
#ifdef SWIG_COBJECT_TYPES
robj = PyCObject_FromVoidPtrAndDesc((void *) ptr, (char *) type->name, NULL);
#else
{
char result[1024];
char *r = result;
*(r++) = '_';
r = SWIG_PackData(r,&ptr,sizeof(void *));
strcpy(r,type->name);
robj = PyString_FromString(result);
}
#endif
if (!robj || (robj == Py_None)) return robj;
if (type->clientdata) {
PyObject *inst;
PyObject *args = Py_BuildValue((char*)"(O)", robj);
Py_DECREF(robj);
inst = PyObject_CallObject((PyObject *) type->clientdata, args);
Py_DECREF(args);
if (inst) {
if (own) {
PyObject *n = PyInt_FromLong(1);
PyObject_SetAttrString(inst,(char*)"thisown",n);
Py_DECREF(n);
}
robj = inst;
}
}
return robj;
}
#endif
#ifdef __cplusplus
}
#endif
"""
######################################################################
# This is for SWIG-1.3.x where x >= 23.
# SWIG_RUNTIME_VERSION == "1"
# All this does is to include (cut/paste): <swigrun.swg>
# <python/pyrun.swg> and <runtime.swg>
swigptr2_code_v1 = """
/***********************************************************************
* swigrun.swg
*
* This file contains generic CAPI SWIG runtime support for pointer
* type checking.
*
************************************************************************/
/* This should only be incremented when either the layout of swig_type_info changes,
or for whatever reason, the runtime changes incompatibly */
#define SWIG_RUNTIME_VERSION "1"
/* define SWIG_TYPE_TABLE_NAME as "SWIG_TYPE_TABLE" */
#ifdef SWIG_TYPE_TABLE
#define SWIG_QUOTE_STRING(x) #x
#define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)
#define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)
#else
#define SWIG_TYPE_TABLE_NAME
#endif
#include <string.h>
#ifndef SWIGINLINE
#if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))
# define SWIGINLINE inline
#else
# define SWIGINLINE
#endif
#endif
/*
You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for
creating a static or dynamic library from the swig runtime code.
In 99.9% of the cases, swig just needs to declare them as 'static'.
But only do this if is strictly necessary, ie, if you have problems
with your compiler or so.
*/
#ifndef SWIGRUNTIME
#define SWIGRUNTIME static
#endif
#ifndef SWIGRUNTIMEINLINE
#define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE
#endif
#ifdef __cplusplus
extern "C" {
#endif
typedef void *(*swig_converter_func)(void *);
typedef struct swig_type_info *(*swig_dycast_func)(void **);
typedef struct swig_type_info {
const char *name;
swig_converter_func converter;
const char *str;
void *clientdata;
swig_dycast_func dcast;
struct swig_type_info *next;
struct swig_type_info *prev;
} swig_type_info;
/*
Compare two type names skipping the space characters, therefore
"char*" == "char *" and "Class<int>" == "Class<int >", etc.
Return 0 when the two name types are equivalent, as in
strncmp, but skipping ' '.
*/
SWIGRUNTIME int
SWIG_TypeNameComp(const char *f1, const char *l1,
const char *f2, const char *l2) {
for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {
while ((*f1 == ' ') && (f1 != l1)) ++f1;
while ((*f2 == ' ') && (f2 != l2)) ++f2;
if (*f1 != *f2) return *f1 - *f2;
}
return (l1 - f1) - (l2 - f2);
}
/*
Check type equivalence in a name list like <name1>|<name2>|...
*/
SWIGRUNTIME int
SWIG_TypeEquiv(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = SWIG_TypeNameComp(nb, ne, tb, te) == 0;
if (*ne) ++ne;
}
return equiv;
}
/*
Register a type mapping with the type-checking
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeRegisterTL(swig_type_info **tl, swig_type_info *ti) {
swig_type_info *tc, *head, *ret, *next;
/* Check to see if this type has already been registered */
tc = *tl;
while (tc) {
/* check simple type equivalence */
int typeequiv = (strcmp(tc->name, ti->name) == 0);
/* check full type equivalence, resolving typedefs */
if (!typeequiv) {
/* only if tc is not a typedef (no '|' on it) */
if (tc->str && ti->str && !strstr(tc->str,"|")) {
typeequiv = SWIG_TypeEquiv(ti->str,tc->str);
}
}
if (typeequiv) {
/* Already exists in the table. Just add additional types to the list */
if (ti->clientdata) tc->clientdata = ti->clientdata;
head = tc;
next = tc->next;
goto l1;
}
tc = tc->prev;
}
head = ti;
next = 0;
/* Place in list */
ti->prev = *tl;
*tl = ti;
/* Build linked lists */
l1:
ret = head;
tc = ti + 1;
/* Patch up the rest of the links */
while (tc->name) {
head->next = tc;
tc->prev = head;
head = tc;
tc++;
}
if (next) next->prev = head;
head->next = next;
return ret;
}
/*
Check the typename
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeCheck(const char *c, swig_type_info *ty) {
swig_type_info *s;
if (!ty) return 0; /* Void pointer */
s = ty->next; /* First element always just a name */
do {
if (strcmp(s->name,c) == 0) {
if (s == ty->next) return s;
/* Move s to the top of the linked list */
s->prev->next = s->next;
if (s->next) {
s->next->prev = s->prev;
}
/* Insert s as second element in the list */
s->next = ty->next;
if (ty->next) ty->next->prev = s;
ty->next = s;
s->prev = ty;
return s;
}
s = s->next;
} while (s && (s != ty->next));
return 0;
}
/*
Cast a pointer up an inheritance hierarchy
*/
SWIGRUNTIMEINLINE void *
SWIG_TypeCast(swig_type_info *ty, void *ptr) {
return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);
}
/*
Dynamic pointer casting. Down an inheritance hierarchy
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {
swig_type_info *lastty = ty;
if (!ty || !ty->dcast) return ty;
while (ty && (ty->dcast)) {
ty = (*ty->dcast)(ptr);
if (ty) lastty = ty;
}
return lastty;
}
/*
Return the name associated with this type
*/
SWIGRUNTIMEINLINE const char *
SWIG_TypeName(const swig_type_info *ty) {
return ty->name;
}
/*
Return the pretty name associated with this type,
that is an unmangled type name in a form presentable to the user.
*/
SWIGRUNTIME const char *
SWIG_TypePrettyName(const swig_type_info *type) {
/* The "str" field contains the equivalent pretty names of the
type, separated by vertical-bar characters. We choose
to print the last name, as it is often (?) the most
specific. */
if (type->str != NULL) {
const char *last_name = type->str;
const char *s;
for (s = type->str; *s; s++)
if (*s == '|') last_name = s+1;
return last_name;
}
else
return type->name;
}
/*
Search for a swig_type_info structure
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeQueryTL(swig_type_info *tl, const char *name) {
swig_type_info *ty = tl;
while (ty) {
if (ty->str && (SWIG_TypeEquiv(ty->str,name))) return ty;
if (ty->name && (strcmp(name,ty->name) == 0)) return ty;
ty = ty->prev;
}
return 0;
}
/*
Set the clientdata field for a type
*/
SWIGRUNTIME void
SWIG_TypeClientDataTL(swig_type_info *tl, swig_type_info *ti, void *clientdata) {
swig_type_info *tc, *equiv;
if (ti->clientdata) return;
/* if (ti->clientdata == clientdata) return; */
ti->clientdata = clientdata;
equiv = ti->next;
while (equiv) {
if (!equiv->converter) {
tc = tl;
while (tc) {
if ((strcmp(tc->name, equiv->name) == 0))
SWIG_TypeClientDataTL(tl,tc,clientdata);
tc = tc->prev;
}
}
equiv = equiv->next;
}
}
/*
Pack binary data into a string
*/
SWIGRUNTIME char *
SWIG_PackData(char *c, void *ptr, size_t sz) {
static char hex[17] = "0123456789abcdef";
unsigned char *u = (unsigned char *) ptr;
const unsigned char *eu = u + sz;
register unsigned char uu;
for (; u != eu; ++u) {
uu = *u;
*(c++) = hex[(uu & 0xf0) >> 4];
*(c++) = hex[uu & 0xf];
}
return c;
}
/*
Unpack binary data from a string
*/
SWIGRUNTIME const char *
SWIG_UnpackData(const char *c, void *ptr, size_t sz) {
register unsigned char *u = (unsigned char *) ptr;
register const unsigned char *eu = u + sz;
for (; u != eu; ++u) {
register int d = *(c++);
register unsigned char uu = 0;
if ((d >= '0') && (d <= '9'))
uu = ((d - '0') << 4);
else if ((d >= 'a') && (d <= 'f'))
uu = ((d - ('a'-10)) << 4);
else
return (char *) 0;
d = *(c++);
if ((d >= '0') && (d <= '9'))
uu |= (d - '0');
else if ((d >= 'a') && (d <= 'f'))
uu |= (d - ('a'-10));
else
return (char *) 0;
*u = uu;
}
return c;
}
/*
This function will propagate the clientdata field of type to any new
swig_type_info structures that have been added into the list of
equivalent types. It is like calling SWIG_TypeClientData(type,
clientdata) a second time.
*/
SWIGRUNTIME void
SWIG_PropagateClientDataTL(swig_type_info *tl, swig_type_info *type) {
swig_type_info *equiv = type->next;
swig_type_info *tc;
if (!type->clientdata) return;
while (equiv) {
if (!equiv->converter) {
tc = tl;
while (tc) {
if ((strcmp(tc->name, equiv->name) == 0) && !tc->clientdata)
SWIG_TypeClientDataTL(tl,tc, type->clientdata);
tc = tc->prev;
}
}
equiv = equiv->next;
}
}
/*
Pack 'void *' into a string buffer.
*/
SWIGRUNTIME char *
SWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {
char *r = buff;
if ((2*sizeof(void *) + 2) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,&ptr,sizeof(void *));
if (strlen(name) + 1 > (bsz - (r - buff))) return 0;
strcpy(r,name);
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
*ptr = (void *) 0;
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sizeof(void *));
}
SWIGRUNTIME char *
SWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {
char *r = buff;
size_t lname = (name ? strlen(name) : 0);
if ((2*sz + 2 + lname) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,ptr,sz);
if (lname) {
strncpy(r,name,lname+1);
} else {
*r = 0;
}
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
memset(ptr,0,sz);
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sz);
}
#ifdef __cplusplus
}
#endif
/***********************************************************************
* pyrun.swg
*
* This file contains the runtime support for Python modules
* and includes code for managing global variables and pointer
* type checking.
*
* Author : David Beazley (beazley@cs.uchicago.edu)
************************************************************************/
/* Common SWIG API */
#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)
#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)
#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)
/* Python-specific SWIG API */
#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)
#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)
/* -----------------------------------------------------------------------------
* Pointer declarations
* ----------------------------------------------------------------------------- */
/*
Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent
C/C++ pointers in the python side. Very useful for debugging, but
not always safe.
*/
#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)
# define SWIG_COBJECT_TYPES
#endif
/* Flags for pointer conversion */
#define SWIG_POINTER_EXCEPTION 0x1
#define SWIG_POINTER_DISOWN 0x2
#ifdef __cplusplus
extern "C" {
#endif
/* -----------------------------------------------------------------------------
* Create a new pointer string
* ----------------------------------------------------------------------------- */
#ifndef SWIG_BUFFER_SIZE
#define SWIG_BUFFER_SIZE 1024
#endif
#if defined(SWIG_COBJECT_TYPES)
#if !defined(SWIG_COBJECT_PYTHON)
/* -----------------------------------------------------------------------------
* Implements a simple Swig Object type, and use it instead of PyCObject
* ----------------------------------------------------------------------------- */
typedef struct {
PyObject_HEAD
void *ptr;
const char *desc;
} PySwigObject;
/* Declarations for objects of type PySwigObject */
SWIGRUNTIME int
PySwigObject_print(PySwigObject *v, FILE *fp, int flags)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {
fputs("<Swig Object at ", fp); fputs(result, fp); fputs(">", fp);
return 0;
} else {
return 1;
}
}
SWIGRUNTIME PyObject *
PySwigObject_repr(PySwigObject *v)
{
char result[SWIG_BUFFER_SIZE];
return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?
PyString_FromFormat("<Swig Object at %s>", result) : 0;
}
SWIGRUNTIME PyObject *
PySwigObject_str(PySwigObject *v)
{
char result[SWIG_BUFFER_SIZE];
return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?
PyString_FromString(result) : 0;
}
SWIGRUNTIME PyObject *
PySwigObject_long(PySwigObject *v)
{
return PyLong_FromUnsignedLong((unsigned long) v->ptr);
}
SWIGRUNTIME PyObject *
PySwigObject_oct(PySwigObject *v)
{
char buf[100];
unsigned long x = (unsigned long)v->ptr;
if (x == 0)
strcpy(buf, "0");
else
PyOS_snprintf(buf, sizeof(buf), "0%lo", x);
return PyString_FromString(buf);
}
SWIGRUNTIME PyObject *
PySwigObject_hex(PySwigObject *v)
{
char buf[100];
PyOS_snprintf(buf, sizeof(buf), "0x%lx", (unsigned long)v->ptr);
return PyString_FromString(buf);
}
SWIGRUNTIME int
PySwigObject_compare(PySwigObject *v, PySwigObject *w)
{
int c = strcmp(v->desc, w->desc);
if (c) {
return c;
} else {
void *i = v->ptr;
void *j = w->ptr;
return (i < j) ? -1 : (i > j) ? 1 : 0;
}
}
SWIGRUNTIME void
PySwigObject_dealloc(PySwigObject *self)
{
PyObject_DEL(self);
}
SWIGRUNTIME PyTypeObject*
PySwigObject_GetType() {
static char PySwigObject_Type__doc__[] =
"Swig object carries a C/C++ instance pointer";
static PyNumberMethods PySwigObject_as_number = {
(binaryfunc)0, /*nb_add*/
(binaryfunc)0, /*nb_subtract*/
(binaryfunc)0, /*nb_multiply*/
(binaryfunc)0, /*nb_divide*/
(binaryfunc)0, /*nb_remainder*/
(binaryfunc)0, /*nb_divmod*/
(ternaryfunc)0,/*nb_power*/
(unaryfunc)0, /*nb_negative*/
(unaryfunc)0, /*nb_positive*/
(unaryfunc)0, /*nb_absolute*/
(inquiry)0, /*nb_nonzero*/
0, /*nb_invert*/
0, /*nb_lshift*/
0, /*nb_rshift*/
0, /*nb_and*/
0, /*nb_xor*/
0, /*nb_or*/
(coercion)0, /*nb_coerce*/
(unaryfunc)PySwigObject_long, /*nb_int*/
(unaryfunc)PySwigObject_long, /*nb_long*/
(unaryfunc)0, /*nb_float*/
(unaryfunc)PySwigObject_oct, /*nb_oct*/
(unaryfunc)PySwigObject_hex, /*nb_hex*/
#if PY_VERSION_HEX >= 0x02000000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */
#endif
};
static PyTypeObject PySwigObject_Type = {
PyObject_HEAD_INIT(&PyType_Type)
0, /*ob_size*/
"PySwigObject", /*tp_name*/
sizeof(PySwigObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor)PySwigObject_dealloc, /*tp_dealloc*/
(printfunc)PySwigObject_print, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc)PySwigObject_compare, /*tp_compare*/
(reprfunc)PySwigObject_repr, /*tp_repr*/
&PySwigObject_as_number, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)0, /*tp_hash*/
(ternaryfunc)0, /*tp_call*/
(reprfunc)PySwigObject_str, /*tp_str*/
/* Space for future expansion */
0L,0L,0L,0L,
PySwigObject_Type__doc__, /* Documentation string */
#if PY_VERSION_HEX >= 0x02000000
0, /* tp_traverse */
0, /* tp_clear */
#endif
#if PY_VERSION_HEX >= 0x02010000
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#endif
#if PY_VERSION_HEX >= 0x02020000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
return &PySwigObject_Type;
}
SWIGRUNTIME PyObject *
PySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)
{
PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_GetType());
if (self == NULL) return NULL;
self->ptr = ptr;
self->desc = desc;
return (PyObject *)self;
}
SWIGRUNTIMEINLINE void *
PySwigObject_AsVoidPtr(PyObject *self)
{
return ((PySwigObject *)self)->ptr;
}
SWIGRUNTIMEINLINE const char *
PySwigObject_GetDesc(PyObject *self)
{
return ((PySwigObject *)self)->desc;
}
SWIGRUNTIMEINLINE int
PySwigObject_Check(PyObject *op) {
return ((op)->ob_type == PySwigObject_GetType())
|| (strcmp((op)->ob_type->tp_name,"PySwigObject") == 0);
}
/* -----------------------------------------------------------------------------
* Implements a simple Swig Packed type, and use it instead of string
* ----------------------------------------------------------------------------- */
typedef struct {
PyObject_HEAD
void *pack;
const char *desc;
size_t size;
} PySwigPacked;
SWIGRUNTIME int
PySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)
{
char result[SWIG_BUFFER_SIZE];
fputs("<Swig Packed ", fp);
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
fputs("at ", fp);
fputs(result, fp);
}
fputs(v->desc,fp);
fputs(">", fp);
return 0;
}
SWIGRUNTIME PyObject *
PySwigPacked_repr(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
return PyString_FromFormat("<Swig Packed at %s%s>", result, v->desc);
} else {
return PyString_FromFormat("<Swig Packed %s>", v->desc);
}
}
SWIGRUNTIME PyObject *
PySwigPacked_str(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){
return PyString_FromFormat("%s%s", result, v->desc);
} else {
return PyString_FromFormat("%s", v->desc);
}
}
SWIGRUNTIME int
PySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)
{
int c = strcmp(v->desc, w->desc);
if (c) {
return c;
} else {
size_t i = v->size;
size_t j = w->size;
int s = (i < j) ? -1 : (i > j) ? 1 : 0;
return s ? s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);
}
}
SWIGRUNTIME void
PySwigPacked_dealloc(PySwigPacked *self)
{
free(self->pack);
PyObject_DEL(self);
}
SWIGRUNTIME PyTypeObject*
PySwigPacked_GetType() {
static char PySwigPacked_Type__doc__[] =
"Swig object carries a C/C++ instance pointer";
static PyTypeObject PySwigPacked_Type = {
PyObject_HEAD_INIT(&PyType_Type)
0, /*ob_size*/
"PySwigPacked", /*tp_name*/
sizeof(PySwigPacked), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor)PySwigPacked_dealloc, /*tp_dealloc*/
(printfunc)PySwigPacked_print, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc)PySwigPacked_compare, /*tp_compare*/
(reprfunc)PySwigPacked_repr, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)0, /*tp_hash*/
(ternaryfunc)0, /*tp_call*/
(reprfunc)PySwigPacked_str, /*tp_str*/
/* Space for future expansion */
0L,0L,0L,0L,
PySwigPacked_Type__doc__, /* Documentation string */
#if PY_VERSION_HEX >= 0x02000000
0, /* tp_traverse */
0, /* tp_clear */
#endif
#if PY_VERSION_HEX >= 0x02010000
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#endif
#if PY_VERSION_HEX >= 0x02020000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
return &PySwigPacked_Type;
}
SWIGRUNTIME PyObject *
PySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)
{
PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_GetType());
if (self == NULL) {
return NULL;
} else {
    void *pack = malloc(size);
    if (pack == NULL) {
      /* allocation failed: release the half-built object */
      PyObject_DEL(self);
      return NULL;
    }
    memcpy(pack, ptr, size);
self->pack = pack;
self->desc = desc;
self->size = size;
return (PyObject *) self;
}
}
SWIGRUNTIMEINLINE const char *
PySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)
{
PySwigPacked *self = (PySwigPacked *)obj;
if (self->size != size) return 0;
memcpy(ptr, self->pack, size);
return self->desc;
}
SWIGRUNTIMEINLINE const char *
PySwigPacked_GetDesc(PyObject *self)
{
return ((PySwigPacked *)self)->desc;
}
SWIGRUNTIMEINLINE int
PySwigPacked_Check(PyObject *op) {
return ((op)->ob_type == PySwigPacked_GetType())
|| (strcmp((op)->ob_type->tp_name,"PySwigPacked") == 0);
}
#else
/* -----------------------------------------------------------------------------
* Use the old Python PyCObject instead of PySwigObject
* ----------------------------------------------------------------------------- */
#define PySwigObject_GetDesc(obj) PyCObject_GetDesc(obj)
#define PySwigObject_Check(obj) PyCObject_Check(obj)
#define PySwigObject_AsVoidPtr(obj) PyCObject_AsVoidPtr(obj)
#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)
#endif
#endif
/* -----------------------------------------------------------------------------
* errors manipulation
* ----------------------------------------------------------------------------- */
SWIGRUNTIME void
SWIG_Python_TypeError(const char *type, PyObject *obj)
{
if (type) {
#if defined(SWIG_COBJECT_TYPES)
    if (obj && PySwigObject_Check(obj)) {
const char *otype = (const char *) PySwigObject_GetDesc(obj);
if (otype) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, 'PySwigObject(%s)' is received",
type, otype);
return;
}
} else
#endif
{
const char *otype = (obj ? obj->ob_type->tp_name : 0);
if (otype) {
PyObject *str = PyObject_Str(obj);
const char *cstr = str ? PyString_AsString(str) : 0;
if (cstr) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s(%s)' is received",
type, otype, cstr);
} else {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s' is received",
type, otype);
}
      Py_XDECREF(str);
return;
}
}
PyErr_Format(PyExc_TypeError, "a '%s' is expected", type);
} else {
PyErr_Format(PyExc_TypeError, "unexpected type is received");
}
}
SWIGRUNTIMEINLINE void
SWIG_Python_NullRef(const char *type)
{
if (type) {
PyErr_Format(PyExc_TypeError, "null reference of type '%s' was received",type);
} else {
PyErr_Format(PyExc_TypeError, "null reference was received");
}
}
SWIGRUNTIME int
SWIG_Python_AddErrMesg(const char* mesg, int infront)
{
if (PyErr_Occurred()) {
PyObject *type = 0;
PyObject *value = 0;
PyObject *traceback = 0;
PyErr_Fetch(&type, &value, &traceback);
if (value) {
PyObject *old_str = PyObject_Str(value);
Py_XINCREF(type);
PyErr_Clear();
if (infront) {
PyErr_Format(type, "%s %s", mesg, PyString_AsString(old_str));
} else {
PyErr_Format(type, "%s %s", PyString_AsString(old_str), mesg);
}
Py_DECREF(old_str);
}
return 1;
} else {
return 0;
}
}
SWIGRUNTIME int
SWIG_Python_ArgFail(int argnum)
{
if (PyErr_Occurred()) {
/* add information about failing argument */
char mesg[256];
sprintf(mesg, "argument number %d:", argnum);
return SWIG_Python_AddErrMesg(mesg, 1);
} else {
return 0;
}
}
/* -----------------------------------------------------------------------------
* pointers/data manipulation
* ----------------------------------------------------------------------------- */
/* Convert a pointer value */
SWIGRUNTIME int
SWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {
swig_type_info *tc;
const char *c = 0;
static PyObject *SWIG_this = 0;
int newref = 0;
PyObject *pyobj = 0;
void *vptr;
if (!obj) return 0;
if (obj == Py_None) {
*ptr = 0;
return 0;
}
#ifdef SWIG_COBJECT_TYPES
if (!(PySwigObject_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PySwigObject_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
vptr = PySwigObject_AsVoidPtr(obj);
c = (const char *) PySwigObject_GetDesc(obj);
if (newref) { Py_DECREF(obj); }
goto type_check;
#else
if (!(PyString_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PyString_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
c = PyString_AS_STRING(obj);
/* Pointer values must start with leading underscore */
c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);
if (newref) { Py_DECREF(obj); }
if (!c) goto type_error;
#endif
type_check:
  if (ty) {
    tc = SWIG_TypeCheck(c,ty);
    if (!tc) goto type_error;
    *ptr = SWIG_TypeCast(tc,vptr);
  } else {
    /* no type given: hand back the raw pointer, as later runtimes do */
    *ptr = vptr;
  }
if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {
PyObject_SetAttrString(pyobj,(char*)"thisown",Py_False);
}
return 0;
type_error:
PyErr_Clear();
if (pyobj && !obj) {
obj = pyobj;
if (PyCFunction_Check(obj)) {
/* here we get the method pointer for callbacks */
char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);
c = doc ? strstr(doc, "swig_ptr: ") : 0;
if (c) {
        c = ty ? SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name) : 0;
if (!c) goto type_error;
goto type_check;
}
}
}
if (flags & SWIG_POINTER_EXCEPTION) {
if (ty) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
} else {
SWIG_Python_TypeError("C/C++ pointer", obj);
}
}
return -1;
}
/* Convert a pointer value, signal an exception on a type mismatch */
SWIGRUNTIME void *
SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {
void *result;
if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
SWIG_Python_ArgFail(argnum);
}
}
return result;
}
/* Convert a packed value */
SWIGRUNTIME int
SWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {
swig_type_info *tc;
const char *c = 0;
#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)
c = PySwigPacked_UnpackData(obj, ptr, sz);
#else
if ((!obj) || (!PyString_Check(obj))) goto type_error;
c = PyString_AS_STRING(obj);
/* Pointer values must start with leading underscore */
c = SWIG_UnpackDataName(c, ptr, sz, ty->name);
#endif
if (!c) goto type_error;
if (ty) {
tc = SWIG_TypeCheck(c,ty);
if (!tc) goto type_error;
}
return 0;
type_error:
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
if (ty) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
} else {
SWIG_Python_TypeError("C/C++ packed data", obj);
}
}
return -1;
}
/* Create a new pointer object */
SWIGRUNTIME PyObject *
SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {
PyObject *robj = 0;
if (!ptr) {
Py_INCREF(Py_None);
return Py_None;
}
#ifdef SWIG_COBJECT_TYPES
robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);
#else
{
char result[SWIG_BUFFER_SIZE];
robj = SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?
PyString_FromString(result) : 0;
}
#endif
if (!robj || (robj == Py_None)) return robj;
if (type->clientdata) {
PyObject *inst;
PyObject *args = Py_BuildValue((char*)"(O)", robj);
Py_DECREF(robj);
inst = PyObject_CallObject((PyObject *) type->clientdata, args);
Py_DECREF(args);
if (inst) {
if (own) {
PyObject_SetAttrString(inst,(char*)"thisown",Py_True);
}
robj = inst;
}
}
return robj;
}
SWIGRUNTIME PyObject *
SWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {
PyObject *robj = 0;
if (!ptr) {
Py_INCREF(Py_None);
return Py_None;
}
#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)
robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);
#else
{
char result[SWIG_BUFFER_SIZE];
robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?
PyString_FromString(result) : 0;
}
#endif
return robj;
}
/* -----------------------------------------------------------------------------*
* Get type list
* -----------------------------------------------------------------------------*/
#ifdef SWIG_LINK_RUNTIME
void *SWIG_ReturnGlobalTypeList(void *);
#endif
SWIGRUNTIME swig_type_info **
SWIG_Python_GetTypeListHandle() {
static void *type_pointer = (void *)0;
/* first check if module already created */
if (!type_pointer) {
#ifdef SWIG_LINK_RUNTIME
type_pointer = SWIG_ReturnGlobalTypeList((void *)0);
#else
type_pointer = PyCObject_Import((char*)"swig_runtime_data" SWIG_RUNTIME_VERSION,
(char*)"type_pointer" SWIG_TYPE_TABLE_NAME);
if (PyErr_Occurred()) {
PyErr_Clear();
type_pointer = (void *)0;
}
#endif
  }
return (swig_type_info **) type_pointer;
}
/*
Search for a swig_type_info structure
*/
SWIGRUNTIMEINLINE swig_type_info *
SWIG_Python_GetTypeList() {
swig_type_info **tlh = SWIG_Python_GetTypeListHandle();
return tlh ? *tlh : (swig_type_info*)0;
}
#define SWIG_Runtime_GetTypeList SWIG_Python_GetTypeList
#ifdef __cplusplus
}
#endif
/* -----------------------------------------------------------------------------*
Standard SWIG API for use inside user code.
    You need to include it in your code as follows:
#include <Python.h> // or using your favorite language
#include <swigrun.swg>
#include <python/pyrun.swg> // or using your favorite language
#include <runtime.swg>
* -----------------------------------------------------------------------------*/
SWIGRUNTIMEINLINE swig_type_info *
SWIG_Runtime_TypeQuery(const char *name) {
swig_type_info *tl = SWIG_Runtime_GetTypeList();
return SWIG_TypeQueryTL(tl, name);
}
SWIGRUNTIMEINLINE swig_type_info *
SWIG_Runtime_TypeRegister(swig_type_info *ti) {
swig_type_info *tl = SWIG_Runtime_GetTypeList();
return SWIG_TypeRegisterTL(&tl, ti);
}
SWIGRUNTIMEINLINE void
SWIG_Runtime_TypeClientData(swig_type_info *ti, void *clientdata) {
swig_type_info *tl = SWIG_Runtime_GetTypeList();
SWIG_TypeClientDataTL(tl, ti, clientdata);
}
SWIGRUNTIMEINLINE void
SWIG_Runtime_PropagateClientData(swig_type_info *type) {
swig_type_info *tl = SWIG_Runtime_GetTypeList();
SWIG_PropagateClientDataTL(tl, type);
}
#define SWIG_GetTypeList() SWIG_Runtime_GetTypeList()
#define SWIG_TypeQuery(name) SWIG_Runtime_TypeQuery(name)
#define SWIG_TypeRegister(ti) SWIG_Runtime_TypeRegister(ti)
#define SWIG_TypeClientData(ti, cd) SWIG_Runtime_TypeClientData(ti, cd)
#define SWIG_PropagateClientData(ti) SWIG_Runtime_PropagateClientData(ti)
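/* Usage sketch (editorial, not SWIG output): with the includes listed
   above, user code can look up a descriptor and unwrap a Python object.
   The type string and variable names below are assumptions.
     swig_type_info *ty = SWIG_TypeQuery("double *");
     double *value = 0;
     if (SWIG_ConvertPtr(obj, (void **) &value, ty,
                         SWIG_POINTER_EXCEPTION) == -1) return NULL;
*/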
"""
######################################################################
# This is for SWIG-1.3.x where x >= 25.
# SWIG_RUNTIME_VERSION == "2"
# All this does is to include the contents of the file generated by
# this command:
# swig -python -external-runtime
swigptr2_code_v2 = """
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.25
*
* This file is not intended to be easily readable and contains a number of
* coding conventions designed to improve portability and efficiency. Do not make
* changes to this file unless you know what you are doing--modify the SWIG
* interface file instead.
* ----------------------------------------------------------------------------- */
/***********************************************************************
*
* This section contains generic SWIG labels for method/variable
* declarations/attributes, and other compiler dependent labels.
*
************************************************************************/
/*
SWIGTEMPLATEDISAMBIGUATOR is needed when wrapping template calls
(cwrap.c:Swig_cfunction_call/Swig_cmethod_call), as in
result = nspace::template function<int >(arg1);
result = arg1->template method<int >(arg2);
SWIGTEMPLATEDISAMBIGUATOR is compiler dependent (common.swg),
- SUN Studio requires 'template',
- gcc-3.4 forbids the use of 'template'.
- gcc-3.2.3 produces internal errors if you use 'template'
*/
#ifndef SWIGTEMPLATEDISAMBIGUATOR
# if defined(__SUNPRO_CC)
# define SWIGTEMPLATEDISAMBIGUATOR template
# else
# define SWIGTEMPLATEDISAMBIGUATOR
# endif
#endif
/* inline attribute */
#ifndef SWIGINLINE
# if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))
# define SWIGINLINE inline
# else
# define SWIGINLINE
# endif
#endif
/* attribute passed for some compilers to avoid 'unused' warnings */
#ifndef SWIGUNUSED
# if defined(__GNUC__) || defined(__ICC)
# define SWIGUNUSED __attribute__ ((unused))
# else
# define SWIGUNUSED
# endif
#endif
/* internal SWIG method */
#ifndef SWIGINTERN
# define SWIGINTERN static SWIGUNUSED
#endif
/* internal inline SWIG method */
#ifndef SWIGINTERNINLINE
# define SWIGINTERNINLINE SWIGINTERN SWIGINLINE
#endif
/* how we export a method such that it can go in to a shared or dll library */
#ifndef SWIGEXPORT
# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)
# if defined(_MSC_VER) || defined(__GNUC__)
# if defined(STATIC_LINKED)
# define SWIGEXPORT(a) a
# else
# define SWIGEXPORT(a) __declspec(dllexport) a
# endif
# else
# if defined(__BORLANDC__)
# define SWIGEXPORT(a) a _export
# else
# define SWIGEXPORT(a) a
# endif
# endif
# else
# define SWIGEXPORT(a) a
# endif
#endif
/***********************************************************************
* swigrun.swg
*
* This file contains generic CAPI SWIG runtime support for pointer
* type checking.
*
************************************************************************/
/* This should only be incremented when either the layout of swig_type_info changes,
or for whatever reason, the runtime changes incompatibly */
#define SWIG_RUNTIME_VERSION "2"
/* define SWIG_TYPE_TABLE_NAME as "SWIG_TYPE_TABLE" */
#ifdef SWIG_TYPE_TABLE
# define SWIG_QUOTE_STRING(x) #x
# define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)
# define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)
#else
# define SWIG_TYPE_TABLE_NAME
#endif
/*
You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for
creating a static or dynamic library from the swig runtime code.
In 99.9% of the cases, swig just needs to declare them as 'static'.
   But only do this if it is strictly necessary, i.e., if you have problems
   with your compiler.
*/
#ifndef SWIGRUNTIME
# define SWIGRUNTIME SWIGINTERN
#endif
#ifndef SWIGRUNTIMEINLINE
# define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE
#endif
#include <string.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef void *(*swig_converter_func)(void *);
typedef struct swig_type_info *(*swig_dycast_func)(void **);
/* Structure to store information on one type */
typedef struct swig_type_info {
const char *name; /* mangled name of this type */
const char *str; /* human readable name of this type */
swig_dycast_func dcast; /* dynamic cast function down a hierarchy */
struct swig_cast_info *cast; /* linked list of types that can cast into this type */
void *clientdata; /* language specific type data */
} swig_type_info;
/* Structure to store a type and conversion function used for casting */
typedef struct swig_cast_info {
swig_type_info *type; /* pointer to type that is equivalent to this type */
swig_converter_func converter; /* function to cast the void pointers */
struct swig_cast_info *next; /* pointer to next cast in linked list */
struct swig_cast_info *prev; /* pointer to the previous cast */
} swig_cast_info;
/* Structure used to store module information
* Each module generates one structure like this, and the runtime collects
* all of these structures and stores them in a circularly linked list.*/
typedef struct swig_module_info {
swig_type_info **types; /* Array of pointers to swig_type_info structures that are in this module */
size_t size; /* Number of types in this module */
struct swig_module_info *next; /* Pointer to next element in circularly linked list */
swig_type_info **type_initial; /* Array of initially generated type structures */
swig_cast_info **cast_initial; /* Array of initially generated casting structures */
void *clientdata; /* Language specific module data */
} swig_module_info;
/*
Compare two type names skipping the space characters, therefore
"char*" == "char *" and "Class<int>" == "Class<int >", etc.
Return 0 when the two name types are equivalent, as in
strncmp, but skipping ' '.
*/
SWIGRUNTIME int
SWIG_TypeNameComp(const char *f1, const char *l1,
const char *f2, const char *l2) {
for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {
while ((*f1 == ' ') && (f1 != l1)) ++f1;
while ((*f2 == ' ') && (f2 != l2)) ++f2;
if (*f1 != *f2) return (int)(*f1 - *f2);
}
return (l1 - f1) - (l2 - f2);
}
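/* Example (editorial note): comparing "char *" against "char*" over
   their full ranges returns 0, because blanks are skipped on both
   sides before each character comparison. */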
/*
Check type equivalence in a name list like <name1>|<name2>|...
Return 0 if not equal, 1 if equal
*/
SWIGRUNTIME int
SWIG_TypeEquiv(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;
if (*ne) ++ne;
}
return equiv;
}
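/* Example (editorial note): SWIG_TypeEquiv("Foo|Bar", "Bar") returns 1,
   because the second '|'-separated alternative of the first argument
   matches the whole second argument. */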
/*
  Check type equivalence in a name list like <name1>|<name2>|...
  Return 0 if not equal, 1 if equal (despite the function's name, the
  implementation behaves exactly like SWIG_TypeEquiv)
*/
SWIGRUNTIME int
SWIG_TypeCompare(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;
if (*ne) ++ne;
}
return equiv;
}
/* think of this as a c++ template<> or a scheme macro */
#define SWIG_TypeCheck_Template(comparison, ty) \
if (ty) { \
swig_cast_info *iter = ty->cast; \
while (iter) { \
if (comparison) { \
if (iter == ty->cast) return iter; \
/* Move iter to the top of the linked list */ \
iter->prev->next = iter->next; \
if (iter->next) \
iter->next->prev = iter->prev; \
iter->next = ty->cast; \
iter->prev = 0; \
if (ty->cast) ty->cast->prev = iter; \
ty->cast = iter; \
return iter; \
} \
iter = iter->next; \
} \
} \
return 0
/*
Check the typename
*/
SWIGRUNTIME swig_cast_info *
SWIG_TypeCheck(const char *c, swig_type_info *ty) {
SWIG_TypeCheck_Template(strcmp(iter->type->name, c) == 0, ty);
}
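/* Usage sketch (editorial note; names are illustrative): a wrapper
   typically does
     swig_cast_info *tc = SWIG_TypeCheck(c, ty);
     if (tc) ptr = SWIG_TypeCast(tc, vptr);
   Because the template moves a hit to the head of ty->cast, repeated
   lookups of the same cast become effectively O(1). */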
/* Same as previous function, except strcmp is replaced with a pointer comparison */
SWIGRUNTIME swig_cast_info *
SWIG_TypeCheckStruct(swig_type_info *from, swig_type_info *into) {
SWIG_TypeCheck_Template(iter->type == from, into);
}
/*
Cast a pointer up an inheritance hierarchy
*/
SWIGRUNTIMEINLINE void *
SWIG_TypeCast(swig_cast_info *ty, void *ptr) {
return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);
}
/*
Dynamic pointer casting. Down an inheritance hierarchy
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {
swig_type_info *lastty = ty;
if (!ty || !ty->dcast) return ty;
while (ty && (ty->dcast)) {
ty = (*ty->dcast)(ptr);
if (ty) lastty = ty;
}
return lastty;
}
/*
Return the name associated with this type
*/
SWIGRUNTIMEINLINE const char *
SWIG_TypeName(const swig_type_info *ty) {
return ty->name;
}
/*
Return the pretty name associated with this type,
that is an unmangled type name in a form presentable to the user.
*/
SWIGRUNTIME const char *
SWIG_TypePrettyName(const swig_type_info *type) {
/* The "str" field contains the equivalent pretty names of the
type, separated by vertical-bar characters. We choose
to print the last name, as it is often (?) the most
specific. */
if (type->str != NULL) {
const char *last_name = type->str;
const char *s;
for (s = type->str; *s; s++)
if (*s == '|') last_name = s+1;
return last_name;
}
else
return type->name;
}
/*
Set the clientdata field for a type
*/
SWIGRUNTIME void
SWIG_TypeClientData(swig_type_info *ti, void *clientdata) {
if (!ti->clientdata) {
swig_cast_info *cast = ti->cast;
/* if (ti->clientdata == clientdata) return; */
ti->clientdata = clientdata;
while (cast) {
if (!cast->converter)
SWIG_TypeClientData(cast->type, clientdata);
cast = cast->next;
}
}
}
/*
Search for a swig_type_info structure only by mangled name
  Search is O(log #types)
We start searching at module start, and finish searching when start == end.
Note: if start == end at the beginning of the function, we go all the way around
the circular list.
*/
SWIGRUNTIME swig_type_info *
SWIG_MangledTypeQueryModule(swig_module_info *start,
swig_module_info *end,
const char *name) {
swig_module_info *iter = start;
do {
if (iter->size) {
register size_t l = 0;
register size_t r = iter->size - 1;
do {
        /* since l+r >= 0, we can use (>> 1) instead of (/ 2) */
register size_t i = (l + r) >> 1;
const char *iname = iter->types[i]->name;
if (iname) {
register int compare = strcmp(name, iname);
if (compare == 0) {
return iter->types[i];
} else if (compare < 0) {
if (i) {
r = i - 1;
} else {
break;
}
} else if (compare > 0) {
l = i + 1;
}
} else {
break; /* should never happen */
}
} while (l <= r);
}
iter = iter->next;
} while (iter != end);
return 0;
}
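/* Illustrative call (editorial note): search the whole module ring,
   starting and ending at this module, for a mangled name such as
   "_p_double" (a typical mangling of double*, used here as an example):
     swig_type_info *ti = SWIG_MangledTypeQueryModule(module, module, "_p_double");
*/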
/*
Search for a swig_type_info structure for either a mangled name or a human readable name.
  It first searches the mangled names of the types, which is O(log #types).
If a type is not found it then searches the human readable names, which is O(#types).
We start searching at module start, and finish searching when start == end.
Note: if start == end at the beginning of the function, we go all the way around
the circular list.
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeQueryModule(swig_module_info *start,
swig_module_info *end,
const char *name) {
/* STEP 1: Search the name field using binary search */
swig_type_info *ret = SWIG_MangledTypeQueryModule(start, end, name);
if (ret) {
return ret;
} else {
/* STEP 2: If the type hasn't been found, do a complete search
of the str field (the human readable name) */
swig_module_info *iter = start;
do {
register size_t i = 0;
for (; i < iter->size; ++i) {
if (iter->types[i]->str && (SWIG_TypeEquiv(iter->types[i]->str, name)))
return iter->types[i];
}
iter = iter->next;
} while (iter != end);
}
/* neither found a match */
return 0;
}
/*
Pack binary data into a string
*/
SWIGRUNTIME char *
SWIG_PackData(char *c, void *ptr, size_t sz) {
static const char hex[17] = "0123456789abcdef";
register const unsigned char *u = (unsigned char *) ptr;
register const unsigned char *eu = u + sz;
for (; u != eu; ++u) {
register unsigned char uu = *u;
*(c++) = hex[(uu & 0xf0) >> 4];
*(c++) = hex[uu & 0xf];
}
return c;
}
/*
Unpack binary data from a string
*/
SWIGRUNTIME const char *
SWIG_UnpackData(const char *c, void *ptr, size_t sz) {
register unsigned char *u = (unsigned char *) ptr;
register const unsigned char *eu = u + sz;
for (; u != eu; ++u) {
register char d = *(c++);
register unsigned char uu = 0;
if ((d >= '0') && (d <= '9'))
uu = ((d - '0') << 4);
else if ((d >= 'a') && (d <= 'f'))
uu = ((d - ('a'-10)) << 4);
else
return (char *) 0;
d = *(c++);
if ((d >= '0') && (d <= '9'))
uu |= (d - '0');
else if ((d >= 'a') && (d <= 'f'))
uu |= (d - ('a'-10));
else
return (char *) 0;
*u = uu;
}
return c;
}
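/* Worked example (editorial note): packing the two bytes 0x1f 0xa0
   appends the four characters "1fa0"; SWIG_UnpackData reads them back
   into the same two bytes and returns the position just past "1fa0". */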
/*
Pack 'void *' into a string buffer.
*/
SWIGRUNTIME char *
SWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {
char *r = buff;
if ((2*sizeof(void *) + 2) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,&ptr,sizeof(void *));
if (strlen(name) + 1 > (bsz - (r - buff))) return 0;
strcpy(r,name);
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
*ptr = (void *) 0;
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sizeof(void *));
}
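/* Format note (editorial): a packed pointer is '_' plus the hex bytes of
   the pointer value plus the type name, e.g. "_a0b1c2d3_p_Foo" on a
   32-bit build (digits and name are made up); the literal "NULL" unpacks
   to a null pointer. */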
SWIGRUNTIME char *
SWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {
char *r = buff;
size_t lname = (name ? strlen(name) : 0);
if ((2*sz + 2 + lname) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,ptr,sz);
if (lname) {
strncpy(r,name,lname+1);
} else {
*r = 0;
}
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
memset(ptr,0,sz);
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sz);
}
#ifdef __cplusplus
}
#endif
/***********************************************************************
* pyrun.swg
*
* This file contains the runtime support for Python modules
* and includes code for managing global variables and pointer
* type checking.
*
* Author : David Beazley (beazley@cs.uchicago.edu)
************************************************************************/
/* Common SWIG API */
#define SWIG_ConvertPtr(obj, pp, type, flags) SWIG_Python_ConvertPtr(obj, pp, type, flags)
#define SWIG_NewPointerObj(p, type, flags) SWIG_Python_NewPointerObj(p, type, flags)
#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)
/* Python-specific SWIG API */
#define SWIG_ConvertPacked(obj, ptr, sz, ty, flags) SWIG_Python_ConvertPacked(obj, ptr, sz, ty, flags)
#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)
/* Runtime API */
#define SWIG_GetModule(clientdata) SWIG_Python_GetModule()
#define SWIG_SetModule(clientdata, pointer) SWIG_Python_SetModule(pointer)
/* -----------------------------------------------------------------------------
* Pointer declarations
* ----------------------------------------------------------------------------- */
/*
Use SWIG_NO_COBJECT_TYPES to force the use of strings to represent
C/C++ pointers in the python side. Very useful for debugging, but
not always safe.
*/
#if !defined(SWIG_NO_COBJECT_TYPES) && !defined(SWIG_COBJECT_TYPES)
# define SWIG_COBJECT_TYPES
#endif
/* Flags for pointer conversion */
#define SWIG_POINTER_EXCEPTION 0x1
#define SWIG_POINTER_DISOWN 0x2
/* Add PyOS_snprintf for old Pythons */
#if PY_VERSION_HEX < 0x02020000
#define PyOS_snprintf snprintf
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* -----------------------------------------------------------------------------
* Create a new pointer string
* ----------------------------------------------------------------------------- */
#ifndef SWIG_BUFFER_SIZE
#define SWIG_BUFFER_SIZE 1024
#endif
#if defined(SWIG_COBJECT_TYPES)
#if !defined(SWIG_COBJECT_PYTHON)
/* -----------------------------------------------------------------------------
 * Implements a simple Swig Object type, and uses it instead of PyCObject
* ----------------------------------------------------------------------------- */
typedef struct {
PyObject_HEAD
void *ptr;
const char *desc;
} PySwigObject;
/* Declarations for objects of type PySwigObject */
SWIGRUNTIME int
PySwigObject_print(PySwigObject *v, FILE *fp, int flags)
{
char result[SWIG_BUFFER_SIZE];
flags = flags;
if (SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result))) {
fputs("<Swig Object at ", fp); fputs(result, fp); fputs(">", fp);
return 0;
} else {
return 1;
}
}
SWIGRUNTIME PyObject *
PySwigObject_repr(PySwigObject *v)
{
char result[SWIG_BUFFER_SIZE];
return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?
PyString_FromFormat("<Swig Object at %s>", result) : 0;
}
SWIGRUNTIME PyObject *
PySwigObject_str(PySwigObject *v)
{
char result[SWIG_BUFFER_SIZE];
return SWIG_PackVoidPtr(result, v->ptr, v->desc, sizeof(result)) ?
PyString_FromString(result) : 0;
}
SWIGRUNTIME PyObject *
PySwigObject_long(PySwigObject *v)
{
return PyLong_FromVoidPtr(v->ptr);
}
SWIGRUNTIME PyObject *
PySwigObject_format(const char* fmt, PySwigObject *v)
{
PyObject *res = NULL;
PyObject *args = PyTuple_New(1);
if (args && (PyTuple_SetItem(args, 0, PySwigObject_long(v)) == 0)) {
PyObject *ofmt = PyString_FromString(fmt);
if (ofmt) {
res = PyString_Format(ofmt,args);
Py_DECREF(ofmt);
}
Py_DECREF(args);
}
return res;
}
SWIGRUNTIME PyObject *
PySwigObject_oct(PySwigObject *v)
{
return PySwigObject_format("%o",v);
}
SWIGRUNTIME PyObject *
PySwigObject_hex(PySwigObject *v)
{
return PySwigObject_format("%x",v);
}
SWIGRUNTIME int
PySwigObject_compare(PySwigObject *v, PySwigObject *w)
{
int c = strcmp(v->desc, w->desc);
if (c) {
return (c > 0) ? 1 : -1;
} else {
void *i = v->ptr;
void *j = w->ptr;
return (i < j) ? -1 : ((i > j) ? 1 : 0);
}
}
SWIGRUNTIME void
PySwigObject_dealloc(PySwigObject *self)
{
PyObject_DEL(self);
}
SWIGRUNTIME PyTypeObject*
PySwigObject_type(void) {
static char pyswigobject_type__doc__[] =
"Swig object carries a C/C++ instance pointer";
static PyNumberMethods PySwigObject_as_number = {
(binaryfunc)0, /*nb_add*/
(binaryfunc)0, /*nb_subtract*/
(binaryfunc)0, /*nb_multiply*/
(binaryfunc)0, /*nb_divide*/
(binaryfunc)0, /*nb_remainder*/
(binaryfunc)0, /*nb_divmod*/
(ternaryfunc)0,/*nb_power*/
(unaryfunc)0, /*nb_negative*/
(unaryfunc)0, /*nb_positive*/
(unaryfunc)0, /*nb_absolute*/
(inquiry)0, /*nb_nonzero*/
0, /*nb_invert*/
0, /*nb_lshift*/
0, /*nb_rshift*/
0, /*nb_and*/
0, /*nb_xor*/
0, /*nb_or*/
(coercion)0, /*nb_coerce*/
(unaryfunc)PySwigObject_long, /*nb_int*/
(unaryfunc)PySwigObject_long, /*nb_long*/
(unaryfunc)0, /*nb_float*/
(unaryfunc)PySwigObject_oct, /*nb_oct*/
(unaryfunc)PySwigObject_hex, /*nb_hex*/
#if PY_VERSION_HEX >= 0x02000000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */
#endif
};
static PyTypeObject pyswigobject_type
#if !defined(__cplusplus)
;
static int type_init = 0;
if (!type_init) {
PyTypeObject tmp
#endif
= {
PyObject_HEAD_INIT(&PyType_Type)
0, /*ob_size*/
"PySwigObject", /*tp_name*/
sizeof(PySwigObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor)PySwigObject_dealloc, /*tp_dealloc*/
(printfunc)PySwigObject_print, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc)PySwigObject_compare, /*tp_compare*/
(reprfunc)PySwigObject_repr, /*tp_repr*/
&PySwigObject_as_number, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)0, /*tp_hash*/
(ternaryfunc)0, /*tp_call*/
(reprfunc)PySwigObject_str, /*tp_str*/
/* Space for future expansion */
0,0,0,0,
pyswigobject_type__doc__, /* Documentation string */
#if PY_VERSION_HEX >= 0x02000000
0, /* tp_traverse */
0, /* tp_clear */
#endif
#if PY_VERSION_HEX >= 0x02010000
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#endif
#if PY_VERSION_HEX >= 0x02020000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
#if !defined(__cplusplus)
pyswigobject_type = tmp;
type_init = 1;
}
#endif
return &pyswigobject_type;
}
SWIGRUNTIME PyObject *
PySwigObject_FromVoidPtrAndDesc(void *ptr, const char *desc)
{
PySwigObject *self = PyObject_NEW(PySwigObject, PySwigObject_type());
if (self) {
self->ptr = ptr;
self->desc = desc;
}
return (PyObject *)self;
}
SWIGRUNTIMEINLINE void *
PySwigObject_AsVoidPtr(PyObject *self)
{
return ((PySwigObject *)self)->ptr;
}
SWIGRUNTIMEINLINE const char *
PySwigObject_GetDesc(PyObject *self)
{
return ((PySwigObject *)self)->desc;
}
SWIGRUNTIMEINLINE int
PySwigObject_Check(PyObject *op) {
return ((op)->ob_type == PySwigObject_type())
|| (strcmp((op)->ob_type->tp_name,"PySwigObject") == 0);
}
/* -----------------------------------------------------------------------------
 * Implements a simple Swig Packed type, and uses it instead of a string
* ----------------------------------------------------------------------------- */
typedef struct {
PyObject_HEAD
void *pack;
const char *desc;
size_t size;
} PySwigPacked;
SWIGRUNTIME int
PySwigPacked_print(PySwigPacked *v, FILE *fp, int flags)
{
char result[SWIG_BUFFER_SIZE];
flags = flags;
fputs("<Swig Packed ", fp);
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
fputs("at ", fp);
fputs(result, fp);
}
fputs(v->desc,fp);
fputs(">", fp);
return 0;
}
SWIGRUNTIME PyObject *
PySwigPacked_repr(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
return PyString_FromFormat("<Swig Packed at %s%s>", result, v->desc);
} else {
return PyString_FromFormat("<Swig Packed %s>", v->desc);
}
}
SWIGRUNTIME PyObject *
PySwigPacked_str(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){
return PyString_FromFormat("%s%s", result, v->desc);
} else {
return PyString_FromFormat("%s", v->desc);
}
}
SWIGRUNTIME int
PySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)
{
int c = strcmp(v->desc, w->desc);
if (c) {
return (c > 0) ? 1 : -1;
} else {
size_t i = v->size;
size_t j = w->size;
int s = (i < j) ? -1 : ((i > j) ? 1 : 0);
    /* compare only the v->size raw bytes actually stored in each buffer */
    return s ? s : strncmp((char *)v->pack, (char *)w->pack, v->size);
}
}
SWIGRUNTIME void
PySwigPacked_dealloc(PySwigPacked *self)
{
free(self->pack);
PyObject_DEL(self);
}
SWIGRUNTIME PyTypeObject*
PySwigPacked_type(void) {
static char pyswigpacked_type__doc__[] =
"Swig object carries a C/C++ instance pointer";
static PyTypeObject pyswigpacked_type
#if !defined(__cplusplus)
;
static int type_init = 0;
if (!type_init) {
PyTypeObject tmp
#endif
= {
PyObject_HEAD_INIT(&PyType_Type)
0, /*ob_size*/
"PySwigPacked", /*tp_name*/
sizeof(PySwigPacked), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor)PySwigPacked_dealloc, /*tp_dealloc*/
(printfunc)PySwigPacked_print, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc)PySwigPacked_compare, /*tp_compare*/
(reprfunc)PySwigPacked_repr, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)0, /*tp_hash*/
(ternaryfunc)0, /*tp_call*/
(reprfunc)PySwigPacked_str, /*tp_str*/
/* Space for future expansion */
0,0,0,0,
pyswigpacked_type__doc__, /* Documentation string */
#if PY_VERSION_HEX >= 0x02000000
0, /* tp_traverse */
0, /* tp_clear */
#endif
#if PY_VERSION_HEX >= 0x02010000
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#endif
#if PY_VERSION_HEX >= 0x02020000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* tp_iter -> tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
#if !defined(__cplusplus)
pyswigpacked_type = tmp;
type_init = 1;
}
#endif
return &pyswigpacked_type;
}
SWIGRUNTIME PyObject *
PySwigPacked_FromDataAndDesc(void *ptr, size_t size, const char *desc)
{
PySwigPacked *self = PyObject_NEW(PySwigPacked, PySwigPacked_type());
if (self == NULL) {
return NULL;
} else {
void *pack = malloc(size);
if (pack) {
memcpy(pack, ptr, size);
self->pack = pack;
self->desc = desc;
self->size = size;
return (PyObject *) self;
}
    /* malloc failed: free the half-built object to avoid a leak */
    PyObject_DEL(self);
    return NULL;
}
}
SWIGRUNTIMEINLINE const char *
PySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)
{
PySwigPacked *self = (PySwigPacked *)obj;
if (self->size != size) return 0;
memcpy(ptr, self->pack, size);
return self->desc;
}
SWIGRUNTIMEINLINE const char *
PySwigPacked_GetDesc(PyObject *self)
{
return ((PySwigPacked *)self)->desc;
}
SWIGRUNTIMEINLINE int
PySwigPacked_Check(PyObject *op) {
return ((op)->ob_type == PySwigPacked_type())
|| (strcmp((op)->ob_type->tp_name,"PySwigPacked") == 0);
}
#else
/* -----------------------------------------------------------------------------
* Use the old Python PyCObject instead of PySwigObject
* ----------------------------------------------------------------------------- */
#define PySwigObject_GetDesc(obj) PyCObject_GetDesc(obj)
#define PySwigObject_Check(obj) PyCObject_Check(obj)
#define PySwigObject_AsVoidPtr(obj) PyCObject_AsVoidPtr(obj)
#define PySwigObject_FromVoidPtrAndDesc(p, d) PyCObject_FromVoidPtrAndDesc(p, d, NULL)
#endif
#endif
/* -----------------------------------------------------------------------------
* errors manipulation
* ----------------------------------------------------------------------------- */
SWIGRUNTIME void
SWIG_Python_TypeError(const char *type, PyObject *obj)
{
if (type) {
#if defined(SWIG_COBJECT_TYPES)
if (obj && PySwigObject_Check(obj)) {
const char *otype = (const char *) PySwigObject_GetDesc(obj);
if (otype) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, 'PySwigObject(%s)' is received",
type, otype);
return;
}
} else
#endif
{
const char *otype = (obj ? obj->ob_type->tp_name : 0);
if (otype) {
PyObject *str = PyObject_Str(obj);
const char *cstr = str ? PyString_AsString(str) : 0;
if (cstr) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s(%s)' is received",
type, otype, cstr);
} else {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s' is received",
type, otype);
}
Py_XDECREF(str);
return;
}
}
PyErr_Format(PyExc_TypeError, "a '%s' is expected", type);
} else {
PyErr_Format(PyExc_TypeError, "unexpected type is received");
}
}
SWIGRUNTIMEINLINE void
SWIG_Python_NullRef(const char *type)
{
if (type) {
PyErr_Format(PyExc_TypeError, "null reference of type '%s' was received",type);
} else {
PyErr_Format(PyExc_TypeError, "null reference was received");
}
}
SWIGRUNTIME int
SWIG_Python_AddErrMesg(const char* mesg, int infront)
{
if (PyErr_Occurred()) {
PyObject *type = 0;
PyObject *value = 0;
PyObject *traceback = 0;
PyErr_Fetch(&type, &value, &traceback);
if (value) {
PyObject *old_str = PyObject_Str(value);
Py_XINCREF(type);
PyErr_Clear();
if (infront) {
PyErr_Format(type, "%s %s", mesg, PyString_AsString(old_str));
} else {
PyErr_Format(type, "%s %s", PyString_AsString(old_str), mesg);
}
Py_DECREF(old_str);
}
return 1;
} else {
return 0;
}
}
SWIGRUNTIME int
SWIG_Python_ArgFail(int argnum)
{
if (PyErr_Occurred()) {
/* add information about failing argument */
char mesg[256];
PyOS_snprintf(mesg, sizeof(mesg), "argument number %d:", argnum);
return SWIG_Python_AddErrMesg(mesg, 1);
} else {
return 0;
}
}
/* -----------------------------------------------------------------------------
* pointers/data manipulation
* ----------------------------------------------------------------------------- */
/* Convert a pointer value */
SWIGRUNTIME int
SWIG_Python_ConvertPtr(PyObject *obj, void **ptr, swig_type_info *ty, int flags) {
swig_cast_info *tc;
const char *c = 0;
static PyObject *SWIG_this = 0;
int newref = 0;
PyObject *pyobj = 0;
void *vptr;
if (!obj) return 0;
if (obj == Py_None) {
*ptr = 0;
return 0;
}
#ifdef SWIG_COBJECT_TYPES
if (!(PySwigObject_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PySwigObject_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
vptr = PySwigObject_AsVoidPtr(obj);
c = (const char *) PySwigObject_GetDesc(obj);
if (newref) { Py_DECREF(obj); }
goto type_check;
#else
if (!(PyString_Check(obj))) {
if (!SWIG_this)
SWIG_this = PyString_FromString("this");
pyobj = obj;
obj = PyObject_GetAttr(obj,SWIG_this);
newref = 1;
if (!obj) goto type_error;
if (!PyString_Check(obj)) {
Py_DECREF(obj);
goto type_error;
}
}
c = PyString_AS_STRING(obj);
/* Pointer values must start with leading underscore */
c = SWIG_UnpackVoidPtr(c, &vptr, ty->name);
if (newref) { Py_DECREF(obj); }
if (!c) goto type_error;
#endif
type_check:
if (ty) {
tc = SWIG_TypeCheck(c,ty);
if (!tc) goto type_error;
*ptr = SWIG_TypeCast(tc,vptr);
} else {
*ptr = vptr;
}
if ((pyobj) && (flags & SWIG_POINTER_DISOWN)) {
PyObject_SetAttrString(pyobj,(char*)"thisown",Py_False);
}
return 0;
type_error:
PyErr_Clear();
if (pyobj && !obj) {
obj = pyobj;
if (PyCFunction_Check(obj)) {
/* here we get the method pointer for callbacks */
char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);
c = doc ? strstr(doc, "swig_ptr: ") : 0;
if (c) {
c = ty ? SWIG_UnpackVoidPtr(c + 10, &vptr, ty->name) : 0;
if (!c) goto type_error;
goto type_check;
}
}
}
if (flags & SWIG_POINTER_EXCEPTION) {
if (ty) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
} else {
SWIG_Python_TypeError("C/C++ pointer", obj);
}
}
return -1;
}
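/* Typical wrapper usage (editorial sketch; SWIGTYPE_p_Foo stands for a
   descriptor that generated wrapper code would define):
     void *argp = 0;
     if (SWIG_Python_ConvertPtr(obj, &argp, SWIGTYPE_p_Foo,
                                SWIG_POINTER_EXCEPTION) == -1) return NULL;
     Foo *foo = (Foo *) argp;
*/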
/* Convert a pointer value, signal an exception on a type mismatch */
SWIGRUNTIME void *
SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {
void *result;
if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
SWIG_Python_ArgFail(argnum);
}
}
return result;
}
/* Convert a packed value */
SWIGRUNTIME int
SWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty, int flags) {
swig_cast_info *tc;
const char *c = 0;
#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)
c = PySwigPacked_UnpackData(obj, ptr, sz);
#else
if ((!obj) || (!PyString_Check(obj))) goto type_error;
c = PyString_AS_STRING(obj);
/* Pointer values must start with leading underscore */
c = SWIG_UnpackDataName(c, ptr, sz, ty->name);
#endif
if (!c) goto type_error;
if (ty) {
tc = SWIG_TypeCheck(c,ty);
if (!tc) goto type_error;
}
return 0;
type_error:
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
if (ty) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
} else {
SWIG_Python_TypeError("C/C++ packed data", obj);
}
}
return -1;
}
/* Create a new pointer object */
SWIGRUNTIME PyObject *
SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int own) {
PyObject *robj = 0;
if (!type) {
if (!PyErr_Occurred()) {
PyErr_Format(PyExc_TypeError, "Swig: null type passed to NewPointerObj");
}
return robj;
}
if (!ptr) {
Py_INCREF(Py_None);
return Py_None;
}
#ifdef SWIG_COBJECT_TYPES
robj = PySwigObject_FromVoidPtrAndDesc((void *) ptr, (char *)type->name);
#else
{
char result[SWIG_BUFFER_SIZE];
robj = SWIG_PackVoidPtr(result, ptr, type->name, sizeof(result)) ?
PyString_FromString(result) : 0;
}
#endif
if (!robj || (robj == Py_None)) return robj;
if (type->clientdata) {
PyObject *inst;
PyObject *args = Py_BuildValue((char*)"(O)", robj);
Py_DECREF(robj);
inst = PyObject_CallObject((PyObject *) type->clientdata, args);
Py_DECREF(args);
if (inst) {
if (own) {
PyObject_SetAttrString(inst,(char*)"thisown",Py_True);
}
robj = inst;
}
}
return robj;
}
SWIGRUNTIME PyObject *
SWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {
PyObject *robj = 0;
if (!ptr) {
Py_INCREF(Py_None);
return Py_None;
}
#if defined(SWIG_COBJECT_TYPES) && !defined(SWIG_COBJECT_PYTHON)
robj = PySwigPacked_FromDataAndDesc((void *) ptr, sz, (char *)type->name);
#else
{
char result[SWIG_BUFFER_SIZE];
robj = SWIG_PackDataName(result, ptr, sz, type->name, sizeof(result)) ?
PyString_FromString(result) : 0;
}
#endif
return robj;
}
/* -----------------------------------------------------------------------------*
* Get type list
* -----------------------------------------------------------------------------*/
#ifdef SWIG_LINK_RUNTIME
void *SWIG_ReturnGlobalTypeList(void *);
#endif
SWIGRUNTIME swig_module_info *
SWIG_Python_GetModule(void) {
static void *type_pointer = (void *)0;
/* first check if module already created */
if (!type_pointer) {
#ifdef SWIG_LINK_RUNTIME
type_pointer = SWIG_ReturnGlobalTypeList((void *)0);
#else
type_pointer = PyCObject_Import((char*)"swig_runtime_data" SWIG_RUNTIME_VERSION,
(char*)"type_pointer" SWIG_TYPE_TABLE_NAME);
if (PyErr_Occurred()) {
PyErr_Clear();
type_pointer = (void *)0;
}
#endif
  }
return (swig_module_info *) type_pointer;
}
SWIGRUNTIME void
SWIG_Python_SetModule(swig_module_info *swig_module) {
static PyMethodDef swig_empty_runtime_method_table[] = { {NULL, NULL, 0, NULL} };/* Sentinel */
PyObject *module = Py_InitModule((char*)"swig_runtime_data" SWIG_RUNTIME_VERSION,
swig_empty_runtime_method_table);
PyObject *pointer = PyCObject_FromVoidPtr((void *) swig_module, NULL);
if (pointer && module) {
PyModule_AddObject(module, (char*)"type_pointer" SWIG_TYPE_TABLE_NAME, pointer);
}
}
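/* Editorial note: together these publish the module ring through a tiny
   helper module named "swig_runtime_data" SWIG_RUNTIME_VERSION (that is,
   "swig_runtime_data2" here), so independently compiled extensions that
   agree on the runtime version share a single type table. */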
#ifdef __cplusplus
}
#endif
/* -----------------------------------------------------------------------------*
Standard SWIG API for use inside user code.
Don't include this file directly, run the command
swig -python -external-runtime
Also, read the Modules chapter of the SWIG Manual.
* -----------------------------------------------------------------------------*/
#ifdef SWIG_MODULE_CLIENTDATA_TYPE
SWIGRUNTIMEINLINE swig_type_info *
SWIG_TypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {
swig_module_info *module = SWIG_GetModule(clientdata);
return SWIG_TypeQueryModule(module, module, name);
}
SWIGRUNTIMEINLINE swig_type_info *
SWIG_MangledTypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {
swig_module_info *module = SWIG_GetModule(clientdata);
return SWIG_MangledTypeQueryModule(module, module, name);
}
#else
SWIGRUNTIMEINLINE swig_type_info *
SWIG_TypeQuery(const char *name) {
swig_module_info *module = SWIG_GetModule();
return SWIG_TypeQueryModule(module, module, name);
}
SWIGRUNTIMEINLINE swig_type_info *
SWIG_MangledTypeQuery(const char *name) {
swig_module_info *module = SWIG_GetModule();
return SWIG_MangledTypeQueryModule(module, module, name);
}
#endif
"""
######################################################################
# This is for SWIG-1.3.28 and higher.
# SWIG_RUNTIME_VERSION == "3"
# All this does is to include the contents of the file generated by
# this command:
# swig -python -external-runtime
swigptr2_code_v3 = """
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 1.3.30
*
* This file is not intended to be easily readable and contains a number of
* coding conventions designed to improve portability and efficiency. Do not make
* changes to this file unless you know what you are doing--modify the SWIG
* interface file instead.
* ----------------------------------------------------------------------------- */
/* -----------------------------------------------------------------------------
* This section contains generic SWIG labels for method/variable
* declarations/attributes, and other compiler dependent labels.
* ----------------------------------------------------------------------------- */
/* template workaround for compilers that cannot correctly implement the C++ standard */
#ifndef SWIGTEMPLATEDISAMBIGUATOR
# if defined(__SUNPRO_CC)
# if (__SUNPRO_CC <= 0x560)
# define SWIGTEMPLATEDISAMBIGUATOR template
# else
# define SWIGTEMPLATEDISAMBIGUATOR
# endif
# else
# define SWIGTEMPLATEDISAMBIGUATOR
# endif
#endif
/* inline attribute */
#ifndef SWIGINLINE
# if defined(__cplusplus) || (defined(__GNUC__) && !defined(__STRICT_ANSI__))
# define SWIGINLINE inline
# else
# define SWIGINLINE
# endif
#endif
/* attribute recognised by some compilers to avoid 'unused' warnings */
#ifndef SWIGUNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define SWIGUNUSED __attribute__ ((__unused__))
# else
# define SWIGUNUSED
# endif
# elif defined(__ICC)
# define SWIGUNUSED __attribute__ ((__unused__))
# else
# define SWIGUNUSED
# endif
#endif
#ifndef SWIGUNUSEDPARM
# ifdef __cplusplus
# define SWIGUNUSEDPARM(p)
# else
# define SWIGUNUSEDPARM(p) p SWIGUNUSED
# endif
#endif
/* internal SWIG method */
#ifndef SWIGINTERN
# define SWIGINTERN static SWIGUNUSED
#endif
/* internal inline SWIG method */
#ifndef SWIGINTERNINLINE
# define SWIGINTERNINLINE SWIGINTERN SWIGINLINE
#endif
/* exporting methods */
#if (__GNUC__ >= 4) || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)
# ifndef GCC_HASCLASSVISIBILITY
# define GCC_HASCLASSVISIBILITY
# endif
#endif
#ifndef SWIGEXPORT
# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)
# if defined(STATIC_LINKED)
# define SWIGEXPORT
# else
# define SWIGEXPORT __declspec(dllexport)
# endif
# else
# if defined(__GNUC__) && defined(GCC_HASCLASSVISIBILITY)
# define SWIGEXPORT __attribute__ ((visibility("default")))
# else
# define SWIGEXPORT
# endif
# endif
#endif
/* calling conventions for Windows */
#ifndef SWIGSTDCALL
# if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)
# define SWIGSTDCALL __stdcall
# else
# define SWIGSTDCALL
# endif
#endif
/* Deal with Microsoft's attempt at deprecating C standard runtime functions */
#if !defined(SWIG_NO_CRT_SECURE_NO_DEPRECATE) && defined(_MSC_VER) && !defined(_CRT_SECURE_NO_DEPRECATE)
# define _CRT_SECURE_NO_DEPRECATE
#endif
/* Errors in SWIG */
#define SWIG_UnknownError -1
#define SWIG_IOError -2
#define SWIG_RuntimeError -3
#define SWIG_IndexError -4
#define SWIG_TypeError -5
#define SWIG_DivisionByZero -6
#define SWIG_OverflowError -7
#define SWIG_SyntaxError -8
#define SWIG_ValueError -9
#define SWIG_SystemError -10
#define SWIG_AttributeError -11
#define SWIG_MemoryError -12
#define SWIG_NullReferenceError -13
/* -----------------------------------------------------------------------------
* swigrun.swg
*
* This file contains generic CAPI SWIG runtime support for pointer
* type checking.
* ----------------------------------------------------------------------------- */
/* This should only be incremented when either the layout of swig_type_info changes,
or for whatever reason, the runtime changes incompatibly */
#define SWIG_RUNTIME_VERSION "3"
/* define SWIG_TYPE_TABLE_NAME as "SWIG_TYPE_TABLE" */
#ifdef SWIG_TYPE_TABLE
# define SWIG_QUOTE_STRING(x) #x
# define SWIG_EXPAND_AND_QUOTE_STRING(x) SWIG_QUOTE_STRING(x)
# define SWIG_TYPE_TABLE_NAME SWIG_EXPAND_AND_QUOTE_STRING(SWIG_TYPE_TABLE)
#else
# define SWIG_TYPE_TABLE_NAME
#endif
/*
You can use the SWIGRUNTIME and SWIGRUNTIMEINLINE macros for
creating a static or dynamic library from the swig runtime code.
In 99.9% of the cases, swig just needs to declare them as 'static'.
   But only do this if it is strictly necessary, i.e., if you have problems
   with your compiler.
*/
#ifndef SWIGRUNTIME
# define SWIGRUNTIME SWIGINTERN
#endif
#ifndef SWIGRUNTIMEINLINE
# define SWIGRUNTIMEINLINE SWIGRUNTIME SWIGINLINE
#endif
/* Generic buffer size */
#ifndef SWIG_BUFFER_SIZE
# define SWIG_BUFFER_SIZE 1024
#endif
/* Flags for pointer conversions */
#define SWIG_POINTER_DISOWN 0x1
/* Flags for new pointer objects */
#define SWIG_POINTER_OWN 0x1
/*
Flags/methods for returning states.
  The swig conversion methods, such as ConvertPtr, return an integer
  that tells whether the conversion was successful or not, and if not,
  an error code can be returned (see swigerrors.swg for the codes).
Use the following macros/flags to set or process the returning
states.
In old swig versions, you usually write code as:
if (SWIG_ConvertPtr(obj,vptr,ty.flags) != -1) {
// success code
} else {
//fail code
}
Now you can be more explicit as:
int res = SWIG_ConvertPtr(obj,vptr,ty.flags);
if (SWIG_IsOK(res)) {
// success code
} else {
// fail code
}
that seems to be the same, but now you can also do
Type *ptr;
int res = SWIG_ConvertPtr(obj,(void **)(&ptr),ty.flags);
if (SWIG_IsOK(res)) {
// success code
    if (SWIG_IsNewObj(res)) {
...
delete *ptr;
} else {
...
}
} else {
// fail code
}
I.e., now SWIG_ConvertPtr can return new objects and you can
  identify the case and take care of the deallocation. Of course, that
  also requires SWIG_ConvertPtr to return new result values, such as
int SWIG_ConvertPtr(obj, ptr,...) {
if (<obj is ok>) {
if (<need new object>) {
*ptr = <ptr to new allocated object>;
return SWIG_NEWOBJ;
} else {
*ptr = <ptr to old object>;
return SWIG_OLDOBJ;
}
} else {
return SWIG_BADOBJ;
}
}
Of course, returning the plain '0(success)/-1(fail)' still works, but you can be
more explicit by returning SWIG_BADOBJ, SWIG_ERROR or any of the
  swig error codes.
  Finally, if SWIG_CASTRANK_MODE is enabled, the result code can also
  carry the 'cast rank'. For example, if you have
  int food(double);
  int fooi(int);
  and you call
  food(1) // cast rank '1' (1 -> 1.0)
  fooi(1) // cast rank '0'
  just use the SWIG_AddCast()/SWIG_CheckState() macros.
*/
#define SWIG_OK (0)
#define SWIG_ERROR (-1)
#define SWIG_IsOK(r) (r >= 0)
#define SWIG_ArgError(r) ((r != SWIG_ERROR) ? r : SWIG_TypeError)
/* The CastRankLimit says how many bits are used for the cast rank */
#define SWIG_CASTRANKLIMIT (1 << 8)
/* The NewMask denotes the object was created (using new/malloc) */
#define SWIG_NEWOBJMASK (SWIG_CASTRANKLIMIT << 1)
/* The TmpMask is for in/out typemaps that use temporal objects */
#define SWIG_TMPOBJMASK (SWIG_NEWOBJMASK << 1)
/* Simple returning values */
#define SWIG_BADOBJ (SWIG_ERROR)
#define SWIG_OLDOBJ (SWIG_OK)
#define SWIG_NEWOBJ (SWIG_OK | SWIG_NEWOBJMASK)
#define SWIG_TMPOBJ (SWIG_OK | SWIG_TMPOBJMASK)
/* Check, add and del mask methods */
#define SWIG_AddNewMask(r) (SWIG_IsOK(r) ? (r | SWIG_NEWOBJMASK) : r)
#define SWIG_DelNewMask(r) (SWIG_IsOK(r) ? (r & ~SWIG_NEWOBJMASK) : r)
#define SWIG_IsNewObj(r) (SWIG_IsOK(r) && (r & SWIG_NEWOBJMASK))
#define SWIG_AddTmpMask(r) (SWIG_IsOK(r) ? (r | SWIG_TMPOBJMASK) : r)
#define SWIG_DelTmpMask(r) (SWIG_IsOK(r) ? (r & ~SWIG_TMPOBJMASK) : r)
#define SWIG_IsTmpObj(r) (SWIG_IsOK(r) && (r & SWIG_TMPOBJMASK))
/* Cast-Rank Mode */
#if defined(SWIG_CASTRANK_MODE)
# ifndef SWIG_TypeRank
# define SWIG_TypeRank unsigned long
# endif
# ifndef SWIG_MAXCASTRANK /* Default cast allowed */
# define SWIG_MAXCASTRANK (2)
# endif
# define SWIG_CASTRANKMASK ((SWIG_CASTRANKLIMIT) -1)
# define SWIG_CastRank(r) (r & SWIG_CASTRANKMASK)
SWIGINTERNINLINE int SWIG_AddCast(int r) {
return SWIG_IsOK(r) ? ((SWIG_CastRank(r) < SWIG_MAXCASTRANK) ? (r + 1) : SWIG_ERROR) : r;
}
SWIGINTERNINLINE int SWIG_CheckState(int r) {
return SWIG_IsOK(r) ? SWIG_CastRank(r) + 1 : 0;
}
#else /* no cast-rank mode */
# define SWIG_AddCast
# define SWIG_CheckState(r) (SWIG_IsOK(r) ? 1 : 0)
#endif
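/* Editorial sketch: in SWIG_CASTRANK_MODE, a converter that accepted an
   implicit int -> double conversion could report it as
     return SWIG_AddCast(SWIG_OK);
   and dispatch code can rank candidate overloads with
   SWIG_CheckState(res).  Without cast-rank mode, SWIG_AddCast expands to
   nothing and the call collapses to return (SWIG_OK). */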
#include <string.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef void *(*swig_converter_func)(void *);
typedef struct swig_type_info *(*swig_dycast_func)(void **);
/* Structure to store information on one type */
typedef struct swig_type_info {
const char *name; /* mangled name of this type */
const char *str; /* human readable name of this type */
swig_dycast_func dcast; /* dynamic cast function down a hierarchy */
struct swig_cast_info *cast; /* linked list of types that can cast into this type */
void *clientdata; /* language specific type data */
int owndata; /* flag if the structure owns the clientdata */
} swig_type_info;
/* Structure to store a type and conversion function used for casting */
typedef struct swig_cast_info {
swig_type_info *type; /* pointer to type that is equivalent to this type */
swig_converter_func converter; /* function to cast the void pointers */
struct swig_cast_info *next; /* pointer to next cast in linked list */
struct swig_cast_info *prev; /* pointer to the previous cast */
} swig_cast_info;
/* Structure used to store module information
* Each module generates one structure like this, and the runtime collects
* all of these structures and stores them in a circularly linked list.*/
typedef struct swig_module_info {
swig_type_info **types; /* Array of pointers to swig_type_info structures that are in this module */
size_t size; /* Number of types in this module */
struct swig_module_info *next; /* Pointer to next element in circularly linked list */
swig_type_info **type_initial; /* Array of initially generated type structures */
swig_cast_info **cast_initial; /* Array of initially generated casting structures */
void *clientdata; /* Language specific module data */
} swig_module_info;
/*
Compare two type names skipping the space characters, therefore
"char*" == "char *" and "Class<int>" == "Class<int >", etc.
Return 0 when the two name types are equivalent, as in
strncmp, but skipping ' '.
*/
SWIGRUNTIME int
SWIG_TypeNameComp(const char *f1, const char *l1,
const char *f2, const char *l2) {
for (;(f1 != l1) && (f2 != l2); ++f1, ++f2) {
while ((*f1 == ' ') && (f1 != l1)) ++f1;
while ((*f2 == ' ') && (f2 != l2)) ++f2;
if (*f1 != *f2) return (*f1 > *f2) ? 1 : -1;
}
return (l1 - f1) - (l2 - f2);
}
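/* Hedged usage sketch (editor's addition, not part of the SWIG runtime):
   spaces inside the names are ignored, so these two spellings of the same
   type compare equal. */
#if 0
SWIGINTERN int SWIG_TypeNameComp_demo(void) {
  const char *a = "Class<int>";
  const char *b = "Class<int >";
  return SWIG_TypeNameComp(a, a + strlen(a), b, b + strlen(b)) == 0; /* 1 */
}
#endif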
/*
Check type equivalence in a name list like <name1>|<name2>|...
Return 0 if not equal, 1 if equal
*/
SWIGRUNTIME int
SWIG_TypeEquiv(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;
if (*ne) ++ne;
}
return equiv;
}
/*
Check type equivalence in a name list like <name1>|<name2>|...
Return 0 if not equal, 1 if equal (note: despite the name, this behaves
like SWIG_TypeEquiv rather than a three-way comparison)
*/
SWIGRUNTIME int
SWIG_TypeCompare(const char *nb, const char *tb) {
int equiv = 0;
const char* te = tb + strlen(tb);
const char* ne = nb;
while (!equiv && *ne) {
for (nb = ne; *ne; ++ne) {
if (*ne == '|') break;
}
equiv = (SWIG_TypeNameComp(nb, ne, tb, te) == 0) ? 1 : 0;
if (*ne) ++ne;
}
return equiv;
}
/* think of this as a C++ template<> or a Scheme macro */
#define SWIG_TypeCheck_Template(comparison, ty) \
if (ty) { \
swig_cast_info *iter = ty->cast; \
while (iter) { \
if (comparison) { \
if (iter == ty->cast) return iter; \
/* Move iter to the top of the linked list */ \
iter->prev->next = iter->next; \
if (iter->next) \
iter->next->prev = iter->prev; \
iter->next = ty->cast; \
iter->prev = 0; \
if (ty->cast) ty->cast->prev = iter; \
ty->cast = iter; \
return iter; \
} \
iter = iter->next; \
} \
} \
return 0
/*
Check whether the mangled type name c appears in ty's cast list; a hit
is moved to the front of the list so repeated lookups stay cheap
*/
SWIGRUNTIME swig_cast_info *
SWIG_TypeCheck(const char *c, swig_type_info *ty) {
SWIG_TypeCheck_Template(strcmp(iter->type->name, c) == 0, ty);
}
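/* Hedged sketch (editor's addition): a successful SWIG_TypeCheck moves the
   matching cast to the front of ty->cast, so repeated checks of the same
   mangled name ("_p_int" is an assumed example name) become O(1). */
#if 0
SWIGINTERN swig_cast_info *SWIG_TypeCheck_demo(swig_type_info *ty) {
  return SWIG_TypeCheck("_p_int", ty);
}
#endif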
/* Same as previous function, except strcmp is replaced with a pointer comparison */
SWIGRUNTIME swig_cast_info *
SWIG_TypeCheckStruct(swig_type_info *from, swig_type_info *into) {
SWIG_TypeCheck_Template(iter->type == from, into);
}
/*
Cast a pointer up an inheritance hierarchy
*/
SWIGRUNTIMEINLINE void *
SWIG_TypeCast(swig_cast_info *ty, void *ptr) {
return ((!ty) || (!ty->converter)) ? ptr : (*ty->converter)(ptr);
}
/*
Dynamic pointer casting down an inheritance hierarchy
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeDynamicCast(swig_type_info *ty, void **ptr) {
swig_type_info *lastty = ty;
if (!ty || !ty->dcast) return ty;
while (ty && (ty->dcast)) {
ty = (*ty->dcast)(ptr);
if (ty) lastty = ty;
}
return lastty;
}
/*
Return the name associated with this type
*/
SWIGRUNTIMEINLINE const char *
SWIG_TypeName(const swig_type_info *ty) {
return ty->name;
}
/*
Return the pretty name associated with this type,
that is, an unmangled type name in a form presentable to the user.
*/
SWIGRUNTIME const char *
SWIG_TypePrettyName(const swig_type_info *type) {
/* The "str" field contains the equivalent pretty names of the
type, separated by vertical-bar characters. We choose
to print the last name, as it is often (?) the most
specific. */
if (!type) return NULL;
if (type->str != NULL) {
const char *last_name = type->str;
const char *s;
for (s = type->str; *s; s++)
if (*s == '|') last_name = s+1;
return last_name;
}
else
return type->name;
}
/*
Set the clientdata field for a type
*/
SWIGRUNTIME void
SWIG_TypeClientData(swig_type_info *ti, void *clientdata) {
swig_cast_info *cast = ti->cast;
/* if (ti->clientdata == clientdata) return; */
ti->clientdata = clientdata;
while (cast) {
if (!cast->converter) {
swig_type_info *tc = cast->type;
if (!tc->clientdata) {
SWIG_TypeClientData(tc, clientdata);
}
}
cast = cast->next;
}
}
SWIGRUNTIME void
SWIG_TypeNewClientData(swig_type_info *ti, void *clientdata) {
SWIG_TypeClientData(ti, clientdata);
ti->owndata = 1;
}
/*
Search for a swig_type_info structure only by mangled name
Search is O(log #types)
We start searching at module start, and finish searching when start == end.
Note: if start == end at the beginning of the function, we go all the way around
the circular list.
*/
SWIGRUNTIME swig_type_info *
SWIG_MangledTypeQueryModule(swig_module_info *start,
swig_module_info *end,
const char *name) {
swig_module_info *iter = start;
do {
if (iter->size) {
register size_t l = 0;
register size_t r = iter->size - 1;
do {
/* since l+r >= 0, we can (>> 1) instead (/ 2) */
register size_t i = (l + r) >> 1;
const char *iname = iter->types[i]->name;
if (iname) {
register int compare = strcmp(name, iname);
if (compare == 0) {
return iter->types[i];
} else if (compare < 0) {
if (i) {
r = i - 1;
} else {
break;
}
} else if (compare > 0) {
l = i + 1;
}
} else {
break; /* should never happen */
}
} while (l <= r);
}
iter = iter->next;
} while (iter != end);
return 0;
}
/*
Search for a swig_type_info structure for either a mangled name or a human readable name.
It first searches the mangled names of the types, which is O(log #types).
If a type is not found it then searches the human readable names, which is O(#types).
We start searching at module start, and finish searching when start == end.
Note: if start == end at the beginning of the function, we go all the way around
the circular list.
*/
SWIGRUNTIME swig_type_info *
SWIG_TypeQueryModule(swig_module_info *start,
swig_module_info *end,
const char *name) {
/* STEP 1: Search the name field using binary search */
swig_type_info *ret = SWIG_MangledTypeQueryModule(start, end, name);
if (ret) {
return ret;
} else {
/* STEP 2: If the type hasn't been found, do a complete search
of the str field (the human readable name) */
swig_module_info *iter = start;
do {
register size_t i = 0;
for (; i < iter->size; ++i) {
if (iter->types[i]->str && (SWIG_TypeEquiv(iter->types[i]->str, name)))
return iter->types[i];
}
iter = iter->next;
} while (iter != end);
}
/* neither found a match */
return 0;
}
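/* Hedged sketch (editor's addition): passing the same module as start and
   end walks the whole circular module list; the name may be either the
   mangled form or the human-readable form ("int *" is an assumed example). */
#if 0
SWIGINTERN swig_type_info *SWIG_TypeQueryModule_demo(swig_module_info *m) {
  return SWIG_TypeQueryModule(m, m, "int *");
}
#endif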
/*
Pack binary data into a string
*/
SWIGRUNTIME char *
SWIG_PackData(char *c, void *ptr, size_t sz) {
static const char hex[17] = "0123456789abcdef";
register const unsigned char *u = (unsigned char *) ptr;
register const unsigned char *eu = u + sz;
for (; u != eu; ++u) {
register unsigned char uu = *u;
*(c++) = hex[(uu & 0xf0) >> 4];
*(c++) = hex[uu & 0xf];
}
return c;
}
/*
Unpack binary data from a string
*/
SWIGRUNTIME const char *
SWIG_UnpackData(const char *c, void *ptr, size_t sz) {
register unsigned char *u = (unsigned char *) ptr;
register const unsigned char *eu = u + sz;
for (; u != eu; ++u) {
register char d = *(c++);
register unsigned char uu;
if ((d >= '0') && (d <= '9'))
uu = ((d - '0') << 4);
else if ((d >= 'a') && (d <= 'f'))
uu = ((d - ('a'-10)) << 4);
else
return (char *) 0;
d = *(c++);
if ((d >= '0') && (d <= '9'))
uu |= (d - '0');
else if ((d >= 'a') && (d <= 'f'))
uu |= (d - ('a'-10));
else
return (char *) 0;
*u = uu;
}
return c;
}
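/* Hedged round-trip sketch (editor's addition): SWIG_PackData writes two
   lowercase hex digits per byte, and SWIG_UnpackData reverses it. */
#if 0
SWIGINTERN int SWIG_PackData_demo(void) {
  int value = 42, copy = 0;
  char buf[2*sizeof(int) + 1];
  char *end = SWIG_PackData(buf, &value, sizeof(int));
  *end = 0; /* NUL-terminate the hex string */
  SWIG_UnpackData(buf, &copy, sizeof(int));
  return copy == value; /* 1 */
}
#endif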
/*
Pack 'void *' into a string buffer.
*/
SWIGRUNTIME char *
SWIG_PackVoidPtr(char *buff, void *ptr, const char *name, size_t bsz) {
char *r = buff;
if ((2*sizeof(void *) + 2) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,&ptr,sizeof(void *));
if (strlen(name) + 1 > (bsz - (r - buff))) return 0;
strcpy(r,name);
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackVoidPtr(const char *c, void **ptr, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
*ptr = (void *) 0;
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sizeof(void *));
}
SWIGRUNTIME char *
SWIG_PackDataName(char *buff, void *ptr, size_t sz, const char *name, size_t bsz) {
char *r = buff;
size_t lname = (name ? strlen(name) : 0);
if ((2*sz + 2 + lname) > bsz) return 0;
*(r++) = '_';
r = SWIG_PackData(r,ptr,sz);
if (lname) {
strncpy(r,name,lname+1);
} else {
*r = 0;
}
return buff;
}
SWIGRUNTIME const char *
SWIG_UnpackDataName(const char *c, void *ptr, size_t sz, const char *name) {
if (*c != '_') {
if (strcmp(c,"NULL") == 0) {
memset(ptr,0,sz);
return name;
} else {
return 0;
}
}
return SWIG_UnpackData(++c,ptr,sz);
}
#ifdef __cplusplus
}
#endif
/* Python.h has to appear first */
#include <Python.h>
/* Add PyOS_snprintf for old Pythons */
#if PY_VERSION_HEX < 0x02020000
# if defined(_MSC_VER) || defined(__BORLANDC__) || defined(_WATCOM)
# define PyOS_snprintf _snprintf
# else
# define PyOS_snprintf snprintf
# endif
#endif
/* A crude PyString_FromFormat implementation for old Pythons */
#if PY_VERSION_HEX < 0x02020000
#ifndef SWIG_PYBUFFER_SIZE
# define SWIG_PYBUFFER_SIZE 1024
#endif
static PyObject *
PyString_FromFormat(const char *fmt, ...) {
va_list ap;
char buf[SWIG_PYBUFFER_SIZE * 2];
int res;
va_start(ap, fmt);
res = vsnprintf(buf, sizeof(buf), fmt, ap);
va_end(ap);
return (res < 0 || res >= (int)sizeof(buf)) ? 0 : PyString_FromString(buf);
}
#endif
/* Add PyObject_Del for old Pythons */
#if PY_VERSION_HEX < 0x01060000
# define PyObject_Del(op) PyMem_DEL((op))
#endif
#ifndef PyObject_DEL
# define PyObject_DEL PyObject_Del
#endif
/* A crude PyExc_StopIteration exception for old Pythons */
#if PY_VERSION_HEX < 0x02020000
# ifndef PyExc_StopIteration
# define PyExc_StopIteration PyExc_RuntimeError
# endif
# ifndef PyObject_GenericGetAttr
# define PyObject_GenericGetAttr 0
# endif
#endif
/* Py_NotImplemented is defined in 2.1 and up. */
#if PY_VERSION_HEX < 0x02010000
# ifndef Py_NotImplemented
# define Py_NotImplemented PyExc_RuntimeError
# endif
#endif
/* A crude PyString_AsStringAndSize implementation for old Pythons */
#if PY_VERSION_HEX < 0x02010000
# ifndef PyString_AsStringAndSize
# define PyString_AsStringAndSize(obj, s, len) {*s = PyString_AsString(obj); *len = *s ? strlen(*s) : 0;}
# endif
#endif
/* PySequence_Size for old Pythons */
#if PY_VERSION_HEX < 0x02000000
# ifndef PySequence_Size
# define PySequence_Size PySequence_Length
# endif
#endif
/* PyBool_FromLong for old Pythons */
#if PY_VERSION_HEX < 0x02030000
static
PyObject *PyBool_FromLong(long ok)
{
PyObject *result = ok ? Py_True : Py_False;
Py_INCREF(result);
return result;
}
#endif
/* -----------------------------------------------------------------------------
* error manipulation
* ----------------------------------------------------------------------------- */
SWIGRUNTIME PyObject*
SWIG_Python_ErrorType(int code) {
PyObject* type = 0;
switch(code) {
case SWIG_MemoryError:
type = PyExc_MemoryError;
break;
case SWIG_IOError:
type = PyExc_IOError;
break;
case SWIG_RuntimeError:
type = PyExc_RuntimeError;
break;
case SWIG_IndexError:
type = PyExc_IndexError;
break;
case SWIG_TypeError:
type = PyExc_TypeError;
break;
case SWIG_DivisionByZero:
type = PyExc_ZeroDivisionError;
break;
case SWIG_OverflowError:
type = PyExc_OverflowError;
break;
case SWIG_SyntaxError:
type = PyExc_SyntaxError;
break;
case SWIG_ValueError:
type = PyExc_ValueError;
break;
case SWIG_SystemError:
type = PyExc_SystemError;
break;
case SWIG_AttributeError:
type = PyExc_AttributeError;
break;
default:
type = PyExc_RuntimeError;
}
return type;
}
SWIGRUNTIME void
SWIG_Python_AddErrorMsg(const char* mesg)
{
PyObject *type = 0;
PyObject *value = 0;
PyObject *traceback = 0;
if (PyErr_Occurred()) PyErr_Fetch(&type, &value, &traceback);
if (value) {
PyObject *old_str = PyObject_Str(value);
PyErr_Clear();
Py_XINCREF(type);
PyErr_Format(type, "%s %s", PyString_AsString(old_str), mesg);
Py_DECREF(old_str);
Py_DECREF(value);
} else {
PyErr_SetString(PyExc_RuntimeError, mesg); /* avoid treating mesg as a format string */
}
}
#if defined(SWIG_PYTHON_NO_THREADS)
# if defined(SWIG_PYTHON_THREADS)
# undef SWIG_PYTHON_THREADS
# endif
#endif
#if defined(SWIG_PYTHON_THREADS) /* Threading support is enabled */
# if !defined(SWIG_PYTHON_USE_GIL) && !defined(SWIG_PYTHON_NO_USE_GIL)
# if (PY_VERSION_HEX >= 0x02030000) /* For 2.3 or later, use the PyGILState calls */
# define SWIG_PYTHON_USE_GIL
# endif
# endif
# if defined(SWIG_PYTHON_USE_GIL) /* Use PyGILState threads calls */
# ifndef SWIG_PYTHON_INITIALIZE_THREADS
# define SWIG_PYTHON_INITIALIZE_THREADS PyEval_InitThreads()
# endif
# ifdef __cplusplus /* C++ code */
class SWIG_Python_Thread_Block {
bool status;
PyGILState_STATE state;
public:
void end() { if (status) { PyGILState_Release(state); status = false;} }
SWIG_Python_Thread_Block() : status(true), state(PyGILState_Ensure()) {}
~SWIG_Python_Thread_Block() { end(); }
};
class SWIG_Python_Thread_Allow {
bool status;
PyThreadState *save;
public:
void end() { if (status) { PyEval_RestoreThread(save); status = false; }}
SWIG_Python_Thread_Allow() : status(true), save(PyEval_SaveThread()) {}
~SWIG_Python_Thread_Allow() { end(); }
};
# define SWIG_PYTHON_THREAD_BEGIN_BLOCK SWIG_Python_Thread_Block _swig_thread_block
# define SWIG_PYTHON_THREAD_END_BLOCK _swig_thread_block.end()
# define SWIG_PYTHON_THREAD_BEGIN_ALLOW SWIG_Python_Thread_Allow _swig_thread_allow
# define SWIG_PYTHON_THREAD_END_ALLOW _swig_thread_allow.end()
# else /* C code */
# define SWIG_PYTHON_THREAD_BEGIN_BLOCK PyGILState_STATE _swig_thread_block = PyGILState_Ensure()
# define SWIG_PYTHON_THREAD_END_BLOCK PyGILState_Release(_swig_thread_block)
# define SWIG_PYTHON_THREAD_BEGIN_ALLOW PyThreadState *_swig_thread_allow = PyEval_SaveThread()
# define SWIG_PYTHON_THREAD_END_ALLOW PyEval_RestoreThread(_swig_thread_allow)
# endif
# else /* Old thread way, not implemented, user must provide it */
# if !defined(SWIG_PYTHON_INITIALIZE_THREADS)
# define SWIG_PYTHON_INITIALIZE_THREADS
# endif
# if !defined(SWIG_PYTHON_THREAD_BEGIN_BLOCK)
# define SWIG_PYTHON_THREAD_BEGIN_BLOCK
# endif
# if !defined(SWIG_PYTHON_THREAD_END_BLOCK)
# define SWIG_PYTHON_THREAD_END_BLOCK
# endif
# if !defined(SWIG_PYTHON_THREAD_BEGIN_ALLOW)
# define SWIG_PYTHON_THREAD_BEGIN_ALLOW
# endif
# if !defined(SWIG_PYTHON_THREAD_END_ALLOW)
# define SWIG_PYTHON_THREAD_END_ALLOW
# endif
# endif
#else /* No thread support */
# define SWIG_PYTHON_INITIALIZE_THREADS
# define SWIG_PYTHON_THREAD_BEGIN_BLOCK
# define SWIG_PYTHON_THREAD_END_BLOCK
# define SWIG_PYTHON_THREAD_BEGIN_ALLOW
# define SWIG_PYTHON_THREAD_END_ALLOW
#endif
/* -----------------------------------------------------------------------------
* Python API portion that goes into the runtime
* ----------------------------------------------------------------------------- */
#ifdef __cplusplus
extern "C" {
#if 0
} /* cc-mode */
#endif
#endif
/* -----------------------------------------------------------------------------
* Constant declarations
* ----------------------------------------------------------------------------- */
/* Constant Types */
#define SWIG_PY_POINTER 4
#define SWIG_PY_BINARY 5
/* Constant information structure */
typedef struct swig_const_info {
int type;
char *name;
long lvalue;
double dvalue;
void *pvalue;
swig_type_info **ptype;
} swig_const_info;
#ifdef __cplusplus
#if 0
{ /* cc-mode */
#endif
}
#endif
/* -----------------------------------------------------------------------------
* See the LICENSE file for information on copyright, usage and redistribution
* of SWIG, and the README file for authors - http://www.swig.org/release.html.
*
* pyrun.swg
*
* This file contains the runtime support for Python modules
* and includes code for managing global variables and pointer
* type checking.
*
* ----------------------------------------------------------------------------- */
/* Common SWIG API */
/* for raw pointers */
#define SWIG_Python_ConvertPtr(obj, pptr, type, flags) SWIG_Python_ConvertPtrAndOwn(obj, pptr, type, flags, 0)
#define SWIG_ConvertPtr(obj, pptr, type, flags) SWIG_Python_ConvertPtr(obj, pptr, type, flags)
#define SWIG_ConvertPtrAndOwn(obj,pptr,type,flags,own) SWIG_Python_ConvertPtrAndOwn(obj, pptr, type, flags, own)
#define SWIG_NewPointerObj(ptr, type, flags) SWIG_Python_NewPointerObj(ptr, type, flags)
#define SWIG_CheckImplicit(ty) SWIG_Python_CheckImplicit(ty)
#define SWIG_AcquirePtr(ptr, src) SWIG_Python_AcquirePtr(ptr, src)
#define swig_owntype int
/* for raw packed data */
#define SWIG_ConvertPacked(obj, ptr, sz, ty) SWIG_Python_ConvertPacked(obj, ptr, sz, ty)
#define SWIG_NewPackedObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)
/* for class or struct pointers */
#define SWIG_ConvertInstance(obj, pptr, type, flags) SWIG_ConvertPtr(obj, pptr, type, flags)
#define SWIG_NewInstanceObj(ptr, type, flags) SWIG_NewPointerObj(ptr, type, flags)
/* for C or C++ function pointers */
#define SWIG_ConvertFunctionPtr(obj, pptr, type) SWIG_Python_ConvertFunctionPtr(obj, pptr, type)
#define SWIG_NewFunctionPtrObj(ptr, type) SWIG_Python_NewPointerObj(ptr, type, 0)
/* for C++ member pointers, i.e., member methods */
#define SWIG_ConvertMember(obj, ptr, sz, ty) SWIG_Python_ConvertPacked(obj, ptr, sz, ty)
#define SWIG_NewMemberObj(ptr, sz, type) SWIG_Python_NewPackedObj(ptr, sz, type)
/* Runtime API */
#define SWIG_GetModule(clientdata) SWIG_Python_GetModule()
#define SWIG_SetModule(clientdata, pointer) SWIG_Python_SetModule(pointer)
#define SWIG_NewClientData(obj) PySwigClientData_New(obj)
#define SWIG_SetErrorObj SWIG_Python_SetErrorObj
#define SWIG_SetErrorMsg SWIG_Python_SetErrorMsg
#define SWIG_ErrorType(code) SWIG_Python_ErrorType(code)
#define SWIG_Error(code, msg) SWIG_Python_SetErrorMsg(SWIG_ErrorType(code), msg)
#define SWIG_fail goto fail
/* Runtime API implementation */
/* Error manipulation */
SWIGINTERN void
SWIG_Python_SetErrorObj(PyObject *errtype, PyObject *obj) {
SWIG_PYTHON_THREAD_BEGIN_BLOCK;
PyErr_SetObject(errtype, obj);
Py_DECREF(obj);
SWIG_PYTHON_THREAD_END_BLOCK;
}
SWIGINTERN void
SWIG_Python_SetErrorMsg(PyObject *errtype, const char *msg) {
SWIG_PYTHON_THREAD_BEGIN_BLOCK;
PyErr_SetString(errtype, (char *) msg);
SWIG_PYTHON_THREAD_END_BLOCK;
}
#define SWIG_Python_Raise(obj, type, desc) SWIG_Python_SetErrorObj(SWIG_Python_ExceptionType(desc), obj)
/* Set a constant value */
SWIGINTERN void
SWIG_Python_SetConstant(PyObject *d, const char *name, PyObject *obj) {
PyDict_SetItemString(d, (char*) name, obj);
Py_DECREF(obj);
}
/* Append a value to the result obj */
SWIGINTERN PyObject*
SWIG_Python_AppendOutput(PyObject* result, PyObject* obj) {
#if !defined(SWIG_PYTHON_OUTPUT_TUPLE)
if (!result) {
result = obj;
} else if (result == Py_None) {
Py_DECREF(result);
result = obj;
} else {
if (!PyList_Check(result)) {
PyObject *o2 = result;
result = PyList_New(1);
PyList_SetItem(result, 0, o2);
}
PyList_Append(result,obj);
Py_DECREF(obj);
}
return result;
#else
PyObject* o2;
PyObject* o3;
if (!result) {
result = obj;
} else if (result == Py_None) {
Py_DECREF(result);
result = obj;
} else {
if (!PyTuple_Check(result)) {
o2 = result;
result = PyTuple_New(1);
PyTuple_SET_ITEM(result, 0, o2);
}
o3 = PyTuple_New(1);
PyTuple_SET_ITEM(o3, 0, obj);
o2 = result;
result = PySequence_Concat(o2, o3);
Py_DECREF(o2);
Py_DECREF(o3);
}
return result;
#endif
}
/* Unpack the argument tuple */
SWIGINTERN int
SWIG_Python_UnpackTuple(PyObject *args, const char *name, int min, int max, PyObject **objs)
{
if (!args) {
if (!min && !max) {
return 1;
} else {
PyErr_Format(PyExc_TypeError, "%s expected %s%d arguments, got none",
name, (min == max ? "" : "at least "), min);
return 0;
}
}
if (!PyTuple_Check(args)) {
PyErr_SetString(PyExc_SystemError, "UnpackTuple() argument list is not a tuple");
return 0;
} else {
register int l = PyTuple_GET_SIZE(args);
if (l < min) {
PyErr_Format(PyExc_TypeError, "%s expected %s%d arguments, got %d",
name, (min == max ? "" : "at least "), min, l);
return 0;
} else if (l > max) {
PyErr_Format(PyExc_TypeError, "%s expected %s%d arguments, got %d",
name, (min == max ? "" : "at most "), max, l);
return 0;
} else {
register int i;
for (i = 0; i < l; ++i) {
objs[i] = PyTuple_GET_ITEM(args, i);
}
for (; l < max; ++l) {
objs[l] = 0;
}
return i + 1;
}
}
}
/* A functor is a function object taking a single object argument */
#if PY_VERSION_HEX >= 0x02020000
#define SWIG_Python_CallFunctor(functor, obj) PyObject_CallFunctionObjArgs(functor, obj, NULL);
#else
#define SWIG_Python_CallFunctor(functor, obj) PyObject_CallFunction(functor, "O", obj);
#endif
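/* Hedged sketch (editor's addition): invoking a single-argument Python
   callable through the macro; the returned reference must be released. */
#if 0
SWIGINTERN void SWIG_CallFunctor_demo(PyObject *callable, PyObject *arg) {
  PyObject *res = SWIG_Python_CallFunctor(callable, arg);
  Py_XDECREF(res);
}
#endif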
/*
Helper for static pointer initialization for both C and C++ code, for example
static PyObject *SWIG_STATIC_POINTER(MyVar) = NewSomething(...);
*/
#ifdef __cplusplus
#define SWIG_STATIC_POINTER(var) var
#else
#define SWIG_STATIC_POINTER(var) var = 0; if (!var) var
#endif
/* -----------------------------------------------------------------------------
* Pointer declarations
* ----------------------------------------------------------------------------- */
/* Flags for new pointer objects */
#define SWIG_POINTER_NOSHADOW (SWIG_POINTER_OWN << 1)
#define SWIG_POINTER_NEW (SWIG_POINTER_NOSHADOW | SWIG_POINTER_OWN)
#define SWIG_POINTER_IMPLICIT_CONV (SWIG_POINTER_DISOWN << 1)
#ifdef __cplusplus
extern "C" {
#if 0
} /* cc-mode */
#endif
#endif
/* How to access Py_None */
#if defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__)
# ifndef SWIG_PYTHON_NO_BUILD_NONE
# ifndef SWIG_PYTHON_BUILD_NONE
# define SWIG_PYTHON_BUILD_NONE
# endif
# endif
#endif
#ifdef SWIG_PYTHON_BUILD_NONE
# ifdef Py_None
# undef Py_None
# define Py_None SWIG_Py_None()
# endif
SWIGRUNTIMEINLINE PyObject *
_SWIG_Py_None(void)
{
PyObject *none = Py_BuildValue((char*)"");
Py_DECREF(none);
return none;
}
SWIGRUNTIME PyObject *
SWIG_Py_None(void)
{
static PyObject *SWIG_STATIC_POINTER(none) = _SWIG_Py_None();
return none;
}
#endif
/* The python void return value */
SWIGRUNTIMEINLINE PyObject *
SWIG_Py_Void(void)
{
PyObject *none = Py_None;
Py_INCREF(none);
return none;
}
/* PySwigClientData */
typedef struct {
PyObject *klass;
PyObject *newraw;
PyObject *newargs;
PyObject *destroy;
int delargs;
int implicitconv;
} PySwigClientData;
SWIGRUNTIMEINLINE int
SWIG_Python_CheckImplicit(swig_type_info *ty)
{
PySwigClientData *data = (PySwigClientData *)ty->clientdata;
return data ? data->implicitconv : 0;
}
SWIGRUNTIMEINLINE PyObject *
SWIG_Python_ExceptionType(swig_type_info *desc) {
PySwigClientData *data = desc ? (PySwigClientData *) desc->clientdata : 0;
PyObject *klass = data ? data->klass : 0;
return (klass ? klass : PyExc_RuntimeError);
}
SWIGRUNTIME PySwigClientData *
PySwigClientData_New(PyObject* obj)
{
if (!obj) {
return 0;
} else {
PySwigClientData *data = (PySwigClientData *)malloc(sizeof(PySwigClientData));
/* the klass element */
data->klass = obj;
Py_INCREF(data->klass);
/* the newraw method and newargs arguments used to create a new raw instance */
if (PyClass_Check(obj)) {
data->newraw = 0;
data->newargs = obj;
Py_INCREF(obj);
} else {
#if (PY_VERSION_HEX < 0x02020000)
data->newraw = 0;
#else
data->newraw = PyObject_GetAttrString(data->klass, (char *)"__new__");
#endif
if (data->newraw) {
Py_INCREF(data->newraw);
data->newargs = PyTuple_New(1);
PyTuple_SetItem(data->newargs, 0, obj);
} else {
data->newargs = obj;
}
Py_INCREF(data->newargs);
}
/* the destroy method, a.k.a. the C++ delete method */
data->destroy = PyObject_GetAttrString(data->klass, (char *)"__swig_destroy__");
if (PyErr_Occurred()) {
PyErr_Clear();
data->destroy = 0;
}
if (data->destroy) {
int flags;
Py_INCREF(data->destroy);
flags = PyCFunction_GET_FLAGS(data->destroy);
#ifdef METH_O
data->delargs = !(flags & (METH_O));
#else
data->delargs = 0;
#endif
} else {
data->delargs = 0;
}
data->implicitconv = 0;
return data;
}
}
SWIGRUNTIME void
PySwigClientData_Del(PySwigClientData* data)
{
Py_XDECREF(data->newraw);
Py_XDECREF(data->newargs);
Py_XDECREF(data->destroy);
}
/* =============== PySwigObject =====================*/
typedef struct {
PyObject_HEAD
void *ptr;
swig_type_info *ty;
int own;
PyObject *next;
} PySwigObject;
SWIGRUNTIME PyObject *
PySwigObject_long(PySwigObject *v)
{
return PyLong_FromVoidPtr(v->ptr);
}
SWIGRUNTIME PyObject *
PySwigObject_format(const char* fmt, PySwigObject *v)
{
PyObject *res = NULL;
PyObject *args = PyTuple_New(1);
if (args) {
if (PyTuple_SetItem(args, 0, PySwigObject_long(v)) == 0) {
PyObject *ofmt = PyString_FromString(fmt);
if (ofmt) {
res = PyString_Format(ofmt,args);
Py_DECREF(ofmt);
}
Py_DECREF(args);
}
}
return res;
}
SWIGRUNTIME PyObject *
PySwigObject_oct(PySwigObject *v)
{
return PySwigObject_format("%o",v);
}
SWIGRUNTIME PyObject *
PySwigObject_hex(PySwigObject *v)
{
return PySwigObject_format("%x",v);
}
SWIGRUNTIME PyObject *
#ifdef METH_NOARGS
PySwigObject_repr(PySwigObject *v)
#else
PySwigObject_repr(PySwigObject *v, PyObject *args)
#endif
{
const char *name = SWIG_TypePrettyName(v->ty);
PyObject *hex = PySwigObject_hex(v);
PyObject *repr = PyString_FromFormat("<Swig Object of type '%s' at 0x%s>", name, PyString_AsString(hex));
Py_DECREF(hex);
if (v->next) {
#ifdef METH_NOARGS
PyObject *nrep = PySwigObject_repr((PySwigObject *)v->next);
#else
PyObject *nrep = PySwigObject_repr((PySwigObject *)v->next, args);
#endif
PyString_ConcatAndDel(&repr,nrep);
}
return repr;
}
SWIGRUNTIME int
PySwigObject_print(PySwigObject *v, FILE *fp, int SWIGUNUSEDPARM(flags))
{
#ifdef METH_NOARGS
PyObject *repr = PySwigObject_repr(v);
#else
PyObject *repr = PySwigObject_repr(v, NULL);
#endif
if (repr) {
fputs(PyString_AsString(repr), fp);
Py_DECREF(repr);
return 0;
} else {
return 1;
}
}
SWIGRUNTIME PyObject *
PySwigObject_str(PySwigObject *v)
{
char result[SWIG_BUFFER_SIZE];
return SWIG_PackVoidPtr(result, v->ptr, v->ty->name, sizeof(result)) ?
PyString_FromString(result) : 0;
}
SWIGRUNTIME int
PySwigObject_compare(PySwigObject *v, PySwigObject *w)
{
void *i = v->ptr;
void *j = w->ptr;
return (i < j) ? -1 : ((i > j) ? 1 : 0);
}
SWIGRUNTIME PyTypeObject* _PySwigObject_type(void);
SWIGRUNTIME PyTypeObject*
PySwigObject_type(void) {
static PyTypeObject *SWIG_STATIC_POINTER(type) = _PySwigObject_type();
return type;
}
SWIGRUNTIMEINLINE int
PySwigObject_Check(PyObject *op) {
return ((op)->ob_type == PySwigObject_type())
|| (strcmp((op)->ob_type->tp_name,"PySwigObject") == 0);
}
SWIGRUNTIME PyObject *
PySwigObject_New(void *ptr, swig_type_info *ty, int own);
SWIGRUNTIME void
PySwigObject_dealloc(PyObject *v)
{
PySwigObject *sobj = (PySwigObject *) v;
PyObject *next = sobj->next;
if (sobj->own) {
swig_type_info *ty = sobj->ty;
PySwigClientData *data = ty ? (PySwigClientData *) ty->clientdata : 0;
PyObject *destroy = data ? data->destroy : 0;
if (destroy) {
/* destroy is always a VARARGS method */
PyObject *res;
if (data->delargs) {
/* we need to create a temporary object to carry the destroy operation */
PyObject *tmp = PySwigObject_New(sobj->ptr, ty, 0);
res = SWIG_Python_CallFunctor(destroy, tmp);
Py_DECREF(tmp);
} else {
PyCFunction meth = PyCFunction_GET_FUNCTION(destroy);
PyObject *mself = PyCFunction_GET_SELF(destroy);
res = ((*meth)(mself, v));
}
Py_XDECREF(res);
} else {
const char *name = SWIG_TypePrettyName(ty);
#if !defined(SWIG_PYTHON_SILENT_MEMLEAK)
printf("swig/python detected a memory leak of type '%s', no destructor found.\\n", name);
#endif
}
}
Py_XDECREF(next);
PyObject_DEL(v);
}
SWIGRUNTIME PyObject*
PySwigObject_append(PyObject* v, PyObject* next)
{
PySwigObject *sobj = (PySwigObject *) v;
#ifndef METH_O
PyObject *tmp = 0;
if (!PyArg_ParseTuple(next,(char *)"O:append", &tmp)) return NULL;
next = tmp;
#endif
if (!PySwigObject_Check(next)) {
return NULL;
}
sobj->next = next;
Py_INCREF(next);
return SWIG_Py_Void();
}
SWIGRUNTIME PyObject*
#ifdef METH_NOARGS
PySwigObject_next(PyObject* v)
#else
PySwigObject_next(PyObject* v, PyObject *SWIGUNUSEDPARM(args))
#endif
{
PySwigObject *sobj = (PySwigObject *) v;
if (sobj->next) {
Py_INCREF(sobj->next);
return sobj->next;
} else {
return SWIG_Py_Void();
}
}
SWIGINTERN PyObject*
#ifdef METH_NOARGS
PySwigObject_disown(PyObject *v)
#else
PySwigObject_disown(PyObject* v, PyObject *SWIGUNUSEDPARM(args))
#endif
{
PySwigObject *sobj = (PySwigObject *)v;
sobj->own = 0;
return SWIG_Py_Void();
}
SWIGINTERN PyObject*
#ifdef METH_NOARGS
PySwigObject_acquire(PyObject *v)
#else
PySwigObject_acquire(PyObject* v, PyObject *SWIGUNUSEDPARM(args))
#endif
{
PySwigObject *sobj = (PySwigObject *)v;
sobj->own = SWIG_POINTER_OWN;
return SWIG_Py_Void();
}
SWIGINTERN PyObject*
PySwigObject_own(PyObject *v, PyObject *args)
{
PyObject *val = 0;
#if (PY_VERSION_HEX < 0x02020000)
if (!PyArg_ParseTuple(args,(char *)"|O:own",&val))
#else
if (!PyArg_UnpackTuple(args, (char *)"own", 0, 1, &val))
#endif
{
return NULL;
}
else
{
PySwigObject *sobj = (PySwigObject *)v;
PyObject *obj = PyBool_FromLong(sobj->own);
if (val) {
#ifdef METH_NOARGS
if (PyObject_IsTrue(val)) {
PySwigObject_acquire(v);
} else {
PySwigObject_disown(v);
}
#else
if (PyObject_IsTrue(val)) {
PySwigObject_acquire(v,args);
} else {
PySwigObject_disown(v,args);
}
#endif
}
return obj;
}
}
#ifdef METH_O
static PyMethodDef
swigobject_methods[] = {
{(char *)"disown", (PyCFunction)PySwigObject_disown, METH_NOARGS, (char *)"releases ownership of the pointer"},
{(char *)"acquire", (PyCFunction)PySwigObject_acquire, METH_NOARGS, (char *)"aquires ownership of the pointer"},
{(char *)"own", (PyCFunction)PySwigObject_own, METH_VARARGS, (char *)"returns/sets ownership of the pointer"},
{(char *)"append", (PyCFunction)PySwigObject_append, METH_O, (char *)"appends another 'this' object"},
{(char *)"next", (PyCFunction)PySwigObject_next, METH_NOARGS, (char *)"returns the next 'this' object"},
{(char *)"__repr__",(PyCFunction)PySwigObject_repr, METH_NOARGS, (char *)"returns object representation"},
{0, 0, 0, 0}
};
#else
static PyMethodDef
swigobject_methods[] = {
{(char *)"disown", (PyCFunction)PySwigObject_disown, METH_VARARGS, (char *)"releases ownership of the pointer"},
{(char *)"acquire", (PyCFunction)PySwigObject_acquire, METH_VARARGS, (char *)"aquires ownership of the pointer"},
{(char *)"own", (PyCFunction)PySwigObject_own, METH_VARARGS, (char *)"returns/sets ownership of the pointer"},
{(char *)"append", (PyCFunction)PySwigObject_append, METH_VARARGS, (char *)"appends another 'this' object"},
{(char *)"next", (PyCFunction)PySwigObject_next, METH_VARARGS, (char *)"returns the next 'this' object"},
{(char *)"__repr__",(PyCFunction)PySwigObject_repr, METH_VARARGS, (char *)"returns object representation"},
{0, 0, 0, 0}
};
#endif
#if PY_VERSION_HEX < 0x02020000
SWIGINTERN PyObject *
PySwigObject_getattr(PySwigObject *sobj,char *name)
{
return Py_FindMethod(swigobject_methods, (PyObject *)sobj, name);
}
#endif
SWIGRUNTIME PyTypeObject*
_PySwigObject_type(void) {
static char swigobject_doc[] = "Swig object carries a C/C++ instance pointer";
static PyNumberMethods PySwigObject_as_number = {
(binaryfunc)0, /*nb_add*/
(binaryfunc)0, /*nb_subtract*/
(binaryfunc)0, /*nb_multiply*/
(binaryfunc)0, /*nb_divide*/
(binaryfunc)0, /*nb_remainder*/
(binaryfunc)0, /*nb_divmod*/
(ternaryfunc)0,/*nb_power*/
(unaryfunc)0, /*nb_negative*/
(unaryfunc)0, /*nb_positive*/
(unaryfunc)0, /*nb_absolute*/
(inquiry)0, /*nb_nonzero*/
0, /*nb_invert*/
0, /*nb_lshift*/
0, /*nb_rshift*/
0, /*nb_and*/
0, /*nb_xor*/
0, /*nb_or*/
(coercion)0, /*nb_coerce*/
(unaryfunc)PySwigObject_long, /*nb_int*/
(unaryfunc)PySwigObject_long, /*nb_long*/
(unaryfunc)0, /*nb_float*/
(unaryfunc)PySwigObject_oct, /*nb_oct*/
(unaryfunc)PySwigObject_hex, /*nb_hex*/
#if PY_VERSION_HEX >= 0x02020000
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_true_divide */
#elif PY_VERSION_HEX >= 0x02000000
0,0,0,0,0,0,0,0,0,0,0 /* nb_inplace_add -> nb_inplace_or */
#endif
};
static PyTypeObject pyswigobject_type;
static int type_init = 0;
if (!type_init) {
const PyTypeObject tmp
= {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
(char *)"PySwigObject", /* tp_name */
sizeof(PySwigObject), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)PySwigObject_dealloc, /* tp_dealloc */
(printfunc)PySwigObject_print, /* tp_print */
#if PY_VERSION_HEX < 0x02020000
(getattrfunc)PySwigObject_getattr, /* tp_getattr */
#else
(getattrfunc)0, /* tp_getattr */
#endif
(setattrfunc)0, /* tp_setattr */
(cmpfunc)PySwigObject_compare, /* tp_compare */
(reprfunc)PySwigObject_repr, /* tp_repr */
&PySwigObject_as_number, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
(hashfunc)0, /* tp_hash */
(ternaryfunc)0, /* tp_call */
(reprfunc)PySwigObject_str, /* tp_str */
PyObject_GenericGetAttr, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
swigobject_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#if PY_VERSION_HEX >= 0x02020000
0, /* tp_iter */
0, /* tp_iternext */
swigobject_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0, /* tp_free */
0, /* tp_is_gc */
0, /* tp_bases */
0, /* tp_mro */
0, /* tp_cache */
0, /* tp_subclasses */
0, /* tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
pyswigobject_type = tmp;
pyswigobject_type.ob_type = &PyType_Type;
type_init = 1;
}
return &pyswigobject_type;
}
SWIGRUNTIME PyObject *
PySwigObject_New(void *ptr, swig_type_info *ty, int own)
{
PySwigObject *sobj = PyObject_NEW(PySwigObject, PySwigObject_type());
if (sobj) {
sobj->ptr = ptr;
sobj->ty = ty;
sobj->own = own;
sobj->next = 0;
}
return (PyObject *)sobj;
}
/* -----------------------------------------------------------------------------
 * Implements a simple Swig Packed type, and uses it instead of a string
* ----------------------------------------------------------------------------- */
typedef struct {
PyObject_HEAD
void *pack;
swig_type_info *ty;
size_t size;
} PySwigPacked;
SWIGRUNTIME int
PySwigPacked_print(PySwigPacked *v, FILE *fp, int SWIGUNUSEDPARM(flags))
{
char result[SWIG_BUFFER_SIZE];
fputs("<Swig Packed ", fp);
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
fputs("at ", fp);
fputs(result, fp);
}
fputs(v->ty->name,fp);
fputs(">", fp);
return 0;
}
SWIGRUNTIME PyObject *
PySwigPacked_repr(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))) {
return PyString_FromFormat("<Swig Packed at %s%s>", result, v->ty->name);
} else {
return PyString_FromFormat("<Swig Packed %s>", v->ty->name);
}
}
SWIGRUNTIME PyObject *
PySwigPacked_str(PySwigPacked *v)
{
char result[SWIG_BUFFER_SIZE];
if (SWIG_PackDataName(result, v->pack, v->size, 0, sizeof(result))){
return PyString_FromFormat("%s%s", result, v->ty->name);
} else {
return PyString_FromString(v->ty->name);
}
}
SWIGRUNTIME int
PySwigPacked_compare(PySwigPacked *v, PySwigPacked *w)
{
size_t i = v->size;
size_t j = w->size;
int s = (i < j) ? -1 : ((i > j) ? 1 : 0);
return s ? s : strncmp((char *)v->pack, (char *)w->pack, 2*v->size);
}
SWIGRUNTIME PyTypeObject* _PySwigPacked_type(void);
SWIGRUNTIME PyTypeObject*
PySwigPacked_type(void) {
static PyTypeObject *SWIG_STATIC_POINTER(type) = _PySwigPacked_type();
return type;
}
SWIGRUNTIMEINLINE int
PySwigPacked_Check(PyObject *op) {
return ((op)->ob_type == _PySwigPacked_type())
|| (strcmp((op)->ob_type->tp_name,"PySwigPacked") == 0);
}
SWIGRUNTIME void
PySwigPacked_dealloc(PyObject *v)
{
if (PySwigPacked_Check(v)) {
PySwigPacked *sobj = (PySwigPacked *) v;
free(sobj->pack);
}
PyObject_DEL(v);
}
SWIGRUNTIME PyTypeObject*
_PySwigPacked_type(void) {
static char swigpacked_doc[] = "Swig packed object carries a C/C++ value of a given type";
static PyTypeObject pyswigpacked_type;
static int type_init = 0;
if (!type_init) {
const PyTypeObject tmp
= {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
(char *)"PySwigPacked", /* tp_name */
sizeof(PySwigPacked), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)PySwigPacked_dealloc, /* tp_dealloc */
(printfunc)PySwigPacked_print, /* tp_print */
(getattrfunc)0, /* tp_getattr */
(setattrfunc)0, /* tp_setattr */
(cmpfunc)PySwigPacked_compare, /* tp_compare */
(reprfunc)PySwigPacked_repr, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
(hashfunc)0, /* tp_hash */
(ternaryfunc)0, /* tp_call */
(reprfunc)PySwigPacked_str, /* tp_str */
PyObject_GenericGetAttr, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
swigpacked_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
#if PY_VERSION_HEX >= 0x02020000
0, /* tp_iter */
0, /* tp_iternext */
0, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0, /* tp_free */
0, /* tp_is_gc */
0, /* tp_bases */
0, /* tp_mro */
0, /* tp_cache */
0, /* tp_subclasses */
0, /* tp_weaklist */
#endif
#if PY_VERSION_HEX >= 0x02030000
0, /* tp_del */
#endif
#ifdef COUNT_ALLOCS
0,0,0,0 /* tp_alloc -> tp_next */
#endif
};
pyswigpacked_type = tmp;
pyswigpacked_type.ob_type = &PyType_Type;
type_init = 1;
}
return &pyswigpacked_type;
}
SWIGRUNTIME PyObject *
PySwigPacked_New(void *ptr, size_t size, swig_type_info *ty)
{
PySwigPacked *sobj = PyObject_NEW(PySwigPacked, PySwigPacked_type());
if (sobj) {
void *pack = malloc(size);
if (pack) {
memcpy(pack, ptr, size);
sobj->pack = pack;
sobj->ty = ty;
sobj->size = size;
} else {
PyObject_DEL((PyObject *) sobj);
sobj = 0;
}
}
return (PyObject *) sobj;
}
SWIGRUNTIME swig_type_info *
PySwigPacked_UnpackData(PyObject *obj, void *ptr, size_t size)
{
if (PySwigPacked_Check(obj)) {
PySwigPacked *sobj = (PySwigPacked *)obj;
if (sobj->size != size) return 0;
memcpy(ptr, sobj->pack, size);
return sobj->ty;
} else {
return 0;
}
}
/* -----------------------------------------------------------------------------
* pointers/data manipulation
* ----------------------------------------------------------------------------- */
SWIGRUNTIMEINLINE PyObject *
_SWIG_This(void)
{
return PyString_FromString("this");
}
SWIGRUNTIME PyObject *
SWIG_This(void)
{
static PyObject *SWIG_STATIC_POINTER(swig_this) = _SWIG_This();
return swig_this;
}
/* #define SWIG_PYTHON_SLOW_GETSET_THIS */
SWIGRUNTIME PySwigObject *
SWIG_Python_GetSwigThis(PyObject *pyobj)
{
if (PySwigObject_Check(pyobj)) {
return (PySwigObject *) pyobj;
} else {
PyObject *obj = 0;
#if (!defined(SWIG_PYTHON_SLOW_GETSET_THIS) && (PY_VERSION_HEX >= 0x02030000))
if (PyInstance_Check(pyobj)) {
obj = _PyInstance_Lookup(pyobj, SWIG_This());
} else {
PyObject **dictptr = _PyObject_GetDictPtr(pyobj);
if (dictptr != NULL) {
PyObject *dict = *dictptr;
obj = dict ? PyDict_GetItem(dict, SWIG_This()) : 0;
} else {
#ifdef PyWeakref_CheckProxy
if (PyWeakref_CheckProxy(pyobj)) {
PyObject *wobj = PyWeakref_GET_OBJECT(pyobj);
return wobj ? SWIG_Python_GetSwigThis(wobj) : 0;
}
#endif
obj = PyObject_GetAttr(pyobj,SWIG_This());
if (obj) {
Py_DECREF(obj);
} else {
if (PyErr_Occurred()) PyErr_Clear();
return 0;
}
}
}
#else
obj = PyObject_GetAttr(pyobj,SWIG_This());
if (obj) {
Py_DECREF(obj);
} else {
if (PyErr_Occurred()) PyErr_Clear();
return 0;
}
#endif
if (obj && !PySwigObject_Check(obj)) {
/* the attribute named 'this' is not itself a PySwigObject;
   recurse to find the real PySwigObject inside it */
return SWIG_Python_GetSwigThis(obj);
}
return (PySwigObject *)obj;
}
}
/* Acquire a pointer value */
SWIGRUNTIME int
SWIG_Python_AcquirePtr(PyObject *obj, int own) {
if (own) {
PySwigObject *sobj = SWIG_Python_GetSwigThis(obj);
if (sobj) {
int oldown = sobj->own;
sobj->own = own;
return oldown;
}
}
return 0;
}
/* Convert a pointer value */
SWIGRUNTIME int
SWIG_Python_ConvertPtrAndOwn(PyObject *obj, void **ptr, swig_type_info *ty, int flags, int *own) {
if (!obj) return SWIG_ERROR;
if (obj == Py_None) {
if (ptr) *ptr = 0;
return SWIG_OK;
} else {
PySwigObject *sobj = SWIG_Python_GetSwigThis(obj);
while (sobj) {
void *vptr = sobj->ptr;
if (ty) {
swig_type_info *to = sobj->ty;
if (to == ty) {
/* no type cast needed */
if (ptr) *ptr = vptr;
break;
} else {
swig_cast_info *tc = SWIG_TypeCheck(to->name,ty);
if (!tc) {
sobj = (PySwigObject *)sobj->next;
} else {
if (ptr) *ptr = SWIG_TypeCast(tc,vptr);
break;
}
}
} else {
if (ptr) *ptr = vptr;
break;
}
}
if (sobj) {
if (own) *own = sobj->own;
if (flags & SWIG_POINTER_DISOWN) {
sobj->own = 0;
}
return SWIG_OK;
} else {
int res = SWIG_ERROR;
if (flags & SWIG_POINTER_IMPLICIT_CONV) {
PySwigClientData *data = ty ? (PySwigClientData *) ty->clientdata : 0;
if (data && !data->implicitconv) {
PyObject *klass = data->klass;
if (klass) {
PyObject *impconv;
data->implicitconv = 1; /* avoid recursion and call 'explicit' constructors*/
impconv = SWIG_Python_CallFunctor(klass, obj);
data->implicitconv = 0;
if (PyErr_Occurred()) {
PyErr_Clear();
impconv = 0;
}
if (impconv) {
PySwigObject *iobj = SWIG_Python_GetSwigThis(impconv);
if (iobj) {
void *vptr;
res = SWIG_Python_ConvertPtrAndOwn((PyObject*)iobj, &vptr, ty, 0, 0);
if (SWIG_IsOK(res)) {
if (ptr) {
*ptr = vptr;
/* transfer the ownership to 'ptr' */
iobj->own = 0;
res = SWIG_AddCast(res);
res = SWIG_AddNewMask(res);
} else {
res = SWIG_AddCast(res);
}
}
}
Py_DECREF(impconv);
}
}
}
}
return res;
}
}
}
/* Convert a function ptr value */
SWIGRUNTIME int
SWIG_Python_ConvertFunctionPtr(PyObject *obj, void **ptr, swig_type_info *ty) {
if (!PyCFunction_Check(obj)) {
return SWIG_ConvertPtr(obj, ptr, ty, 0);
} else {
void *vptr = 0;
/* here we get the method pointer for callbacks */
const char *doc = (((PyCFunctionObject *)obj) -> m_ml -> ml_doc);
const char *desc = doc ? strstr(doc, "swig_ptr: ") : 0;
if (desc) {
desc = ty ? SWIG_UnpackVoidPtr(desc + 10, &vptr, ty->name) : 0;
if (!desc) return SWIG_ERROR;
}
if (ty) {
swig_cast_info *tc = SWIG_TypeCheck(desc,ty);
if (!tc) return SWIG_ERROR;
*ptr = SWIG_TypeCast(tc,vptr);
} else {
*ptr = vptr;
}
return SWIG_OK;
}
}
/* Convert a packed value */
SWIGRUNTIME int
SWIG_Python_ConvertPacked(PyObject *obj, void *ptr, size_t sz, swig_type_info *ty) {
swig_type_info *to = PySwigPacked_UnpackData(obj, ptr, sz);
if (!to) return SWIG_ERROR;
if (ty) {
if (to != ty) {
/* verify that a cast from 'to' into 'ty' exists */
swig_cast_info *tc = SWIG_TypeCheck(to->name,ty);
if (!tc) return SWIG_ERROR;
}
}
return SWIG_OK;
}
/* -----------------------------------------------------------------------------
* Create a new pointer object
* ----------------------------------------------------------------------------- */
/*
Create a new instance object, without calling __init__, and set the
'this' attribute.
*/
SWIGRUNTIME PyObject*
SWIG_Python_NewShadowInstance(PySwigClientData *data, PyObject *swig_this)
{
#if (PY_VERSION_HEX >= 0x02020000)
PyObject *inst = 0;
PyObject *newraw = data->newraw;
if (newraw) {
inst = PyObject_Call(newraw, data->newargs, NULL);
if (inst) {
#if !defined(SWIG_PYTHON_SLOW_GETSET_THIS)
PyObject **dictptr = _PyObject_GetDictPtr(inst);
if (dictptr != NULL) {
PyObject *dict = *dictptr;
if (dict == NULL) {
dict = PyDict_New();
*dictptr = dict;
PyDict_SetItem(dict, SWIG_This(), swig_this);
}
}
#else
PyObject *key = SWIG_This();
PyObject_SetAttr(inst, key, swig_this);
#endif
}
} else {
PyObject *dict = PyDict_New();
PyDict_SetItem(dict, SWIG_This(), swig_this);
inst = PyInstance_NewRaw(data->newargs, dict);
Py_DECREF(dict);
}
return inst;
#else
#if (PY_VERSION_HEX >= 0x02010000)
PyObject *inst;
PyObject *dict = PyDict_New();
PyDict_SetItem(dict, SWIG_This(), swig_this);
inst = PyInstance_NewRaw(data->newargs, dict);
Py_DECREF(dict);
return (PyObject *) inst;
#else
PyInstanceObject *inst = PyObject_NEW(PyInstanceObject, &PyInstance_Type);
if (inst == NULL) {
return NULL;
}
inst->in_class = (PyClassObject *)data->newargs;
Py_INCREF(inst->in_class);
inst->in_dict = PyDict_New();
if (inst->in_dict == NULL) {
Py_DECREF(inst);
return NULL;
}
#ifdef Py_TPFLAGS_HAVE_WEAKREFS
inst->in_weakreflist = NULL;
#endif
#ifdef Py_TPFLAGS_GC
PyObject_GC_Init(inst);
#endif
PyDict_SetItem(inst->in_dict, SWIG_This(), swig_this);
return (PyObject *) inst;
#endif
#endif
}
SWIGRUNTIME void
SWIG_Python_SetSwigThis(PyObject *inst, PyObject *swig_this)
{
PyObject *dict;
#if (PY_VERSION_HEX >= 0x02020000) && !defined(SWIG_PYTHON_SLOW_GETSET_THIS)
PyObject **dictptr = _PyObject_GetDictPtr(inst);
if (dictptr != NULL) {
dict = *dictptr;
if (dict == NULL) {
dict = PyDict_New();
*dictptr = dict;
}
PyDict_SetItem(dict, SWIG_This(), swig_this);
return;
}
#endif
dict = PyObject_GetAttrString(inst, (char*)"__dict__");
PyDict_SetItem(dict, SWIG_This(), swig_this);
Py_DECREF(dict);
}
SWIGINTERN PyObject *
SWIG_Python_InitShadowInstance(PyObject *args) {
PyObject *obj[2];
if (!SWIG_Python_UnpackTuple(args,(char*)"swiginit", 2, 2, obj)) {
return NULL;
} else {
PySwigObject *sthis = SWIG_Python_GetSwigThis(obj[0]);
if (sthis) {
PySwigObject_append((PyObject*) sthis, obj[1]);
} else {
SWIG_Python_SetSwigThis(obj[0], obj[1]);
}
return SWIG_Py_Void();
}
}
/* Create a new pointer object */
SWIGRUNTIME PyObject *
SWIG_Python_NewPointerObj(void *ptr, swig_type_info *type, int flags) {
if (!ptr) {
return SWIG_Py_Void();
} else {
int own = (flags & SWIG_POINTER_OWN) ? SWIG_POINTER_OWN : 0;
PyObject *robj = PySwigObject_New(ptr, type, own);
PySwigClientData *clientdata = type ? (PySwigClientData *)(type->clientdata) : 0;
if (clientdata && !(flags & SWIG_POINTER_NOSHADOW)) {
PyObject *inst = SWIG_Python_NewShadowInstance(clientdata, robj);
if (inst) {
Py_DECREF(robj);
robj = inst;
}
}
return robj;
}
}
/* Create a new packed object */
SWIGRUNTIMEINLINE PyObject *
SWIG_Python_NewPackedObj(void *ptr, size_t sz, swig_type_info *type) {
return ptr ? PySwigPacked_New((void *) ptr, sz, type) : SWIG_Py_Void();
}
/* -----------------------------------------------------------------------------*
* Get type list
* -----------------------------------------------------------------------------*/
#ifdef SWIG_LINK_RUNTIME
void *SWIG_ReturnGlobalTypeList(void *);
#endif
SWIGRUNTIME swig_module_info *
SWIG_Python_GetModule(void) {
static void *type_pointer = (void *)0;
/* first check if module already created */
if (!type_pointer) {
#ifdef SWIG_LINK_RUNTIME
type_pointer = SWIG_ReturnGlobalTypeList((void *)0);
#else
type_pointer = PyCObject_Import((char*)"swig_runtime_data" SWIG_RUNTIME_VERSION,
(char*)"type_pointer" SWIG_TYPE_TABLE_NAME);
if (PyErr_Occurred()) {
PyErr_Clear();
type_pointer = (void *)0;
}
#endif
}
return (swig_module_info *) type_pointer;
}
#if PY_MAJOR_VERSION < 2
/* PyModule_AddObject function was introduced in Python 2.0. The following function
is copied out of Python/modsupport.c in python version 2.3.4 */
SWIGINTERN int
PyModule_AddObject(PyObject *m, char *name, PyObject *o)
{
PyObject *dict;
if (!PyModule_Check(m)) {
PyErr_SetString(PyExc_TypeError,
"PyModule_AddObject() needs module as first arg");
return SWIG_ERROR;
}
if (!o) {
PyErr_SetString(PyExc_TypeError,
"PyModule_AddObject() needs non-NULL value");
return SWIG_ERROR;
}
dict = PyModule_GetDict(m);
if (dict == NULL) {
/* Internal error -- modules must have a dict! */
PyErr_Format(PyExc_SystemError, "module '%s' has no __dict__",
PyModule_GetName(m));
return SWIG_ERROR;
}
if (PyDict_SetItemString(dict, name, o))
return SWIG_ERROR;
Py_DECREF(o);
return SWIG_OK;
}
#endif
SWIGRUNTIME void
SWIG_Python_DestroyModule(void *vptr)
{
swig_module_info *swig_module = (swig_module_info *) vptr;
swig_type_info **types = swig_module->types;
size_t i;
for (i = 0; i < swig_module->size; ++i) {
swig_type_info *ty = types[i];
if (ty->owndata) {
PySwigClientData *data = (PySwigClientData *) ty->clientdata;
if (data) PySwigClientData_Del(data);
}
}
Py_DECREF(SWIG_This());
}
SWIGRUNTIME void
SWIG_Python_SetModule(swig_module_info *swig_module) {
static PyMethodDef swig_empty_runtime_method_table[] = { {NULL, NULL, 0, NULL} };/* Sentinel */
PyObject *module = Py_InitModule((char*)"swig_runtime_data" SWIG_RUNTIME_VERSION,
swig_empty_runtime_method_table);
PyObject *pointer = PyCObject_FromVoidPtr((void *) swig_module, SWIG_Python_DestroyModule);
if (pointer && module) {
PyModule_AddObject(module, (char*)"type_pointer" SWIG_TYPE_TABLE_NAME, pointer);
} else {
Py_XDECREF(pointer);
}
}
/* The python cached type query */
SWIGRUNTIME PyObject *
SWIG_Python_TypeCache(void) {
static PyObject *SWIG_STATIC_POINTER(cache) = PyDict_New();
return cache;
}
SWIGRUNTIME swig_type_info *
SWIG_Python_TypeQuery(const char *type)
{
PyObject *cache = SWIG_Python_TypeCache();
PyObject *key = PyString_FromString(type);
PyObject *obj = PyDict_GetItem(cache, key);
swig_type_info *descriptor;
if (obj) {
descriptor = (swig_type_info *) PyCObject_AsVoidPtr(obj);
} else {
swig_module_info *swig_module = SWIG_Python_GetModule();
descriptor = SWIG_TypeQueryModule(swig_module, swig_module, type);
if (descriptor) {
obj = PyCObject_FromVoidPtr(descriptor, NULL);
PyDict_SetItem(cache, key, obj);
Py_DECREF(obj);
}
}
Py_DECREF(key);
return descriptor;
}
/*
For backward compatibility only
*/
#define SWIG_POINTER_EXCEPTION 0
#define SWIG_arg_fail(arg) SWIG_Python_ArgFail(arg)
#define SWIG_MustGetPtr(p, type, argnum, flags) SWIG_Python_MustGetPtr(p, type, argnum, flags)
SWIGRUNTIME int
SWIG_Python_AddErrMesg(const char* mesg, int infront)
{
if (PyErr_Occurred()) {
PyObject *type = 0;
PyObject *value = 0;
PyObject *traceback = 0;
PyErr_Fetch(&type, &value, &traceback);
if (value) {
PyObject *old_str = PyObject_Str(value);
Py_XINCREF(type);
PyErr_Clear();
if (infront) {
PyErr_Format(type, "%s %s", mesg, PyString_AsString(old_str));
} else {
PyErr_Format(type, "%s %s", PyString_AsString(old_str), mesg);
}
Py_DECREF(old_str);
}
return 1;
} else {
return 0;
}
}
SWIGRUNTIME int
SWIG_Python_ArgFail(int argnum)
{
if (PyErr_Occurred()) {
/* add information about failing argument */
char mesg[256];
PyOS_snprintf(mesg, sizeof(mesg), "argument number %d:", argnum);
return SWIG_Python_AddErrMesg(mesg, 1);
} else {
return 0;
}
}
SWIGRUNTIMEINLINE const char *
PySwigObject_GetDesc(PyObject *self)
{
PySwigObject *v = (PySwigObject *)self;
swig_type_info *ty = v ? v->ty : 0;
return ty ? ty->str : (char*)"";
}
SWIGRUNTIME void
SWIG_Python_TypeError(const char *type, PyObject *obj)
{
if (type) {
#if defined(SWIG_COBJECT_TYPES)
if (obj && PySwigObject_Check(obj)) {
const char *otype = (const char *) PySwigObject_GetDesc(obj);
if (otype) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, 'PySwigObject(%s)' is received",
type, otype);
return;
}
} else
#endif
{
const char *otype = (obj ? obj->ob_type->tp_name : 0);
if (otype) {
PyObject *str = PyObject_Str(obj);
const char *cstr = str ? PyString_AsString(str) : 0;
if (cstr) {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s(%s)' is received",
type, otype, cstr);
} else {
PyErr_Format(PyExc_TypeError, "a '%s' is expected, '%s' is received",
type, otype);
}
Py_XDECREF(str);
return;
}
}
PyErr_Format(PyExc_TypeError, "a '%s' is expected", type);
} else {
PyErr_Format(PyExc_TypeError, "unexpected type is received");
}
}
/* Convert a pointer value, signal an exception on a type mismatch */
SWIGRUNTIME void *
SWIG_Python_MustGetPtr(PyObject *obj, swig_type_info *ty, int argnum, int flags) {
void *result;
if (SWIG_Python_ConvertPtr(obj, &result, ty, flags) == -1) {
PyErr_Clear();
if (flags & SWIG_POINTER_EXCEPTION) {
SWIG_Python_TypeError(SWIG_TypePrettyName(ty), obj);
SWIG_Python_ArgFail(argnum);
}
}
return result;
}
#ifdef __cplusplus
#if 0
{ /* cc-mode */
#endif
}
#endif
/* -----------------------------------------------------------------------------*
Standard SWIG API for use inside user code.
Don't include this file directly, run the command
swig -python -external-runtime
Also, read the Modules chapter of the SWIG Manual.
* -----------------------------------------------------------------------------*/
#ifdef SWIG_MODULE_CLIENTDATA_TYPE
SWIGRUNTIMEINLINE swig_type_info *
SWIG_TypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {
swig_module_info *module = SWIG_GetModule(clientdata);
return SWIG_TypeQueryModule(module, module, name);
}
SWIGRUNTIMEINLINE swig_type_info *
SWIG_MangledTypeQuery(SWIG_MODULE_CLIENTDATA_TYPE clientdata, const char *name) {
swig_module_info *module = SWIG_GetModule(clientdata);
return SWIG_MangledTypeQueryModule(module, module, name);
}
#else
SWIGRUNTIMEINLINE swig_type_info *
SWIG_TypeQuery(const char *name) {
swig_module_info *module = SWIG_GetModule(NULL);
return SWIG_TypeQueryModule(module, module, name);
}
SWIGRUNTIMEINLINE swig_type_info *
SWIG_MangledTypeQuery(const char *name) {
swig_module_info *module = SWIG_GetModule(NULL);
return SWIG_MangledTypeQueryModule(module, module, name);
}
#endif
"""
|
beiko-lab/gengis
|
bin/Lib/site-packages/scipy/weave/swigptr2.py
|
Python
|
gpl-3.0
| 163,306
|
import networkx as nx
import EoN
from EoN import *
import matplotlib.pyplot as plt
import numpy as np
import scipy
import random
from collections import defaultdict
colors = ['#5AB3E6', '#FF2000', '#009A80', '#E69A00', '#CD9AB3', '#0073B3', '#F0E442']
class TestSample:
@classmethod
def setup_class(cls):
print("setup_class() before any methods in this class")
@classmethod
def teardown_class(cls):
print("teardown_class() after any methods in this class")
def test_discrete_SIR(self):
print('testing discrete_SIR')
passed = True
G = nx.fast_gnp_random_graph(1000, 0.004)
def test_trans_fxn(u, v):
'''
Transmission occurs only between an odd and an even node, so the
effective contact pattern is bipartite.
'''
return (u + v) % 2 == 1
sim = EoN.discrete_SIR(G, test_transmission=test_trans_fxn, args=(), initial_infecteds=[0, 2, 4, 6, 8, 10],
return_full_data=True)
# by initial condition and transmission rule, infection generations alternate parity.
for node in G:
if 'I' in sim.node_history(node)[1]:
idx = sim.node_history(node)[1].index('I')
if (node + sim.node_history(node)[0][idx]) % 2 == 1: # should always be False
print('Error', node, sim.node_history(node))
passed = False
print('number infected', sim.R()[-1])
if not passed:
print('failed')
else:
print('passed')
assert passed
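# Hedged sketch (editor's addition): a minimal standalone run of
# EoN.basic_discrete_SIR outside this test harness, assuming EoN and
# networkx are installed; rho is the initial infected fraction.
#
#   G = nx.fast_gnp_random_graph(10000, 5. / 10000)
#   t, S, I, R = EoN.basic_discrete_SIR(G, p=0.3, rho=0.01)
#   plt.plot(t, I)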
def test_basic_discrete_SIR(self):
print('testing basic_discrete_SIR, percolation_based_discrete_SIR, and EBCM_discrete_from_graph')
plt.clf()
N = 1000000
initial_count = 5000
G = nx.fast_gnp_random_graph(N, 4. / N)
p = 0.4
sim = EoN.basic_discrete_SIR(G, p, initial_infecteds=range(initial_count), return_full_data=True)
t, S, I, R = sim.summary()
sim2 = EoN.percolation_based_discrete_SIR(G, p, initial_infecteds=range(initial_count), return_full_data=True)
t2, S2, I2, R2 = sim2.summary()
t3, S3, I3, R3 = EoN.EBCM_discrete_from_graph(G, p, rho=float(initial_count) / N)
t4, S4, I4, R4 = EoN.EBCM_discrete_from_graph(G, p, initial_infecteds=range(initial_count))
print(t)
print(S)
print(I)
print(R)
print(t[0:4], S[0:4], I[0:4], R[0:4])
print(t2[0:4], S2[0:4], I2[0:4], R2[0:4])
print(t3[0:4], S3[0:4], I3[0:4], R3[0:4])
print(t4[0:4], S4[0:4], I4[0:4], R4[0:4])
plt.plot(t, S, label='basic sim', alpha=0.3)
plt.plot(t, I, alpha=0.3)
plt.plot(t, R, alpha=0.3)
plt.plot(t2, S2, '--', label='percolation based', alpha=0.3)
plt.plot(t2, I2, '--', alpha=0.3)
plt.plot(t2, R2, '--', alpha=0.3)
plt.plot(t3, S3, '-.', label='Discrete EBCM', alpha=0.3)
plt.plot(t3, I3, '-.', alpha=0.3)
plt.plot(t3, R3, '-.', alpha=0.3)
plt.plot(t4, S4, ':', label='Discrete EBCM 2', alpha=0.3)
plt.plot(t4, I4, ':', alpha=0.3)
plt.plot(t4, R4, ':', alpha=0.3)
plt.legend(loc='upper right')
filename = 'basic_discrete_SIR_test'
plt.savefig(filename)
print("check {} for good match".format(filename))
def test_estimate_SIR_prob_size(self):
print('testing estimate_SIR_prob_size')
N = 1000000
G = nx.fast_gnp_random_graph(N, 5. / N)
for p in np.linspace(0.1, 0.5, 5):  # np.linspace: scipy.linspace was removed from modern SciPy
P, A = EoN.estimate_SIR_prob_size(G, p)
gamma = 1.
tau = p * gamma / (1 - p)
P2, A2 = EoN.estimate_directed_SIR_prob_size(G, tau, 1.0)
t, S, I, R = EoN.EBCM_discrete_from_graph(G, p)
print("should all be approximately the same: ", R[-1] / G.order(), A, A2)
def test_SIR_dynamics(self):
print("test_SIR_dynamics")
plt.clf()
reduced_report = np.linspace(0, 15, 31)
G = nx.configuration_model([1, 5, 10] * 100000)
N = G.order()
initial_size = 10000
gamma = 1.
tau = 0.3
t, S, I, R = EoN.fast_SIR(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, label='fast_SIR', alpha=0.3)
plt.plot(t, I, alpha=0.3)
plt.plot(t, R, alpha=0.3)
t, S, I, R = EoN.Gillespie_SIR(G, tau, gamma, initial_infecteds=range(initial_size))
plt.plot(t, S, '--', label='Gillespie_SIR', alpha=0.3)
plt.plot(t, I, '--', alpha=0.3)
plt.plot(t, R, '--', alpha=0.3)
t, S, I, R = EoN.EBCM_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, ':', label='EBCM', alpha=0.3)
plt.plot(t, I, ':', alpha=0.3)
plt.plot(t, R, ':', alpha=0.3)
t, S, I, R = EoN.EBCM_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, 'x', label='EBCM', alpha=0.3)
plt.plot(t, I, 'x', alpha=0.3)
plt.plot(t, R, 'x', alpha=0.3)
t, S, I, R = EoN.SIR_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '-.', label='compact pairwise', alpha=0.3)
plt.plot(t, I, '-.', alpha=0.3)
plt.plot(t, R, '-.', alpha=0.3)
t, S, I, R = EoN.SIR_compact_pairwise_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, 's', label='compact pairwise', alpha=0.3)
plt.plot(t, I, 's', alpha=0.3)
plt.plot(t, R, 's', alpha=0.3)
t, S, I, R = EoN.SIR_super_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '.', label='super compact pairwise', alpha=0.3)
plt.plot(t, I, '.', alpha=0.3)
plt.plot(t, R, '.', alpha=0.3)
t, S, I, R = EoN.SIR_super_compact_pairwise_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, 'd', label='super compact pairwise', alpha=0.3)
plt.plot(t, I, 'd', alpha=0.3)
plt.plot(t, R, 'd', alpha=0.3)
t, S, I, R = EoN.SIR_effective_degree_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, 'x', label='effective degree', alpha=0.3)
plt.plot(t, I, 'x', alpha=0.3)
plt.plot(t, R, 'x', alpha=0.3)
t, S, I, R = EoN.SIR_effective_degree_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, '^', label='effective degree', alpha=0.3)
plt.plot(t, I, '^', alpha=0.3)
plt.plot(t, R, '^', alpha=0.3)
t, S, I, R = EoN.SIR_compact_effective_degree_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '+', label='compact effective degree', alpha=0.3)
plt.plot(t, I, '+', alpha=0.3)
plt.plot(t, R, '+', alpha=0.3)
t, S, I, R = EoN.SIR_compact_effective_degree_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, 'v', label='compact effective degree', alpha=0.3)
plt.plot(t, I, 'v', alpha=0.3)
plt.plot(t, R, 'v', alpha=0.3)
t, S, I, R = EoN.SIR_heterogeneous_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, 'o', label='heterogeneous pairwise', alpha=0.3)
plt.plot(t, I, 'o', alpha=0.3)
plt.plot(t, R, 'o', alpha=0.3)
t, S, I, R = EoN.SIR_heterogeneous_pairwise_from_graph(G, tau, gamma, initial_infecteds=range(initial_size))
S, I, R = EoN.subsample(reduced_report, t, S, I, R)
t = reduced_report
plt.plot(t, S, '>', label='heterogeneous pairwise', alpha=0.3)
plt.plot(t, I, '>', alpha=0.3)
plt.plot(t, R, '>', alpha=0.3)
# t, S, I, R = EoN.SIR_heterogeneous_meanfield_from_graph(G, tau, gamma, rho = float(initial_size)/N)
# plt.plot(t, S, '-.', label = 'heterogeneous meanfield')
# plt.plot(t, I, '-.')
# plt.plot(t, R, '-.')
plt.axis(xmin=-20, xmax=15)
plt.legend(loc='center left')
plt.savefig('SIR_dynamics')
def test_SIS_dynamics(self):
print("test_SIS_dynamics")
plt.clf()
reduced_report = scipy.linspace(0, 15, 31)
G = nx.configuration_model([1, 5, 10] * 100000)
G = nx.Graph(G)
        G.remove_edges_from(nx.selfloop_edges(G))  # Graph.selfloop_edges() was removed in networkx 2.4
N = G.order()
initial_size = 5000
gamma = 1.
tau = 0.3
print('\tfast_SIS')
t, S, I = EoN.fast_SIS(G, tau, gamma, initial_infecteds=range(initial_size), tmax=15)
plt.plot(t, S, label='fast_SIS', alpha=0.3)
plt.plot(t, I, alpha=0.3)
print('\tfast_nonMarkov_SIS')
def trans_time_fxn(source, target, rec_delay, tau):
r = []
d = random.expovariate(tau)
while d < rec_delay:
r.append(d)
d += random.expovariate(tau)
return r
def rec_time_fxn(u, gamma):
return random.expovariate(gamma)
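        # (Note inferred from the two functions above, not quoted from EoN's docs:)
        # trans_time_fxn returns every delay after infection at which this source
        # would transmit to one neighbour before its recovery delay rec_delay
        # elapses, and rec_time_fxn draws that recovery delay. With exponential
        # draws at rates tau and gamma this reproduces the Markovian SIS model,
        # which is why the dotted curves should track fast_SIS above.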
t, S, I = EoN.fast_nonMarkov_SIS(G, trans_time_fxn, rec_time_fxn, trans_time_args=(tau,), rec_time_args=(gamma,),
initial_infecteds=range(initial_size), tmax=15)
plt.plot(t, S, ':', label='fast_nonMarkov_SIS', alpha=0.3)
plt.plot(t, I, ':', alpha=0.3)
print('\tGillespie_SIS')
t, S, I = EoN.Gillespie_SIS(G, tau, gamma, initial_infecteds=range(initial_size), tmax=15)
plt.plot(t, S, '--', label='Gillespie_SIS', alpha=0.3)
plt.plot(t, I, '--', alpha=0.3)
print('\tSIS_compact_pairwise')
t, S, I = EoN.SIS_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '-.', label='compact pairwise', alpha=0.3)
plt.plot(t, I, '-.', alpha=0.3)
t, S, I = EoN.SIS_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
S, I = EoN.subsample(reduced_report, t, S, I)
plt.plot(reduced_report, S, 's', label='compact pairwise', alpha=0.3)
plt.plot(reduced_report, I, 's', alpha=0.3)
print('\tSIS_super_compact_pairwise')
t, S, I = EoN.SIS_super_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, 'o', label='super compact pairwise', alpha=0.3)
plt.plot(t, I, 'o', alpha=0.3)
t, S, I = EoN.SIS_super_compact_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
S, I = EoN.subsample(reduced_report, t, S, I)
plt.plot(reduced_report, S, 'd', label='super compact pairwise', alpha=0.3)
plt.plot(reduced_report, I, 'd', alpha=0.3)
print('\tSIS_effective_degree')
t, S, I = EoN.SIS_effective_degree_from_graph(G, tau, gamma, tmax=15, rho=float(initial_size) / N)
plt.plot(t, S, 'x', label='effective degree', alpha=0.3)
plt.plot(t, I, 'x', alpha=0.3)
t, S, I = EoN.SIS_effective_degree_from_graph(G, tau, gamma, tmax=15, rho=float(initial_size) / N)
S, I = EoN.subsample(reduced_report, t, S, I)
plt.plot(reduced_report, S, '^', label='effective degree', alpha=0.3)
plt.plot(reduced_report, I, '^', alpha=0.3)
print('\tSIS_compact_effective_degree')
t, S, I = EoN.SIS_compact_effective_degree_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '+', label='compact effective degree', alpha=0.3)
plt.plot(t, I, '+', alpha=0.3)
t, S, I = EoN.SIS_compact_effective_degree_from_graph(G, tau, gamma, rho=float(initial_size) / N)
S, I = EoN.subsample(reduced_report, t, S, I)
plt.plot(reduced_report, S, '>', label='compact effective degree', alpha=0.3)
plt.plot(reduced_report, I, '>', alpha=0.3)
print('\tSIS_heterogeneous_pairwise')
t, S, I = EoN.SIS_heterogeneous_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
plt.plot(t, S, '.', label='heterogeneous pairwise', alpha=0.3)
plt.plot(t, I, '.', alpha=0.3)
t, S, I = EoN.SIS_heterogeneous_pairwise_from_graph(G, tau, gamma, rho=float(initial_size) / N)
S, I = EoN.subsample(reduced_report, t, S, I)
plt.plot(reduced_report, S, 'v', label='heterogeneous pairwise', alpha=0.3)
plt.plot(reduced_report, I, 'v', alpha=0.3)
# t, S, I = EoN.SIS_heterogeneous_meanfield_from_graph(G, tau, gamma, rho = float(initial_size)/N)
# plt.plot(t, S, '-.', label = 'heterogeneous meanfield')
# plt.plot(t, I, '-.')
plt.axis(ymin=0, xmax=15)
plt.legend()
print('saving figure')
plt.savefig('SIS_dynamics.pdf')
def test_SIS_simulations(self):
print("test_SIS_simulations")
plt.clf()
tau = 0.1
gamma = 0.3
G = nx.configuration_model([1, 5, 10] * 100000)
G = nx.Graph(G)
        G.remove_edges_from(nx.selfloop_edges(G))  # Graph.selfloop_edges() was removed in networkx 2.4
N = G.order()
initial_size = 5000
for counter in range(10):
print('fast_SIS')
t, S, I = EoN.fast_SIS(G, tau, gamma, initial_infecteds=range(initial_size), tmax=20)
plt.plot(t, S, '-.', color='b', alpha=0.3)
plt.plot(t, I, '-.', color='b', alpha=0.3)
print('Gillespie_SIS')
t, S, I = EoN.Gillespie_SIS(G, tau, gamma, initial_infecteds=range(initial_size), tmax=20)
plt.plot(t, S, '--', color='r', alpha=0.3)
plt.plot(t, I, '--', color='r', alpha=0.3)
        plt.title('curves should overlie, showing that event-driven and Gillespie agree')
plt.savefig('SIS_sims')
def test_SIR_final_sizes(self):
print("test_SIR_final_sizes")
plt.clf()
G = nx.configuration_model([3, 6, 3, 6, 20] * 10000)
N = G.order()
tau = 0.2
gamma = 1
t, S, I, R = EoN.fast_SIR(G, tau, gamma, initial_infecteds=range(5000))
plt.plot(t, S, color=colors[0], label='simulation', alpha=0.3)
plt.plot(t, I, color=colors[0], alpha=0.3)
plt.plot(t, R, color=colors[0], alpha=0.3)
infected_nodes = EoN.get_infected_nodes(G, tau, gamma, initial_infecteds=range(5000))
A = len(infected_nodes)
print(A)
plt.plot([0, 10], [A, A], label=r'percolation based', alpha=0.3)
A = EoN.Attack_rate_cts_time_from_graph(G, tau, gamma, rho=0.1)
plt.plot([0, 10], [A * N, A * N], '-.', label='analytic', alpha=0.3)
plt.legend(loc='upper right')
plt.savefig('test_SIR_final_sizes')
# ======================================================================
# ERROR: EoN.tests.test_from_joel.test_SIR_individual_based
# ----------------------------------------------------------------------
# Traceback (most recent call last):
# File "c:\users\tting\appdata\local\programs\python\python36\lib\site-packages\nose\case.py", line 198, in runTest
# self.test(*self.arg)
# File "C:\GitRepos\tinghf_Math_Epidemics_Networks\EoN\tests\test_from_joel.py", line 347, in test_SIR_individual_based
# t, S, I, R = EoN.SIR_individual_based(G, nodelist, X, Y, tau, gamma=gamma, tmax=20)
# TypeError: SIR_individual_based() got multiple values for argument 'gamma'
# test_SIR_individual_based
# def test_SIR_individual_based():
# print("test_SIR_individual_based")
# plt.clf()
#
# G = nx.configuration_model([3, 10] * 10000)
# tau = 0.3
# gamma = 1
# N = G.order()
#
# initial_infecteds = scipy.random.choice(G.nodes(), size=100, replace=False)
# rho = len(initial_infecteds) * 1. / N
#
# t, S, I, R = EoN.fast_SIR(G, tau, gamma, initial_infecteds=initial_infecteds)
# plt.plot(t, S, '--', label='simulation', alpha=0.3)
# plt.plot(t, I, '--', alpha=0.3)
# plt.plot(t, R, '--', alpha=0.3)
# nodelist = G.nodes()
# X = (1 - rho) * scipy.ones(N)
# Y = rho * scipy.ones(N)
#
# t, S, I, R = EoN.SIR_individual_based(G, nodelist, X, Y, tau, gamma=gamma, tmax=20)
#
# plt.plot(t, S, label='individual-based equations', alpha=0.3)
# plt.plot(t, I, alpha=0.3)
# plt.plot(t, R, alpha=0.3)
# plt.legend(loc='upper right', alpha=0.3)
# plt.savefig('SIR_individual_based')
# ======================================================================
# ERROR: EoN.tests.test_from_joel.test_SIS_individual_based
# ----------------------------------------------------------------------
# Traceback (most recent call last):
# File "c:\users\tting\appdata\local\programs\python\python36\lib\site-packages\nose\case.py", line 198, in runTest
# self.test(*self.arg)
# File "C:\GitRepos\tinghf_Math_Epidemics_Networks\EoN\tests\test_from_joel.py", line 363, in test_SIS_individual_based
# t, S, I = EoN.SIS_individual_based(G, nodelist, Y, 0.3, gamma=1, tmax=20)
# TypeError: SIS_individual_based() got multiple values for argument 'gamma'
# def test_SIS_individual_based():
# print('test_SIS_individual_based')
# G = nx.configuration_model([3, 10] * 1000)
# nodelist = G.nodes()
# N = G.order()
# rho = 1. / N
# Y = rho * scipy.ones(N)
# t, S, I = EoN.SIS_individual_based(G, nodelist, Y, 0.3, gamma=1, tmax=20)
# plt.clf()
# plt.plot(t, S)
# plt.plot(t, I)
# plt.savefig('SIS_individual_based')
def test_pair_based(self):
print("test_pair_based")
G = nx.fast_gnp_random_graph(1000, 0.004)
nodelist = G.nodes()
Y0 = scipy.array([1 if node < 10 else 0 for node in nodelist])
print('testing SIS_pair_based')
t, S, I = EoN.SIS_pair_based(G, 2, 0.5, rho=0.01, tmax=5, tcount=101)
print('creating SIS fig')
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.savefig('SIS_pair_based')
print('testing SIR_pair_based')
t, S, I, R = EoN.SIR_pair_based(G, 2, 0.5, tmax=5, tcount=101)
print('creating SIR fig')
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.plot(t, R)
plt.savefig('SIR_pair_based')
# ======================================================================
# ERROR: EoN.tests.test_from_joel.test_SIR_pair_based2
# ----------------------------------------------------------------------
# Traceback (most recent call last):
# File "c:\users\tting\appdata\local\programs\python\python36\lib\site-packages\nose\case.py", line 198, in runTest
# self.test(*self.arg)
# File "C:\GitRepos\tinghf_Math_Epidemics_Networks\EoN\tests\test_from_joel.py", line 401, in test_SIR_pair_based2
# t, S, I, R = EoN.SIR_pair_based2(G, tau, gamma=gamma, nodelist=nodelist, Y0=Y0, tmax=5, tcount=101)
# AttributeError: module 'EoN' has no attribute 'SIR_pair_based2'
# def test_SIR_pair_based2():
# print("test_SIR_pair_based2")
# G = nx.fast_gnp_random_graph(1000, 0.004)
# nodelist = G.nodes()
# Y0 = scipy.array([1 if node < 10 else 0 for node in nodelist])
# tau = 2
# gamma = 0.5
# t, S, I, R = EoN.SIR_pair_based2(G, tau, gamma=gamma, nodelist=nodelist, Y0=Y0, tmax=5, tcount=101)
# print('creating SIR fig2')
# plt.clf()
# plt.plot(t, S)
# plt.plot(t, I)
# plt.plot(t, R)
# plt.savefig('SIR_pair_based2')
# print('done with pair_based2')
def test_SIS_homogeneous_meanfield(self):
print("testing SIS_homogeneous_meanfield")
S0 = 99
I0 = 1
n = 1
tau = 3
gamma = 2
t, S, I = EoN.SIS_homogeneous_meanfield(S0, I0, n, tau, gamma, tmax=10)
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.savefig('SIS_homogeneous_meanfield')
def test_SIR_homogeneous_meanfield(self):
print("testing SIR_homogeneous_meanfield")
S0 = 97
I0 = 2
R0 = 1
n = 1
tau = 3
gamma = 2
t, S, I, R = EoN.SIR_homogeneous_meanfield(S0, I0, R0, n, tau, gamma, tmax=10)
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.plot(t, R)
plt.savefig('SIR_homogeneous_meanfield')
def test_SIS_homogeneous_pairwise(self):
print("test_SIS_homogeneous_pairwise")
S0 = 99
I0 = 1
SI0 = 10
SS0 = 980
n = 10
tau = 1
gamma = 5
t, S, I = EoN.SIS_homogeneous_pairwise(S0, I0, SI0, SS0, n, tau, gamma, tmax=5)
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.savefig('SIS_homogeneous_pairwise')
def test_SIR_homogeneous_pairwise(self):
print("test_SIR_homogeneous_pairwise")
S0 = 97
I0 = 2
R0 = 1
SI0 = 18
SS0 = 941
n = 10
tau = 0.4
gamma = 1
t, S, I, R = EoN.SIR_homogeneous_pairwise(S0, I0, R0, SI0, SS0, n, tau, gamma, tmax=10)
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.plot(t, R)
plt.savefig('SIR_homogeneous_pairwise')
def test_SIS_heterogeneous_meanfield(self):
print("testing SIS_heterogeneous_meanfield")
Sk0 = scipy.arange(100) * 100
Ik0 = scipy.arange(100)
t, S, I = EoN.SIS_heterogeneous_meanfield(Sk0, Ik0, 1, 10, tmax=1)
print("plotting SIS_heterogeneous_meanfield")
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.savefig('SIS_heterogeneous_meanfield')
def test_SIR_heterogeneous_meanfield(self):
print("testing SIR_heterogeneous_meanfield")
Sk0 = scipy.arange(100) * 100
Ik0 = scipy.arange(100)
Rk0 = 0 * scipy.arange(100)
t, S, I, R = EoN.SIR_heterogeneous_meanfield(Sk0, Ik0, Rk0, 0.1, 5, tmax=5)
print("plotting SIR_heterogeneous_meanfield")
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.plot(t, R)
plt.savefig('SIR_heterogeneous_meanfield')
def test_SIS_heterogeneous_pairwise(self):
print("test_SIS_heterogeneous_pairwise")
        # the graph is two stars, each with 3 "leaves"; one star's central node is infected
SkSl0 = scipy.array([[0, 0, 0, 0], [0, 0, 0, 3], [0, 0, 0, 0], [0, 3, 0, 0]])
SkIl0 = scipy.array([[0, 0, 0, 0], [0, 0, 0, 3], [0, 0, 0, 0], [0, 0, 0, 0]])
IkIl0 = scipy.zeros((4, 4))
print((SkSl0 + SkIl0).T / (scipy.array([1, 0, 0, 0]) + scipy.arange(4.)))
Sk0 = sum((SkSl0 + SkIl0).T / (scipy.array([1, 0, 0, 0]) + scipy.arange(4.)))
Ik0 = sum((SkIl0.T + IkIl0).T / (scipy.array([1, 0, 0, 0]) + scipy.arange(4.)))
Sk0[0] = 1
print('Sk0', Sk0)
print('Ik0', Ik0)
tau = 3
gamma = 1
# print(SkIl0, SkSl0
# print(Sk0
# print(Ik0
t, S, I = EoN.SIS_heterogeneous_pairwise(Sk0, Ik0, SkSl0, SkIl0, IkIl0, tau, gamma, tmax=10)
plt.clf()
plt.plot(t, S, label='pure IC')
plt.plot(t, I)
G = nx.Graph()
G.add_edges_from([(1, 2), (1, 3), (1, 4), (5, 6), (5, 7), (5, 8)])
G.add_node(0)
t, S, I = EoN.SIS_heterogeneous_pairwise_from_graph(G, tau, gamma, rho=1. / 9, tmax=10)
plt.plot(t, S, '-.', label='uniform')
plt.plot(t, I, '-.')
plt.legend(loc='upper right')
plt.title('starting from different IC')
plt.savefig('SIS_heterogeneous_pairwise')
def test_SIR_heterogeneous_pairwise(self):
print("SIR_heterogeneous_pairwise not yet tested")
def test_SIS_compact_pairwise(self):
print("testing SIS_compact_pairwise")
EoN.EoNError('changing order of arguments')
Sk0 = scipy.arange(100) * 100
Ik0 = scipy.arange(100)
SI0 = Ik0.dot(scipy.arange(100))
SS0 = Sk0.dot(scipy.arange(100)) - SI0
II0 = 0
tau = 0.1
gamma = 0.3
t, S, I = EoN.SIS_compact_pairwise(Sk0, Ik0, SI0, SS0, II0, tau, gamma, tmax=5)
print("plotting SIS_compact_pairwise")
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.savefig('SIS_compact_pairwise')
def test_SIR_compact_pairwise(self):
EoN.EoNError('changing order of arguments')
print("testing SIR_compact_pairwise")
Sk0 = scipy.arange(100) * 100
I0 = sum(scipy.arange(100))
R0 = 0
SI0 = 1000
SS0 = Sk0.dot(scipy.arange(100)) - SI0
tau = 0.1
gamma = 0.3
t, S, I, R = EoN.SIR_compact_pairwise(Sk0, I0, R0, SI0, SS0, tau, gamma, tmax=5)
print("plotting SIR_compact_pairwise")
plt.clf()
plt.plot(t, S)
plt.plot(t, I)
plt.plot(t, R)
plt.savefig('SIR_compact_pairwise')
def test_SIS_super_compact_pairwise(self):
print("SIS_super_compact_pairwise not yet tested")
def test_SIR_super_compact_pairwise(self):
print("SIR_super_compact_pairwise not yet tested")
def test_SIS_effective_degree(self):
print("SIS_effective_degree not yet tested")
def test_SIR_effective_degree(self):
print("SIR_effective_degree not yet tested")
def test_SIS_compact_effective_degree(self):
print("SIS_compact_effective_degree not yet tested")
def test_SIR_compact_effective_degree(self):
print("SIR_compact_effective_degree not yet tested")
def test_ErdősRényi_million_Fast_Gillespie_SIR(self):
print("testing ErdősRényi_million_Fast_Gillespie_SIR")
N = 10 ** 6 # number of individuals
kave = 5 # expected number of partners
G = nx.fast_gnp_random_graph(N, kave / (N - 1)) # Erdős-Rényi graph
rho = 0.005 # initial fraction infected
tau = 0.3 # transmission rate
gamma = 1.0 # recovery rate
t1, S1, I1, R1 = EoN.fast_SIR(G, tau, gamma, rho=rho)
t2, S2, I2, R2 = EoN.Gillespie_SIR(G, tau, gamma, rho=rho)
plt.plot(t1, I1, label='fast_SIR')
plt.plot(t2, I2, label='Gillespie_SIR')
plt.legend()
plt.savefig('test_ErdősRényi_million_Fast_Gillespie_SIR')
def test_ErdősRényi_million_Fast_Gillespie_SIS(self):
        N = 10 ** 5  # number of individuals (note: 10**5 here, despite "million" in the test name)
kave = 5 # expected number of partners
G = nx.fast_gnp_random_graph(N, kave / (N - 1)) # Erdős-Rényi graph
rho = 0.005 # initial fraction infected
tau = 0.3 # transmission rate
gamma = 1.0 # recovery rate
t1, S1, I1 = EoN.fast_SIS(G, tau, gamma, rho=rho, tmax=30)
t2, S2, I2 = EoN.Gillespie_SIS(G, tau, gamma, rho=rho, tmax=30)
plt.plot(t1, I1, label='fast_SIS')
plt.plot(t2, I2, label='Gillespie_SIS')
plt.legend()
plt.savefig('test_ErdősRényi_million_Fast_Gillespie_SIS')
def test_fast_nonMarkov_SIR(self):
def rec_time_fxn_gamma(u):
# gamma(shape, scale = 1.0)
return np.random.gamma(3, 0.5)
def trans_time_fxn(u, v, tau):
if tau > 0:
return np.random.exponential(1. / tau)
else:
return float('Inf')
N = 10 ** 6 # number of individuals
kave = 5 # expected number of partners
G = nx.fast_gnp_random_graph(N, kave / (N - 1)) # Erdős-Rényi graph
tau = 0.3
for cntr in range(10):
t, S, I, R = EoN.fast_nonMarkov_SIR(G, trans_time_fxn=trans_time_fxn,
rec_time_fxn=rec_time_fxn_gamma, trans_time_args=(tau,))
plt.plot(t, R)
plt.savefig('test_fast_nonMarkov_SIR')
def test_SIS_Pairwise_Model(self):
N = 10000
gamma = 1
rho = 0.05
kave = 20
tau = 2 * gamma / kave
S0 = (1 - rho) * N
I0 = rho * N
SI0 = (1 - rho) * kave * rho * N
SS0 = (1 - rho) * kave * (1 - rho) * N
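        # (Assumed rationale for the pair counts above:) with statuses assigned
        # independently at random, each of the N*kave ordered partnerships is
        # S-I with probability (1 - rho)*rho and S-S with probability
        # (1 - rho)**2, giving the SI0 and SS0 values used here.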
t, S, I = EoN.SIS_homogeneous_pairwise(S0, I0, SI0, SS0, kave, tau, gamma,
tmax=10)
plt.plot(t, S, label='S')
plt.plot(t, I, label='I')
plt.legend()
plt.savefig('test_SIS_Pairwise_Model')
def test_SIR_EBCM(self):
gamma = 1
tau = 1.5
kave = 3
rho = 0.01
phiS0 = 1 - rho
def psi(x):
return (1 - rho) * np.exp(-kave * (1 - x))
def psiPrime(x):
return (1 - rho) * kave * np.exp(-kave * (1 - x))
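        # Explanatory note (not from the source): psi(x) = (1 - rho)*exp(-kave*(1 - x))
        # is the probability generating function of a Poisson(kave) degree
        # distribution, scaled by the initially susceptible fraction (1 - rho);
        # psiPrime is its derivative -- the standard EBCM ingredients.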
N = 1
t, S, I, R = EoN.EBCM(N, psi, psiPrime, tau, gamma, phiS0, tmax=10)
plt.plot(t, S, label='S')
plt.plot(t, I, label='I')
plt.plot(t, R, label='R')
plt.legend()
plt.savefig('test_SIR_EBCM')
def test_Gillespie_simple_contagion(self):
N = 100000
G = nx.fast_gnp_random_graph(N, 5. / (N - 1))
        # nodes will vary in the rate of leaving the exposed class,
        # and edges will vary in transmission rate.
        # there is no variation in recovery rate.
node_attribute_dict = {node: 0.5 + random.random() for node in G.nodes()}
edge_attribute_dict = {edge: 0.5 + random.random() for edge in G.edges()}
nx.set_node_attributes(G, values=node_attribute_dict, name='expose2infect_weight')
nx.set_edge_attributes(G, values=edge_attribute_dict, name='transmission_weight')
#
# These individual and partnership attributes will be used to scale
# the transition rates. When we define `H` and `J`, we provide the name
# of these attributes.
# We show how node and edge attributes in the contact network 'G' can be used
# to scale the transmission rates. More advanced techniques are shown in
        # the documentation.
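        # A sketch of the assumed semantics (inferred, not quoted from EoN's code):
        # the spontaneous E->I rate for node u becomes
        #     0.6 * G.nodes[u]['expose2infect_weight'],
        # and the induced S->E rate across an infected-susceptible edge (u, v)
        # becomes 0.1 * G.edges[u, v]['transmission_weight'].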
H = nx.DiGraph()
H.add_node('S') # This line is actually unnecessary.
H.add_edge('E', 'I', rate=0.6, weight_label='expose2infect_weight')
H.add_edge('I', 'R', rate=0.1)
J = nx.DiGraph()
J.add_edge(('I', 'S'), ('I', 'E'), rate=0.1, weight_label='transmission_weight')
IC = defaultdict(lambda: 'S')
for node in range(200):
IC[node] = 'I'
return_statuses = ('S', 'E', 'I', 'R')
t, S, E, I, R = EoN.Gillespie_simple_contagion(G, H, J, IC, return_statuses,
tmax=float('Inf'))
plt.plot(t, S, label='Susceptible')
plt.plot(t, E, label='Exposed')
plt.plot(t, I, label='Infected')
plt.plot(t, R, label='Recovered')
plt.legend()
plt.savefig('test_Gillespie_simple_contagion')
def test_Two_Cooperative_SIR_Diseases_oscillatory(self):
# N = 1000000
N = 300000
G = nx.fast_gnp_random_graph(N, 5. / (N - 1))
print('got G')
# In the below:
# 'SS' means an individual susceptible to both diseases
# 'SI' means susceptible to disease 1 and infected with disease 2
# 'RS' means recovered from disease 1 and susceptible to disease 2.
# etc.
H = nx.DiGraph() # DiGraph showing possible transitions that don't require an interaction
H.add_node('SS') # we actually don't need to include the 'SS' node in H.
H.add_edge('SI', 'SR', rate=1)
H.add_edge('IS', 'RS', rate=1)
H.add_edge('II', 'IR', rate=1)
H.add_edge('II', 'RI', rate=1)
H.add_edge('IR', 'RR', rate=0.5)
H.add_edge('RI', 'RR', rate=0.5)
# In the below the edge (('SI', 'SS'), ('SI', 'SI')) means an
# 'SI' individual connected to an 'SS' individual can lead to a transition in which
# the 'SS' individual becomes 'SI'. The rate of this transition is 0.2.
#
# Note that `IR` and `RI` individuals are more infectious than other individuals.
#
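        # For example, the edge (('RI', 'SS'), ('RI', 'SI'), rate=1) below reads:
        # an 'RI' neighbour infects an 'SS' individual with disease 2 at rate 1,
        # five times the 0.2 baseline; this boosted infectiousness after recovering
        # from the other disease is what makes the two diseases cooperative.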
        J = nx.DiGraph()  # DiGraph showing transitions that do require an interaction.
J.add_edge(('SI', 'SS'), ('SI', 'SI'), rate=0.2)
J.add_edge(('SI', 'IS'), ('SI', 'II'), rate=0.2)
J.add_edge(('SI', 'RS'), ('SI', 'RI'), rate=0.2)
J.add_edge(('II', 'SS'), ('II', 'SI'), rate=0.2)
J.add_edge(('II', 'IS'), ('II', 'II'), rate=0.2)
J.add_edge(('II', 'RS'), ('II', 'RI'), rate=0.2)
J.add_edge(('RI', 'SS'), ('RI', 'SI'), rate=1)
J.add_edge(('RI', 'IS'), ('RI', 'II'), rate=1)
J.add_edge(('RI', 'RS'), ('RI', 'RI'), rate=1)
J.add_edge(('IS', 'SS'), ('IS', 'IS'), rate=0.2)
J.add_edge(('IS', 'SI'), ('IS', 'II'), rate=0.2)
J.add_edge(('IS', 'SR'), ('IS', 'IR'), rate=0.2)
J.add_edge(('II', 'SS'), ('II', 'IS'), rate=0.2)
J.add_edge(('II', 'SI'), ('II', 'II'), rate=0.2)
J.add_edge(('II', 'SR'), ('II', 'IR'), rate=0.2)
J.add_edge(('IR', 'SS'), ('IR', 'IS'), rate=1)
J.add_edge(('IR', 'SI'), ('IR', 'II'), rate=1)
J.add_edge(('IR', 'SR'), ('IR', 'IR'), rate=1)
return_statuses = ('SS', 'SI', 'SR', 'IS', 'II', 'IR', 'RS', 'RI', 'RR')
# initial_size = 650
initial_size = 650
IC = defaultdict(lambda: 'SS')
for individual in range(initial_size):
IC[individual] = 'II'
print('got IC')
t, SS, SI, SR, IS, II, IR, RS, RI, RR = EoN.Gillespie_simple_contagion(G, H, J, IC, return_statuses,
tmax=float('Inf'))
plt.semilogy(t, IS + II + IR, '-.', label='Infected with disease 1')
plt.semilogy(t, SI + II + RI, '-.', label='Infected with disease 2')
plt.legend()
plt.savefig('test_Two_Cooperative_SIR_Diseases_oscillatory')
def test_Gillespie_complex_contagion(self):
def transition_rate(G, node, status, parameters):
# this function needs to return the rate at which ``node`` changes status
#
            r = parameters[0]  # the threshold: at least r infected neighbours are needed
            if status[node] == 'S' and len([nbr for nbr in G.neighbors(node) if status[nbr] == 'I']) >= r:
                return 1
            else:  # node is already 'I', or it has fewer than r infected neighbours
                return 0
def transition_choice(G, node, status, parameters):
# this function needs to return the new status of node. We assume going
# in that we have already calculated it is changing status.
#
# this function could be more elaborate if there were different
# possible transitions that could happen. However, for this model,
# the 'I' nodes aren't changing status, and the 'S' ones are changing to 'I'
# So if we're in this function, the node must be 'S' and becoming 'I'
#
return 'I'
def get_influence_set(G, node, status, parameters):
# this function needs to return any node whose rates might change
# because ``node`` has just changed status. That is, which nodes
# might ``node`` influence?
#
# For our models the only nodes a node might affect are the susceptible neighbors.
return {nbr for nbr in G.neighbors(node) if status[nbr] == 'S'}
        parameters = (2,)  # the threshold; the trailing comma makes this a 1-tuple
        # rather than a bare number, since ``parameters`` must be passed as a tuple.
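        # Concretely: with parameters = (2,), a susceptible node flips to 'I' at
        # rate 1 once at least two of its neighbours are infected, and at rate 0
        # otherwise.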
N = 60000
deg_dist = [2, 4, 6] * int(N / 3)
G = nx.configuration_model(deg_dist)
for rho in np.linspace(3. / 80, 7. / 80, 8): # 8 values from 3/80 to 7/80.
print(rho)
IC = defaultdict(lambda: 'S')
for node in G.nodes():
if np.random.random() < rho: # there are faster ways to do this random selection
IC[node] = 'I'
t, S, I = EoN.Gillespie_complex_contagion(G, transition_rate, transition_choice,
get_influence_set, IC, return_statuses=('S', 'I'),
parameters=parameters)
plt.plot(t, I)
plt.savefig('test_Gillespie_complex_contagion')
def test_Snapshot_Dynamics_And_TransmissionTree(self):
G = nx.karate_club_graph()
nx_kwargs = {"with_labels": True}
sim = EoN.Gillespie_SIR(G, 1, 1, return_full_data=True)
sim.display(time=1, **nx_kwargs)
plt.savefig('test_Snapshot_Dynamics_And_TransmissionTree_1')
T = sim.transmission_tree()
Tpos = EoN.hierarchy_pos(T)
fig = plt.figure(figsize=(8, 5))
ax = fig.add_subplot(111)
nx.draw(T, Tpos, ax=ax, node_size=200, with_labels=True)
plt.savefig('test_Snapshot_Dynamics_And_TransmissionTree_2')
def test_Animation_Dynamics_SIR_With_Vaccination_In_Lattice(self):
G = nx.grid_2d_graph(100, 100) # each node is (u,v) where 0<=u,v<=99
# we'll initially infect those near the middle
initial_infections = [(u, v) for (u, v) in G if 45 < u < 55 and 45 < v < 55]
H = nx.DiGraph() # the spontaneous transitions
H.add_edge('Sus', 'Vac', rate=0.01)
H.add_edge('Inf', 'Rec', rate=1.0)
J = nx.DiGraph() # the induced transitions
J.add_edge(('Inf', 'Sus'), ('Inf', 'Inf'), rate=2.0)
IC = defaultdict(lambda: 'Sus') # initial condition
for node in initial_infections:
IC[node] = 'Inf'
return_statuses = ['Sus', 'Inf', 'Rec', 'Vac']
color_dict = {'Sus': '#009a80', 'Inf': '#ff2000', 'Rec': 'gray', 'Vac': '#5AB3E6'}
pos = {node: node for node in G}
tex = False
sim_kwargs = {'color_dict': color_dict, 'pos': pos, 'tex': tex}
sim = EoN.Gillespie_simple_contagion(G, H, J, IC, return_statuses, tmax=30,
return_full_data=True, sim_kwargs=sim_kwargs)
times, D = sim.summary()
#
        # times is a numpy array of times. D is a dict, whose keys are the entries in
# return_statuses. The values are numpy arrays giving the number in that
# status at the corresponding time.
newD = {'Sus+Vac': D['Sus'] + D['Vac'], 'Inf+Rec': D['Inf'] + D['Rec']}
#
        # newD is a new dict giving the number not yet infected and the number ever infected.
# Let's add this timeseries to the simulation.
#
new_timeseries = (times, newD)
sim.add_timeseries(new_timeseries, label='Simulation',
color_dict={'Sus+Vac': '#E69A00', 'Inf+Rec': '#CD9AB3'})
sim.display(time=6, node_size=4, ts_plots=[['Inf'], ['Sus+Vac', 'Inf+Rec']])
plt.savefig('test_Animation_Dynamics_SIR_With_Vaccination_In_Lattice')
ani = sim.animate(ts_plots=[['Inf'], ['Sus+Vac', 'Inf+Rec']], node_size=4)
ani.save('test_Animation_Dynamics_SIR_With_Vaccination_In_Lattice.mp4', fps=5, extra_args=['-vcodec', 'libx264'])
|
springer-math/Mathematics-of-Epidemics-on-Networks
|
EoN/tests/test_from_joel.py
|
Python
|
mit
| 39,523
|
"""
This file contains celery tasks for email marketing signal handler.
"""
import logging
import time
from celery import task
from django.core.cache import cache
from email_marketing.models import EmailMarketingConfiguration
from student.models import EnrollStatusChange
from sailthru.sailthru_client import SailthruClient
from sailthru.sailthru_error import SailthruClientError
log = logging.getLogger(__name__)
# pylint: disable=not-callable
@task(bind=True, default_retry_delay=3600, max_retries=24)
def update_user(self, sailthru_vars, email, new_user=False, activation=False):
"""
Adds/updates Sailthru profile information for a user.
Args:
sailthru_vars(dict): User profile information to pass as 'vars' to Sailthru
email(str): User email address
new_user(boolean): True if new registration
activation(boolean): True if activation request
Returns:
None
"""
email_config = EmailMarketingConfiguration.current()
if not email_config.enabled:
return
sailthru_client = SailthruClient(email_config.sailthru_key, email_config.sailthru_secret)
try:
sailthru_response = sailthru_client.api_post("user",
_create_sailthru_user_parm(sailthru_vars, email,
new_user, email_config))
except SailthruClientError as exc:
log.error("Exception attempting to add/update user %s in Sailthru - %s", email, unicode(exc))
raise self.retry(exc=exc,
countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.error("Error attempting to add/update user in Sailthru: %s", error.get_message())
if _retryable_sailthru_error(error):
raise self.retry(countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
return
# if activating user, send welcome email
if activation and email_config.sailthru_activation_template:
try:
sailthru_response = sailthru_client.api_post("send",
{"email": email,
"template": email_config.sailthru_activation_template})
except SailthruClientError as exc:
log.error("Exception attempting to send welcome email to user %s in Sailthru - %s", email, unicode(exc))
raise self.retry(exc=exc,
countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.error("Error attempting to send welcome email to user in Sailthru: %s", error.get_message())
if _retryable_sailthru_error(error):
raise self.retry(countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
# pylint: disable=not-callable
@task(bind=True, default_retry_delay=3600, max_retries=24)
def update_user_email(self, new_email, old_email):
"""
Adds/updates Sailthru when a user email address is changed
Args:
        new_email(str): New email address
old_email(str): Original email address
Returns:
None
"""
email_config = EmailMarketingConfiguration.current()
if not email_config.enabled:
return
# ignore if email not changed
if new_email == old_email:
return
sailthru_parms = {"id": old_email, "key": "email", "keysconflict": "merge", "keys": {"email": new_email}}
try:
sailthru_client = SailthruClient(email_config.sailthru_key, email_config.sailthru_secret)
sailthru_response = sailthru_client.api_post("user", sailthru_parms)
except SailthruClientError as exc:
log.error("Exception attempting to update email for %s in Sailthru - %s", old_email, unicode(exc))
raise self.retry(exc=exc,
countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.error("Error attempting to update user email address in Sailthru: %s", error.get_message())
if _retryable_sailthru_error(error):
raise self.retry(countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
def _create_sailthru_user_parm(sailthru_vars, email, new_user, email_config):
"""
Create sailthru user create/update parms
"""
sailthru_user = {'id': email, 'key': 'email'}
sailthru_user['vars'] = dict(sailthru_vars, last_changed_time=int(time.time()))
# if new user add to list
if new_user and email_config.sailthru_new_user_list:
sailthru_user['lists'] = {email_config.sailthru_new_user_list: 1}
return sailthru_user
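# Illustrative shape of the structure returned above (values invented for this
# sketch; only the 'id'/'key'/'vars'/'lists' fields come from the code):
#   {'id': 'student@example.com', 'key': 'email',
#    'vars': {'last_changed_time': 1467331200, ...},
#    'lists': {'new_registrations': 1}}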
# pylint: disable=not-callable
@task(bind=True, default_retry_delay=3600, max_retries=24)
def update_course_enrollment(self, email, course_url, event, mode,
course_id=None, message_id=None): # pylint: disable=unused-argument
"""
Adds/updates Sailthru when a user enrolls/unenrolls/adds to cart/purchases/upgrades a course
Args:
email(str): The user's email address
course_url(str): Course home page url
event(str): event type
mode(str): enroll mode (audit, verification, ...)
        course_id(str): course run id
        message_id(str): Sailthru message id, passed through to the purchase API
Returns:
None
The event can be one of the following:
EnrollStatusChange.enroll
A free enroll (mode=audit or honor)
EnrollStatusChange.unenroll
An unenroll
EnrollStatusChange.upgrade_start
A paid upgrade added to cart - ignored
EnrollStatusChange.upgrade_complete
A paid upgrade purchase complete - ignored
EnrollStatusChange.paid_start
A non-free course added to cart - ignored
EnrollStatusChange.paid_complete
A non-free course purchase complete - ignored
"""
email_config = EmailMarketingConfiguration.current()
if not email_config.enabled:
return
# Use event type to figure out processing required
unenroll = False
send_template = None
cost_in_cents = 0
if event == EnrollStatusChange.enroll:
send_template = email_config.sailthru_enroll_template
# set cost so that Sailthru recognizes the event
cost_in_cents = email_config.sailthru_enroll_cost
elif event == EnrollStatusChange.unenroll:
# unenroll - need to update list of unenrolled courses for user in Sailthru
unenroll = True
else:
# All purchase events should be handled by ecommerce, so ignore
return
sailthru_client = SailthruClient(email_config.sailthru_key, email_config.sailthru_secret)
# update the "unenrolled" course array in the user record on Sailthru
if not _update_unenrolled_list(sailthru_client, email, course_url, unenroll):
raise self.retry(countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
# if there is a cost, call Sailthru purchase api to record
if cost_in_cents:
# get course information if configured and appropriate event
course_data = {}
if email_config.sailthru_get_tags_from_sailthru:
course_data = _get_course_content(course_url, sailthru_client, email_config)
# build item description
item = _build_purchase_item(course_id, course_url, cost_in_cents, mode, course_data)
# build purchase api options list
options = {}
# add appropriate send template
if send_template:
options['send_template'] = send_template
if not _record_purchase(sailthru_client, email, item, message_id, options):
raise self.retry(countdown=email_config.sailthru_retry_interval,
max_retries=email_config.sailthru_max_retries)
def _build_purchase_item(course_id_string, course_url, cost_in_cents, mode, course_data):
"""
Build Sailthru purchase item object
:return: item
"""
# build item description
item = {
'id': "{}-{}".format(course_id_string, mode),
'url': course_url,
'price': cost_in_cents,
'qty': 1,
}
# make up title if we don't already have it from Sailthru
if 'title' in course_data:
item['title'] = course_data['title']
else:
item['title'] = 'Course {} mode: {}'.format(course_id_string, mode)
if 'tags' in course_data:
item['tags'] = course_data['tags']
# add vars to item
item['vars'] = dict(course_data.get('vars', {}), mode=mode, course_run_id=course_id_string)
return item
def _record_purchase(sailthru_client, email, item, message_id, options):
"""
Record a purchase in Sailthru
:param sailthru_client:
:param email:
:param item:
    :param message_id:
    :param options:
    :return: False if there was a retryable error, else True
"""
try:
sailthru_response = sailthru_client.purchase(email, [item],
message_id=message_id,
options=options)
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.error("Error attempting to record purchase in Sailthru: %s", error.get_message())
return not _retryable_sailthru_error(error)
except SailthruClientError as exc:
log.error("Exception attempting to record purchase for %s in Sailthru - %s", email, unicode(exc))
return False
return True
def _get_course_content(course_url, sailthru_client, email_config):
"""
Get course information using the Sailthru content api.
If there is an error, just return with an empty response.
:param course_url:
:param sailthru_client:
:return: dict with course information
"""
# check cache first
response = cache.get(course_url)
if not response:
try:
sailthru_response = sailthru_client.api_get("content", {"id": course_url})
if not sailthru_response.is_ok():
return {}
response = sailthru_response.json
cache.set(course_url, response, email_config.sailthru_content_cache_age)
except SailthruClientError:
response = {}
return response
def _update_unenrolled_list(sailthru_client, email, course_url, unenroll):
"""
Maintain a list of courses the user has unenrolled from in the Sailthru user record
:param sailthru_client:
:param email:
:param course_url:
:param unenroll:
:return: False if retryable error, else True
"""
try:
# get the user 'vars' values from sailthru
sailthru_response = sailthru_client.api_get("user", {"id": email, "fields": {"vars": 1}})
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.info("Error attempting to read user record from Sailthru: %s", error.get_message())
return not _retryable_sailthru_error(error)
response_json = sailthru_response.json
unenroll_list = []
if response_json and "vars" in response_json and response_json["vars"] \
and "unenrolled" in response_json["vars"]:
unenroll_list = response_json["vars"]["unenrolled"]
changed = False
# if unenrolling, add course to unenroll list
if unenroll:
if course_url not in unenroll_list:
unenroll_list.append(course_url)
changed = True
# if enrolling, remove course from unenroll list
elif course_url in unenroll_list:
unenroll_list.remove(course_url)
changed = True
if changed:
# write user record back
sailthru_response = sailthru_client.api_post(
"user", {'id': email, 'key': 'email', "vars": {"unenrolled": unenroll_list}})
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
log.info("Error attempting to update user record in Sailthru: %s", error.get_message())
return not _retryable_sailthru_error(error)
# everything worked
return True
except SailthruClientError as exc:
log.error("Exception attempting to update user record for %s in Sailthru - %s", email, unicode(exc))
return False
def _retryable_sailthru_error(error):
""" Return True if error should be retried.
9: Retryable internal error
43: Rate limiting response
others: Not retryable
See: https://getstarted.sailthru.com/new-for-developers-overview/api/api-response-errors/
"""
code = error.get_error_code()
return code == 9 or code == 43
|
louyihua/edx-platform
|
lms/djangoapps/email_marketing/tasks.py
|
Python
|
agpl-3.0
| 13,582
|
"""Main product initializer
"""
from zope.i18nmessageid import MessageFactory
from uwosh.pfg.d2c import config
from Products.Archetypes import atapi
from Products.CMFCore import utils
from Products.CMFCore.permissions import setDefaultRoles
# Define a message factory for when this product is internationalised.
# This will be imported with the special name "_" in most modules. Strings
# like _(u"message") will then be extracted by i18n tools for translation.
pfgSearchMessageFactory = MessageFactory('uwosh.pfg_search')
def initialize(context):
"""Initializer called when used as a Zope 2 product.
This is referenced from configure.zcml. Regstrations as a "Zope 2 product"
is necessary for GenericSetup profiles to work, for example.
Here, we call the Archetypes machinery to register our content types
with Zope and the CMF.
"""
# Retrieve the content types that have been registered with Archetypes
# This happens when the content type is imported and the registerType()
# call in the content type's module is invoked. Actually, this happens
# during ZCML processing, but we do it here again to be explicit. Of
# course, even if we import the module several times, it is only run
# once.
import uwosh.pfg_search.content
content_types, constructors, ftis = atapi.process_types(
atapi.listTypes(config.PROJECTNAME),
config.PROJECTNAME)
# Now initialize all these content types. The initialization process takes
# care of registering low-level Zope 2 factories, including the relevant
# add-permission. These are listed in config.py. We use different
# permissions for each content type to allow maximum flexibility of who
# can add which content types, where. The roles are set up in rolemap.xml
# in the GenericSetup profile.
for atype, constructor in zip(content_types, constructors):
utils.ContentInit('%s: %s' % (config.PROJECTNAME, atype.portal_type),
content_types = (atype,),
permission = config.ADD_PERMISSIONS[atype.portal_type],
extra_constructors = (constructor,),
).initialize(context)
|
uwosh/uwosh.pfg_search
|
uwosh/pfg_search/__init__.py
|
Python
|
gpl-2.0
| 2,182
|
class Solution(object):
def isPerfectSquare(self, num):
"""
:type num: int
:rtype: bool
"""
if num <= 1:
return True
        # binary search for the integer square root in [1, num // 2]
        # (floor division keeps this correct under both Python 2 and 3)
        l = 1
        r = num // 2
        while l < r:
            m = l + (r - l) // 2
            if m < num // m:
                l = m + 1
            else:
                r = m
        # num is a perfect square iff the candidate root l divides num exactly
        # and equals num // l
        return (l == num // l and num % l == 0)
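# Minimal usage sketch (illustrative; LeetCode's judge calls the method directly):
# Solution().isPerfectSquare(16)  # -> True
# Solution().isPerfectSquare(14)  # -> False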
|
hawkphantomnet/leetcode
|
ValidPerfectSquare/Solution.py
|
Python
|
mit
| 401
|
import os
from unittest import TestCase
from foster.init import Init
class InitTestCase(TestCase):
def setUp(self):
root = os.path.join(os.path.dirname(__file__), 'frames', 'init')
self.target = os.path.join(root, 'package.py')
os.chdir(root)
def tearDown(self):
if os.path.isfile(self.target):
os.unlink(self.target)
def test_create_package_file(self):
Init().run()
self.assertTrue(os.path.isfile(self.target))
with open(self.target, 'rb') as f:
self.assertTrue(f.read())
def test_create_package_dont_overwrite_existing_file(self):
open(self.target, 'a').close()
with self.assertRaises(SystemExit):
Init().run()
with open(self.target, 'rb') as f:
self.assertEqual(f.read(), b'')
|
hugollm/foster
|
tests/init_tests.py
|
Python
|
mit
| 831
|
LOOP_TAKEN = 101
LOOP_NOT_TAKEN = 20
"""
Tuples of:
* Test file name without extension
* Pre-loop code (if any)
* Loop condition
* Expected answer (20 loop not taken, 101 loop taken)
"""
condition_answer_pairs = [
# boolean true/false constants
("while_loop_with_constant_false_is_not_taken",
"",
"false",
LOOP_NOT_TAKEN),
("while_loop_with_constant_true_is_taken",
"",
"true",
     LOOP_TAKEN),
# boolean true/false through variable
("while_loop_with_variable_false_is_not_taken",
"let condition: bool = false;",
"condition",
LOOP_NOT_TAKEN),
("while_loop_with_variable_true_is_taken",
"let condition: bool = true;",
"condition",
     LOOP_TAKEN),
# constant CMP constant
("const_const_should_take_while_loop_with_less_than_comparison_when_lhs_is_less",
"",
"1 < 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_equal",
"",
"2 < 2",
LOOP_NOT_TAKEN),
("const_const_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_greater",
"",
"3 < 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_less",
"",
"1 <= 2",
LOOP_TAKEN),
("const_const_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_equal", "",
"2 <= 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_greater",
"",
"3 <= 2",
LOOP_NOT_TAKEN),
("const_const_should_not_take_while_loop_with_eq_comparison_when_lhs_is_less",
"",
"1 == 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_eq_comparison_when_lhs_is_equal",
"",
"2 == 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_eq_comparison_when_lhs_is_greater",
"",
"3 == 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_not_eq_comparison_when_lhs_is_less",
"",
"3 != 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_not_eq_comparison_when_lhs_is_equal",
"",
"2 != 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_not_eq_comparison_when_lhs_is_greater",
"",
"3 != 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_less",
"",
"1 >= 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_equal",
"",
"2 >= 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_greater",
"",
"3 >= 2",
LOOP_TAKEN),
("const_const_should_not_take_while_loop_with_greater_than_comparison_when_lhs_is_less",
"",
"1 > 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_greater_than_comparison_when_lhs_is_equal",
"",
"2 > 2",
LOOP_NOT_TAKEN),
("const_const_should_take_while_loop_with_greater_than_comparison_when_lhs_is_greater",
"",
"3 > 2",
LOOP_TAKEN),
# variable CMP constant
("variable_const_should_take_while_loop_with_less_than_comparison_when_lhs_is_less",
"let value: int = 1;",
"value < 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value < 2",
LOOP_NOT_TAKEN),
("variable_const_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_greater",
"let value: int = 3; ",
"value < 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_less",
"let value: int = 1;",
"value <= 2",
LOOP_TAKEN),
("variable_const_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value <= 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_greater",
"let value: int = 3;",
"value <= 2",
LOOP_NOT_TAKEN),
("variable_const_should_not_take_while_loop_with_eq_comparison_when_lhs_is_less",
"let value: int = 1;",
"value == 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value == 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_eq_comparison_when_lhs_is_greater",
"let value: int = 3;",
"value == 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_not_eq_comparison_when_lhs_is_less",
"let value: int = 1;",
"value != 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_not_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value != 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_not_eq_comparison_when_lhs_is_greater",
"let value: int = 3;",
"value != 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_less",
"let value: int = 1;",
"value >= 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value >= 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_greater",
"let value: int = 3;",
"value >= 2",
LOOP_TAKEN),
("variable_const_should_not_take_while_loop_with_greater_than_comparison_when_lhs_is_less",
"let value: int = 1;",
"value > 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_greater_than_comparison_when_lhs_is_equal",
"let value: int = 2;",
"value > 2",
LOOP_NOT_TAKEN),
("variable_const_should_take_while_loop_with_greater_than_comparison_when_lhs_is_greater",
"let value: int = 3;",
"value > 2",
LOOP_TAKEN),
# constant CMP variable
("const_variable_should_take_while_loop_with_less_than_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 < value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 < value",
LOOP_NOT_TAKEN),
("const_variable_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 < value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 <= value",
LOOP_TAKEN),
("const_variable_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 <= value", LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 <= value", LOOP_NOT_TAKEN),
("const_variable_should_not_take_while_loop_with_eq_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 == value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 == value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_eq_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 == value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_not_eq_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 != value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_not_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 != value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_not_eq_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 != value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 >= value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 >= value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 >= value",
LOOP_TAKEN),
("const_variable_should_not_take_while_loop_with_greater_than_comparison_when_lhs_is_less",
"let value: int = 2;",
"1 > value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_greater_than_comparison_when_lhs_is_equal",
"let value: int = 2;",
"2 > value",
LOOP_NOT_TAKEN),
("const_variable_should_take_while_loop_with_greater_than_comparison_when_lhs_is_greater",
"let value: int = 2;",
"3 > value",
LOOP_TAKEN),
# variable CMP variable
("variable_variable_should_take_while_loop_with_less_than_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value < rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value < rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_not_take_while_loop_with_less_than_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value < rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value <= rhs",
LOOP_TAKEN),
("variable_variable_should_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value <= rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_less_than_or_eq_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value <= rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_not_take_while_loop_with_eq_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value == rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_eq_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value == rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_eq_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value == rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_not_eq_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value != rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_not_eq_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value != rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_not_eq_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value != rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value >= rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value >= rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_greater_or_eq_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value >= rhs",
LOOP_TAKEN),
("variable_variable_should_not_take_while_loop_with_greater_than_comparison_when_lhs_is_less",
"let value: int = 1; let rhs: int = 2;",
"value > rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_greater_than_comparison_when_lhs_is_equal",
"let value: int = 2; let rhs: int = 2;",
"value > rhs",
LOOP_NOT_TAKEN),
("variable_variable_should_take_while_loop_with_greater_than_comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value > rhs",
LOOP_TAKEN),
# expressions
("constant_constant_expressions_should_not_take_while_loop_with_greater_than_comparison_when_lhs_is_less",
"",
"2 + 3 > 2*3+1 ",
LOOP_NOT_TAKEN),
("variable_constant_expressions_should_take_while_loop_when_both_side_are_equal",
"let value: int = 2; let rhs: int = 6;",
"value + rhs == 4*2 ",
LOOP_TAKEN),
("constant_variable_expressions_should_take_while_loop_with_less_than_comparison_when_lhs_is_less",
"let value: int = 3; let rhs: int = 2;",
"10/3 < value*rhs",
LOOP_TAKEN),
("variable_variable_expressions_should_not_take_while_loop_with_less_than_or_equal__comparison_when_lhs_is_greater",
"let value: int = 3; let rhs: int = 2;",
"value*5 <= rhs/2",
LOOP_NOT_TAKEN),
]
for pair in condition_answer_pairs:
out = f'''program: |
fn test_function() : int {{
let a: int = {LOOP_NOT_TAKEN};
{pair[1]}
while {pair[2]} {{
a = a+1;
if a > 100 {{
return a;
}}
}}
return a;
}}
link_with:
- tests/files/support/support.c
callable: test_function
returns: int
expect_stdout: |
{pair[3]}
'''
with open(f"{pair[0]}.yaml", "w") as f:
f.write(out)
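# For reference (illustrative walk-through, not generated output): the first
# tuple above yields "while_loop_with_constant_false_is_not_taken.yaml", whose
# test_function initializes a = 20, never enters the "while false" loop, and
# so returns 20 (LOOP_NOT_TAKEN), matching its expect_stdout.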
|
Valtis/YATCP
|
tests/programs/test_programs/generated_while_loop_condition_tests/_generate_tests.py
|
Python
|
gpl-2.0
| 13,324
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class FamilyHistory(models.Model):
_inherit = 'clv.family.history'
family_id = fields.Many2one(
comodel_name='clv.family',
string='Family',
ondelete='restrict'
)
class Family(models.Model):
_inherit = 'clv.family'
family_history_ids = fields.One2many(
comodel_name='clv.family.history',
inverse_name='family_id',
string='Families (History)'
)
count_family_histories = fields.Integer(
string='Families (History) (count)',
compute='_compute_count_family_histories',
)
@api.depends('family_history_ids')
def _compute_count_family_histories(self):
for r in self:
r.count_family_histories = len(r.family_history_ids)
|
CLVsol/clvsol_odoo_addons
|
clv_family_history/models/family.py
|
Python
|
agpl-3.0
| 931
|
import numpy as np
def get_data(params):
"""generates sinthetic dataset"""
#input size -> cnt_sequences, len_suqence, cnt_dimentsions
# transform into (seq_len, batch) x cnt_batches
assert params["problem"]["size"][0] % params["batch_size"] == 0
params["problem"]["len_sequence"] = params["problem"]["size"][1]
cnt_batches = params["problem"]["size"][0] // params["batch_size"]
shape = (cnt_batches,
params["problem"]["len_sequence"],
params["batch_size"])
    X = np.random.random(shape).astype(np.int64)  # uniform [0,1) floats truncated to int64 (all zeros; placeholder token ids)
Y = np.ones((cnt_batches, params["batch_size"]))
return X, Y
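# Minimal usage sketch (parameter values assumed for illustration):
#   params = {"batch_size": 8, "problem": {"size": [32, 128]}}
#   X, Y = get_data(params)  # X.shape == (4, 128, 8), Y.shape == (4, 8)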
|
undertherain/benchmarker
|
benchmarker/kernels/bert_custom/data.py
|
Python
|
mpl-2.0
| 631
|
# ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import webnotes
from webnotes.utils import add_days, cstr, getdate
from webnotes.model.doc import addchild
from webnotes.model.bean import getlist
from webnotes.model.code import get_obj
from webnotes import msgprint
sql = webnotes.conn.sql
from utilities.transaction_base import TransactionBase, delete_events
class DocType(TransactionBase):
def __init__(self, doc, doclist=[]):
self.doc = doc
self.doclist = doclist
# pull sales order details
#--------------------------
def pull_sales_order_detail(self):
self.doclist = self.doc.clear_table(self.doclist, 'item_maintenance_detail')
self.doclist = self.doc.clear_table(self.doclist, 'maintenance_schedule_detail')
self.doclist = get_obj('DocType Mapper', 'Sales Order-Maintenance Schedule').dt_map('Sales Order', 'Maintenance Schedule', self.doc.sales_order_no, self.doc, self.doclist, "[['Sales Order', 'Maintenance Schedule'],['Sales Order Item', 'Maintenance Schedule Item']]")
#pull item details
#-------------------
def get_item_details(self, item_code):
item = sql("select item_name, description from `tabItem` where name = '%s'" %(item_code), as_dict=1)
ret = {
'item_name': item and item[0]['item_name'] or '',
'description' : item and item[0]['description'] or ''
}
return ret
# generate maintenance schedule
#-------------------------------------
def generate_schedule(self):
self.doclist = self.doc.clear_table(self.doclist, 'maintenance_schedule_detail')
count = 0
sql("delete from `tabMaintenance Schedule Detail` where parent='%s'" %(self.doc.name))
for d in getlist(self.doclist, 'item_maintenance_detail'):
self.validate_maintenance_detail()
			s_list = self.create_schedule_list(d.start_date, d.end_date, d.no_of_visits)
for i in range(d.no_of_visits):
child = addchild(self.doc, 'maintenance_schedule_detail',
'Maintenance Schedule Detail', self.doclist)
child.item_code = d.item_code
child.item_name = d.item_name
child.scheduled_date = s_list[i].strftime('%Y-%m-%d')
if d.serial_no:
child.serial_no = d.serial_no
child.idx = count
count = count+1
child.incharge_name = d.incharge_name
child.save(1)
self.on_update()
def on_submit(self):
if not getlist(self.doclist, 'maintenance_schedule_detail'):
msgprint("Please click on 'Generate Schedule' to get schedule")
raise Exception
self.check_serial_no_added()
self.validate_serial_no_warranty()
self.validate_schedule()
email_map ={}
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.serial_no:
self.update_amc_date(d.serial_no, d.end_date)
if d.incharge_name not in email_map:
e = sql("select email_id, name from `tabSales Person` where name='%s' " %(d.incharge_name),as_dict=1)[0]
email_map[d.incharge_name] = (e['email_id'])
scheduled_date =sql("select scheduled_date from `tabMaintenance Schedule Detail` \
where incharge_name='%s' and item_code='%s' and parent='%s' " %(d.incharge_name, \
d.item_code, self.doc.name), as_dict=1)
for key in scheduled_date:
if email_map[d.incharge_name]:
description = "Reference: %s, Item Code: %s and Customer: %s" % \
(self.doc.name, d.item_code, self.doc.customer)
webnotes.bean({
"doctype": "Event",
"owner": email_map[d.incharge_name] or self.doc.owner,
"subject": description,
"description": description,
"starts_on": key["scheduled_date"] + " 10:00:00",
"event_type": "Private",
"ref_type": self.doc.doctype,
"ref_name": self.doc.name
}).insert()
webnotes.conn.set(self.doc, 'status', 'Submitted')
#get schedule dates
#----------------------
def create_schedule_list(self, start_date, end_date, no_of_visit):
schedule_list = []
start_date1 = start_date
date_diff = (getdate(end_date) - getdate(start_date)).days
add_by = date_diff/no_of_visit
#schedule_list.append(start_date1)
while(getdate(start_date1) < getdate(end_date)):
start_date1 = add_days(start_date1, add_by)
if len(schedule_list) < no_of_visit:
schedule_list.append(getdate(start_date1))
return schedule_list
#validate date range and periodicity selected
#-------------------------------------------------
def validate_period(self, arg):
arg1 = eval(arg)
if getdate(arg1['start_date']) >= getdate(arg1['end_date']):
msgprint("Start date should be less than end date ")
raise Exception
period = (getdate(arg1['end_date'])-getdate(arg1['start_date'])).days+1
if (arg1['periodicity']=='Yearly' or arg1['periodicity']=='Half Yearly' or arg1['periodicity']=='Quarterly') and period<365:
				msgprint(cstr(arg1['periodicity'])+ " periodicity can be set only for a period of at least 1 year")
raise Exception
elif arg1['periodicity']=='Monthly' and period<30:
msgprint("Monthly periodicity can be set for period of atleast 1 month or more")
raise Exception
elif arg1['periodicity']=='Weekly' and period<7:
msgprint("Weekly periodicity can be set for period of atleast 1 week or more")
raise Exception
#get count on the basis of periodicity selected
#----------------------------------------------------
def get_no_of_visits(self, arg):
arg1 = eval(arg)
self.validate_period(arg)
period = (getdate(arg1['end_date'])-getdate(arg1['start_date'])).days+1
count =0
if arg1['periodicity'] == 'Weekly':
count = period/7
elif arg1['periodicity'] == 'Monthly':
count = period/30
elif arg1['periodicity'] == 'Quarterly':
count = period/91
elif arg1['periodicity'] == 'Half Yearly':
count = period/182
elif arg1['periodicity'] == 'Yearly':
count = period/365
ret = {'no_of_visits':count}
return ret
def validate_maintenance_detail(self):
if not getlist(self.doclist, 'item_maintenance_detail'):
msgprint("Please enter Maintaince Details first")
raise Exception
for d in getlist(self.doclist, 'item_maintenance_detail'):
if not d.item_code:
msgprint("Please select item code")
raise Exception
elif not d.start_date or not d.end_date:
msgprint("Please select Start Date and End Date for item "+d.item_code)
raise Exception
elif not d.no_of_visits:
msgprint("Please mention no of visits required")
raise Exception
elif not d.incharge_name:
msgprint("Please select Incharge Person's name")
raise Exception
if getdate(d.start_date) >= getdate(d.end_date):
msgprint("Start date should be less than end date for item "+d.item_code)
raise Exception
#check if maintenance schedule already created against same sales order
#-----------------------------------------------------------------------------------
def validate_sales_order(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.prevdoc_docname:
chk = sql("select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1", d.prevdoc_docname)
if chk:
msgprint("Maintenance Schedule against "+d.prevdoc_docname+" already exist")
raise Exception
# Validate values with reference document
#----------------------------------------
def validate_reference_value(self):
get_obj('DocType Mapper', 'Sales Order-Maintenance Schedule', with_children = 1).validate_reference_value(self, self.doc.name)
def validate_serial_no(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
cur_s_no=[]
if d.serial_no:
cur_serial_no = d.serial_no.replace(' ', '')
cur_s_no = cur_serial_no.split(',')
for x in cur_s_no:
chk = sql("select name, status from `tabSerial No` where docstatus!=2 and name=%s", (x))
chk1 = chk and chk[0][0] or ''
status = chk and chk[0][1] or ''
if not chk1:
msgprint("Serial no "+x+" does not exist in system.")
raise Exception
else:
						if status=='In Store' or status=='Not in Use' or status=='Scrapped':
msgprint("Serial no "+x+" is '"+status+"'")
raise Exception
def validate(self):
self.validate_maintenance_detail()
self.validate_sales_order()
if self.doc.sales_order_no:
self.validate_reference_value()
self.validate_serial_no()
self.validate_start_date()
# validate that maintenance start date can not be before serial no delivery date
#-------------------------------------------------------------------------------------------
def validate_start_date(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.serial_no:
cur_serial_no = d.serial_no.replace(' ', '')
cur_s_no = cur_serial_no.split(',')
for x in cur_s_no:
dt = sql("select delivery_date from `tabSerial No` where name = %s", x)
dt = dt and dt[0][0] or ''
if dt:
if dt > getdate(d.start_date):
msgprint("Maintenance start date can not be before delivery date "+dt.strftime('%Y-%m-%d')+" for serial no "+x)
raise Exception
#update amc expiry date in serial no
#------------------------------------------
def update_amc_date(self,serial_no,amc_end_date):
#get current list of serial no
cur_serial_no = serial_no.replace(' ', '')
cur_s_no = cur_serial_no.split(',')
for x in cur_s_no:
sql("update `tabSerial No` set amc_expiry_date = '%s', maintenance_status = 'Under AMC' where name = '%s'"% (amc_end_date,x))
def on_update(self):
webnotes.conn.set(self.doc, 'status', 'Draft')
def validate_serial_no_warranty(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
if cstr(d.serial_no).strip():
dt = sql("""select warranty_expiry_date, amc_expiry_date
from `tabSerial No` where name = %s""", d.serial_no, as_dict=1)
if dt[0]['warranty_expiry_date'] and dt[0]['warranty_expiry_date'] >= d.start_date:
webnotes.msgprint("""Serial No: %s is already under warranty upto %s.
Please check AMC Start Date.""" %
(d.serial_no, dt[0]["warranty_expiry_date"]), raise_exception=1)
if dt[0]['amc_expiry_date'] and dt[0]['amc_expiry_date'] >= d.start_date:
webnotes.msgprint("""Serial No: %s is already under AMC upto %s.
Please check AMC Start Date.""" %
(d.serial_no, dt[0]["amc_expiry_date"]), raise_exception=1)
def validate_schedule(self):
item_lst1 =[]
item_lst2 =[]
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.item_code not in item_lst1:
item_lst1.append(d.item_code)
for m in getlist(self.doclist, 'maintenance_schedule_detail'):
if m.item_code not in item_lst2:
item_lst2.append(m.item_code)
if len(item_lst1) != len(item_lst2):
msgprint("Maintenance Schedule is not generated for all the items. Please click on 'Generate Schedule'")
raise Exception
else:
for x in item_lst1:
if x not in item_lst2:
msgprint("Maintenance Schedule is not generated for item "+x+". Please click on 'Generate Schedule'")
raise Exception
#check if serial no present in item maintenance table
#-----------------------------------------------------------
def check_serial_no_added(self):
serial_present =[]
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.serial_no:
serial_present.append(d.item_code)
for m in getlist(self.doclist, 'maintenance_schedule_detail'):
if serial_present:
if m.item_code in serial_present and not m.serial_no:
msgprint("Please click on 'Generate Schedule' to fetch serial no added for item "+m.item_code)
raise Exception
def on_cancel(self):
for d in getlist(self.doclist, 'item_maintenance_detail'):
if d.serial_no:
self.update_amc_date(d.serial_no, '')
webnotes.conn.set(self.doc, 'status', 'Cancelled')
delete_events(self.doc.doctype, self.doc.name)
def on_trash(self):
delete_events(self.doc.doctype, self.doc.name)
|
gangadhar-kadam/mic-erpnext
|
support/doctype/maintenance_schedule/maintenance_schedule.py
|
Python
|
agpl-3.0
| 12,625
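Stripped of the webnotes helpers, `create_schedule_list` is just equal-interval arithmetic: divide the start-to-end span by the visit count and step forward. A standalone sketch of that logic in modern Python (function and variable names are illustrative):

from datetime import date, timedelta

def schedule_dates(start, end, no_of_visits):
    # equal spacing between visits, mirroring date_diff / no_of_visit above
    step = (end - start).days // no_of_visits
    dates, current = [], start
    while current < end and len(dates) < no_of_visits:
        current += timedelta(days=step)
        dates.append(current)
    return dates

print(schedule_dates(date(2024, 1, 1), date(2024, 12, 31), 4))
# [datetime.date(2024, 4, 1), ...] -> four roughly quarterly visit dates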
|
#!/usr/bin/env python3
import random, os
from subprocess import check_output
# path to the reference solution binary (hardcoded, not read from argv)
correct_program = "solution"
in_file = "test_gen.in"
for index in range(1,20):
in_str = ""
k = random.randint(1,1000000)
in_str += str(k)
in_str += "\n"
correct_out = ""
with open(in_file, "w") as infile:
infile.write(in_str)
with open(in_file, "rb") as infile:
correct_out = check_output([os.path.abspath(correct_program)], stdin=infile).decode("utf-8")
os.remove(in_file)
with open("testdata/secret" + str(index) + ".in", "w") as infile:
infile.write(in_str)
with open("testdata/secret" + str(index) + ".out", "w") as outfile:
outfile.write(correct_out)
|
ArVID220u/judge
|
problems/uddaellerjamnt/test_generator.py
|
Python
|
mit
| 759
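The pattern here, write a generated `.in`, pipe it through a trusted solution, and capture stdout as the matching `.out`, generalizes to any judge. A minimal runnable sketch of the same pattern, using `cat` as a stand-in reference program (assumes a Unix-like system):

import os
from subprocess import check_output

case = "17\n"
with open("demo.in", "w") as f:
    f.write(case)

# `cat` echoes stdin, standing in for the real reference solution binary
with open("demo.in", "rb") as f:
    expected = check_output(["cat"], stdin=f).decode("utf-8")

assert expected == case
os.remove("demo.in")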
|
#!/usr/bin/python
#
# This file is part of CONCUSS, https://github.com/theoryinpractice/concuss/,
# and is Copyright (C) North Carolina State University, 2015. It is licensed
# under the three-clause BSD license; see LICENSE.
#
from lib.util.memorized import memorized
from lib.graph.graph import Graph
# Calculate one transitive-fraternal-augmentation step and
# return a tuple (newgraph, transedges, fratedges)
@memorized(['orig', 'step'])
def trans_frater_augmentation(orig, g, trans, frat, col,
nodes, step, td, ldoFunc):
fratGraph = Graph()
newTrans = {}
for v in g:
        for x, y, _ in g.trans_trips(v):
newTrans[(x, y)] = step
assert (not g.adjacent(x, y)), \
"{0} {1} transitive but adjacent".format(x, y)
for x, y, _ in g.frat_trips(v):
fratGraph.add_edge(x, y)
assert (not g.adjacent(x, y)), \
"{0} {1} fraternal but adjacent".format(x, y)
for (s, t) in newTrans.keys():
g.add_arc(s, t, 1)
fratGraph.remove_edge(s, t)
# TODO: support dict to see current in-degree...
fratDigraph = ldoFunc(fratGraph)
# calculate result
trans.update(newTrans)
for s, t, _ in fratDigraph.arcs():
frat[(s, t)] = step
g.add_arc(s, t, 1)
return (g, trans, frat)
# end def
|
nish10z/CONCUSS
|
lib/coloring/basic/trans_frater_augmentation.py
|
Python
|
bsd-3-clause
| 1,365
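For readers outside CONCUSS: one step of transitive-fraternal augmentation adds an arc u -> w for every directed path u -> v -> w, and marks {u, w} as fraternal whenever arcs u -> v and w -> v share a head; the library then orients the fraternal edges with a low-in-degree orientation (`ldoFunc`). A self-contained sketch of the pair-finding on a dict-of-sets digraph (names are illustrative, not the library's API):

def tf_augment_step(arcs):
    """One augmentation step over out-adjacency sets; returns (trans, frat)."""
    def adjacent(a, b):
        return b in arcs.get(a, set()) or a in arcs.get(b, set())

    trans, frat = set(), set()
    for u, outs in arcs.items():
        for v in outs:
            # transitive: u -> v -> w gives a new arc u -> w
            for w in arcs.get(v, set()):
                if u != w and not adjacent(u, w):
                    trans.add((u, w))
            # fraternal: u -> v and w -> v make {u, w} fraternal
            for w, outs2 in arcs.items():
                if w != u and v in outs2 and not adjacent(u, w):
                    frat.add(frozenset((u, w)))
    return trans, frat

g = {"a": {"b"}, "b": {"c"}, "d": {"c"}}
trans, frat = tf_augment_step(g)
print(trans)  # {('a', 'c')}
print(frat)   # {frozenset({'b', 'd'})}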
|
# -*- coding: utf-8 -*-
##
## This file is part of SCOAP3.
## Copyright (C) 2014 CERN.
##
## SCOAP3 is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## SCOAP3 is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with SCOAP3; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Springer BibTaskLet
"""
from harvestingkit.springer_package import SpringerPackage
from bst_utils import run
def bst_springer():
sp = SpringerPackage()
run(sp, sp.packages_delivery, sp.doi_package_name_mapping)
|
Dziolas/scoap3-1
|
bibsched_tasklets/bst_springer.py
|
Python
|
gpl-2.0
| 1,011
|
"""mascotas URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
|
w12n/mascotas
|
mascotas/urls.py
|
Python
|
mit
| 764
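As a concrete instance of recipe 1 in the docstring, wiring a hypothetical function view `home` from an app called `pets` (names illustrative, not from this repo) would look like:

from django.conf.urls import url
from django.contrib import admin

from pets import views  # hypothetical app

urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$', views.home, name='home'),
]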
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ShortenUrl'
db.create_table('leech_shortenurl', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('source_url', self.gf('django.db.models.fields.CharField')(max_length=255)),
('slug', self.gf('django.db.models.fields.CharField')(null=True, max_length=32)),
('create_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('user_uuid', self.gf('django.db.models.fields.CharField')(null=True, max_length=64)),
))
db.send_create_signal('leech', ['ShortenUrl'])
# Adding model 'ClickLog'
db.create_table('leech_clicklog', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('shorten_url', self.gf('django.db.models.fields.related.ForeignKey')(related_name='click_logs', to=orm['leech.ShortenUrl'])),
('user_agent', self.gf('django.db.models.fields.CharField')(max_length=255)),
('remote_address', self.gf('django.db.models.fields.CharField')(max_length=64)),
('click_time', self.gf('django.db.models.fields.DateTimeField')()),
))
db.send_create_signal('leech', ['ClickLog'])
# Adding model 'ClickLogAttribute'
db.create_table('leech_clicklogattribute', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('click_log', self.gf('django.db.models.fields.related.ForeignKey')(related_name='attributes', to=orm['leech.ClickLog'])),
('name', self.gf('django.db.models.fields.IntegerField')()),
('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('leech', ['ClickLogAttribute'])
def backwards(self, orm):
# Deleting model 'ShortenUrl'
db.delete_table('leech_shortenurl')
# Deleting model 'ClickLog'
db.delete_table('leech_clicklog')
# Deleting model 'ClickLogAttribute'
db.delete_table('leech_clicklogattribute')
models = {
'leech.clicklog': {
'Meta': {'object_name': 'ClickLog'},
'click_time': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'remote_address': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'shorten_url': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'click_logs'", 'to': "orm['leech.ShortenUrl']"}),
'user_agent': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'leech.clicklogattribute': {
'Meta': {'object_name': 'ClickLogAttribute'},
'click_log': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['leech.ClickLog']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.IntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'leech.shortenurl': {
'Meta': {'object_name': 'ShortenUrl'},
'create_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'null': 'True', 'max_length': '32'}),
'source_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user_uuid': ('django.db.models.fields.CharField', [], {'null': 'True', 'max_length': '64'})
}
}
complete_apps = ['leech']
|
ritksm/leech
|
leech/migrations/0001_initial.py
|
Python
|
mit
| 4,040
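For readers who don't use South: the schema built in `forwards` corresponds roughly to these Django model definitions (`on_delete` is spelled out here as an assumption; South-era Django cascaded implicitly):

from django.db import models

class ShortenUrl(models.Model):
    source_url = models.CharField(max_length=255)
    slug = models.CharField(max_length=32, null=True)
    create_time = models.DateTimeField(auto_now_add=True)
    user_uuid = models.CharField(max_length=64, null=True)

class ClickLog(models.Model):
    shorten_url = models.ForeignKey(ShortenUrl, related_name='click_logs', on_delete=models.CASCADE)
    user_agent = models.CharField(max_length=255)
    remote_address = models.CharField(max_length=64)
    click_time = models.DateTimeField()

class ClickLogAttribute(models.Model):
    click_log = models.ForeignKey(ClickLog, related_name='attributes', on_delete=models.CASCADE)
    name = models.IntegerField()
    value = models.CharField(max_length=255)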
|