| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| CDKAssoc/ns3 | refs/heads/master | .waf-1.7.13-5a064c2686fe54de4e11018d22148cfc/waflib/Tools/python.py | 89 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib import Utils,Options,Errors,Logs
from waflib.TaskGen import extension,before_method,after_method,feature
from waflib.Configure import conf
FRAG='''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
(void)argc; (void)argv;
Py_Initialize();
Py_Finalize();
return 0;
}
'''
INST='''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
'''
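# Note: INST is invoked as "python -c INST <src> <dst> <dfile>" (see install_pyfile
# below); py_compile.compile takes (file, cfile, dfile), where dfile is the path
# recorded for the bytecode (here the final install location, without destdir).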
DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
@extension('.py')
def process_py(self,node):
try:
if not self.bld.is_install:
return
except AttributeError:
return
try:
if not self.install_path:
return
except AttributeError:
self.install_path='${PYTHONDIR}'
def inst_py(ctx):
install_from=getattr(self,'install_from',None)
if install_from:
install_from=self.path.find_dir(install_from)
install_pyfile(self,node,install_from)
self.bld.add_post_fun(inst_py)
def install_pyfile(self,node,install_from=None):
from_node=install_from or node.parent
tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False)
path=tsk.get_install_path()
if self.bld.is_install<0:
Logs.info("+ removing byte compiled python files")
for x in'co':
try:
os.remove(path+x)
except OSError:
pass
if self.bld.is_install>0:
try:
st1=os.stat(path)
except OSError:
			Logs.error('The python file is missing, this should not happen')
			return
for x in['c','o']:
do_inst=self.env['PY'+x.upper()]
try:
st2=os.stat(path+x)
except OSError:
pass
else:
if st1.st_mtime<=st2.st_mtime:
do_inst=False
if do_inst:
lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[]
(a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x)
argv=self.env['PYTHON']+lst+['-c',INST,a,b,c]
Logs.info('+ byte compiling %r'%(path+x))
env=self.env.env or None
ret=Utils.subprocess.Popen(argv,env=env).wait()
if ret:
raise Errors.WafError('py%s compilation failed %r'%(x,path))
@feature('py')
def feature_py(self):
pass
@feature('pyext')
@before_method('propagate_uselib_vars','apply_link')
@after_method('apply_bundle')
def init_pyext(self):
self.uselib=self.to_list(getattr(self,'uselib',[]))
if not'PYEXT'in self.uselib:
self.uselib.append('PYEXT')
self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN
self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN
try:
if not self.install_path:
return
except AttributeError:
self.install_path='${PYTHONARCHDIR}'
@feature('pyext')
@before_method('apply_link','apply_bundle')
def set_bundle(self):
if Utils.unversioned_sys_platform()=='darwin':
self.mac_bundle=True
@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
self.uselib=self.to_list(getattr(self,'uselib',[]))
if not'PYEMBED'in self.uselib:
self.uselib.append('PYEMBED')
@conf
def get_python_variables(self,variables,imports=None):
if not imports:
try:
imports=self.python_imports
except AttributeError:
imports=DISTUTILS_IMP
program=list(imports)
program.append('')
for v in variables:
program.append("print(repr(%s))"%v)
os_env=dict(os.environ)
try:
del os_env['MACOSX_DEPLOYMENT_TARGET']
except KeyError:
pass
try:
out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env)
except Errors.WafError:
self.fatal('The distutils module is unusable: install "python-devel"?')
self.to_log(out)
return_values=[]
for s in out.split('\n'):
s=s.strip()
if not s:
continue
if s=='None':
return_values.append(None)
elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'):
return_values.append(eval(s))
elif s[0].isdigit():
return_values.append(int(s))
else:break
return return_values
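# Usage sketch (hypothetical): each expression is printed via repr() by the target
# interpreter and parsed back above, so only quoted strings, integers and None
# round-trip:
#
#   includepy, = conf.get_python_variables(["get_config_var('INCLUDEPY') or ''"])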
@conf
def check_python_headers(conf):
env=conf.env
if not env['CC_NAME']and not env['CXX_NAME']:
conf.fatal('load a compiler first (gcc, g++, ..)')
if not env['PYTHON_VERSION']:
conf.check_python_version()
pybin=conf.env.PYTHON
if not pybin:
conf.fatal('Could not find the python executable')
v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
try:
lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v])
except RuntimeError:
conf.fatal("Python development headers not found (-v for details).")
vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)]
conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals)))
dct=dict(zip(v,lst))
x='MACOSX_DEPLOYMENT_TARGET'
if dct[x]:
conf.env[x]=conf.environ[x]=dct[x]
env['pyext_PATTERN']='%s'+dct['SO']
all_flags=dct['LDFLAGS']+' '+dct['CFLAGS']
conf.parse_flags(all_flags,'PYEMBED')
all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS']
conf.parse_flags(all_flags,'PYEXT')
result=None
for name in('python'+env['PYTHON_VERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')):
if not result and env['LIBPATH_PYEMBED']:
path=env['LIBPATH_PYEMBED']
conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path)
result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name)
if not result and dct['LIBDIR']:
path=[dct['LIBDIR']]
conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path)
result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name)
if not result and dct['LIBPL']:
path=[dct['LIBPL']]
conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name)
if not result:
path=[os.path.join(dct['prefix'],"libs")]
conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name)
if result:
break
if result:
env['LIBPATH_PYEMBED']=path
env.append_value('LIB_PYEMBED',[name])
else:
conf.to_log("\n\n### LIB NOT FOUND\n")
if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']):
env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
env['LIB_PYEXT']=env['LIB_PYEMBED']
num='.'.join(env['PYTHON_VERSION'].split('.')[:2])
conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False)
includes=[]
if conf.env.PYTHON_CONFIG:
for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split():
if(incstr.startswith('-I')or incstr.startswith('/I')):
incstr=incstr[2:]
if incstr not in includes:
includes.append(incstr)
conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,))
env['INCLUDES_PYEXT']=includes
env['INCLUDES_PYEMBED']=includes
else:
conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],))
env['INCLUDES_PYEXT']=[dct['INCLUDEPY']]
env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']]
if env['CC_NAME']=='gcc':
env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing'])
if env['CXX_NAME']=='gcc':
env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing'])
env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing'])
if env.CC_NAME=="msvc":
from distutils.msvccompiler import MSVCCompiler
dist_compiler=MSVCCompiler()
dist_compiler.initialize()
env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared)
try:
conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg=':-(')
except conf.errors.ConfigurationError:
xx=conf.env.CXX_NAME and'cxx'or'c'
flags=['--cflags','--libs','--ldflags']
for f in flags:
conf.check_cfg(msg='Asking python-config for pyembed %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=[f])
conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx))
for f in flags:
conf.check_cfg(msg='Asking python-config for pyext %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=[f])
conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions')
@conf
def check_python_version(conf,minver=None):
assert minver is None or isinstance(minver,tuple)
pybin=conf.env['PYTHON']
if not pybin:
conf.fatal('could not find the python executable')
cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))']
Logs.debug('python: Running python command %r'%cmd)
lines=conf.cmd_and_log(cmd).split()
assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines)
pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
result=(minver is None)or(pyver_tuple>=minver)
if result:
pyver='.'.join([str(x)for x in pyver_tuple[:2]])
conf.env['PYTHON_VERSION']=pyver
if'PYTHONDIR'in conf.environ:
pydir=conf.environ['PYTHONDIR']
else:
if Utils.is_win32:
(python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
else:
python_LIBDEST=None
(pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
if python_LIBDEST is None:
if conf.env['LIBDIR']:
python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
else:
python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
if'PYTHONARCHDIR'in conf.environ:
pyarchdir=conf.environ['PYTHONARCHDIR']
else:
(pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
if not pyarchdir:
pyarchdir=pydir
if hasattr(conf,'define'):
conf.define('PYTHONDIR',pydir)
conf.define('PYTHONARCHDIR',pyarchdir)
conf.env['PYTHONDIR']=pydir
conf.env['PYTHONARCHDIR']=pyarchdir
pyver_full='.'.join(map(str,pyver_tuple[:3]))
if minver is None:
conf.msg('Checking for python version',pyver_full)
else:
minver_str='.'.join(map(str,minver))
		conf.msg('Checking for python version >= %s'%minver_str,pyver_full,result and'GREEN'or'YELLOW')
if not result:
conf.fatal('The python version is too old, expecting %r'%(minver,))
PYTHON_MODULE_TEMPLATE='''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
print(str(version))
else:
print('unknown version')
'''
@conf
def check_python_module(conf,module_name,condition=''):
msg='Python module %s'%module_name
if condition:
msg='%s (%s)'%(msg,condition)
conf.start_msg(msg)
try:
ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name])
except Exception:
conf.end_msg(False)
conf.fatal('Could not find the python module %r'%module_name)
ret=ret.strip()
if condition:
conf.end_msg(ret)
if ret=='unknown version':
conf.fatal('Could not check the %s version'%module_name)
from distutils.version import LooseVersion
def num(*k):
if isinstance(k[0],int):
return LooseVersion('.'.join([str(x)for x in k]))
else:
return LooseVersion(k[0])
d={'num':num,'ver':LooseVersion(ret)}
ev=eval(condition,{},d)
if not ev:
conf.fatal('The %s version does not satisfy the requirements'%module_name)
else:
if ret=='unknown version':
conf.end_msg(True)
else:
conf.end_msg(ret)
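# Usage sketch (hypothetical wscript): the optional condition string is evaluated
# with 'ver' bound to the detected version (a LooseVersion) and 'num' building one
# from numbers, e.g.:
#
#   conf.check_python_module('pygccxml')
#   conf.check_python_module('pygccxml', condition="ver >= num(0, 9, 5)")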
def configure(conf):
try:
conf.find_program('python',var='PYTHON')
except conf.errors.ConfigurationError:
Logs.warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable)
conf.env.PYTHON=sys.executable
if conf.env.PYTHON!=sys.executable:
Logs.warn("python executable %r differs from system %r"%(conf.env.PYTHON,sys.executable))
conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON)
v=conf.env
v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
v['PYFLAGS']=''
v['PYFLAGS_OPT']='-O'
v['PYC']=getattr(Options.options,'pyc',1)
v['PYO']=getattr(Options.options,'pyo',1)
def options(opt):
opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc')
opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo')
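# Usage sketch (hypothetical wscript) tying this tool together: check the
# interpreter and headers at configure time, then install .py files with the 'py'
# feature and build a C extension with 'pyext':
#
#   def options(opt):
#       opt.load('python')
#   def configure(conf):
#       conf.load('compiler_c python')
#       conf.check_python_version((2, 6))
#       conf.check_python_headers()
#   def build(bld):
#       bld(features='py', source=bld.path.ant_glob('mylib/**/*.py'))
#       bld(features='c cshlib pyext', source='ext.c', target='ext')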
| aexeagmbh/swampdragon | refs/heads/master | swampdragon/serializers/field_serializers.py | 13 |
from datetime import date
from decimal import Decimal
from django.db.models.fields.files import ImageFieldFile, FileField
class BaseSerializer(object):
def serialize(self, value):
return value
class DateSerializer(BaseSerializer):
def serialize(self, value):
return str(value)
class DecimalSerializer(BaseSerializer):
def serialize(self, value):
return str(value)
class FileSerializer(BaseSerializer):
def serialize(self, value):
try:
return value.url
        except Exception:
            return None
def serialize_field(value):
if isinstance(value, date):
return DateSerializer().serialize(value)
if isinstance(value, Decimal):
        return DecimalSerializer().serialize(value)
if isinstance(value, ImageFieldFile) or isinstance(value, FileField):
return FileSerializer().serialize(value)
return value
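# Illustrative examples (not part of the original module): serialize_field picks a
# serializer by value type and returns anything unmatched unchanged:
#
#   serialize_field(date(2015, 1, 31))   # -> '2015-01-31'
#   serialize_field(Decimal('9.99'))     # -> '9.99'
#   serialize_field(42)                  # -> 42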
| jackxiang/jaikuengine | refs/heads/master | components/tests.py | 36 |
"""Loads tests from components
All tests.py files inside loaded components are imported and any classes
derived from unittest.TestCase are then referenced from this file itself
so that they appear at the top level of the tests "module" that Django will
import.
"""
import os
import types
import unittest
from common import component
test_names = []
for name, loaded_component in component.loaded.iteritems():
test_dir = os.path.dirname(loaded_component.__file__)
for filename in os.listdir(test_dir):
if filename != "tests.py":
continue
    # Import the test file and find all TestCase classes inside it.
test_module = __import__('components.%s.%s' % (name, filename[:-3]),
{}, {},
filename[:-3])
for name in dir(test_module):
item = getattr(test_module, name)
if not (isinstance(item, (type, types.ClassType)) and
issubclass(item, unittest.TestCase)):
continue
# Found a test, bring into the module namespace.
exec "%s = item" % name
test_names.append(name)
# Hide everything other than the test cases from other modules.
__all__ = test_names
| yavuzovski/playground | refs/heads/master | python/django/RESTTest/.venv/lib/python3.4/site-packages/django/contrib/postgres/fields/ranges.py | 109 |
import json
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange, Range
from django.contrib.postgres import forms, lookups
from django.db import models
from django.utils import six
from .utils import AttributeSetter
__all__ = [
'RangeField', 'IntegerRangeField', 'BigIntegerRangeField',
'FloatRangeField', 'DateTimeRangeField', 'DateRangeField',
]
class RangeField(models.Field):
empty_strings_allowed = False
def __init__(self, *args, **kwargs):
# Initializing base_field here ensures that its model matches the model for self.
if hasattr(self, 'base_field'):
self.base_field = self.base_field()
super(RangeField, self).__init__(*args, **kwargs)
@property
def model(self):
try:
return self.__dict__['model']
except KeyError:
raise AttributeError("'%s' object has no attribute 'model'" % self.__class__.__name__)
@model.setter
def model(self, model):
self.__dict__['model'] = model
self.base_field.model = model
def get_prep_value(self, value):
if value is None:
return None
elif isinstance(value, Range):
return value
elif isinstance(value, (list, tuple)):
return self.range_type(value[0], value[1])
return value
def to_python(self, value):
if isinstance(value, six.string_types):
# Assume we're deserializing
vals = json.loads(value)
for end in ('lower', 'upper'):
if end in vals:
vals[end] = self.base_field.to_python(vals[end])
value = self.range_type(**vals)
elif isinstance(value, (list, tuple)):
value = self.range_type(value[0], value[1])
return value
def set_attributes_from_name(self, name):
super(RangeField, self).set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
def value_to_string(self, obj):
value = self.value_from_object(obj)
if value is None:
return None
if value.isempty:
return json.dumps({"empty": True})
base_field = self.base_field
result = {"bounds": value._bounds}
for end in ('lower', 'upper'):
val = getattr(value, end)
if val is None:
result[end] = None
else:
obj = AttributeSetter(base_field.attname, val)
result[end] = base_field.value_to_string(obj)
return json.dumps(result)
def formfield(self, **kwargs):
kwargs.setdefault('form_class', self.form_field)
return super(RangeField, self).formfield(**kwargs)
class IntegerRangeField(RangeField):
base_field = models.IntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return 'int4range'
class BigIntegerRangeField(RangeField):
base_field = models.BigIntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return 'int8range'
class FloatRangeField(RangeField):
base_field = models.FloatField
range_type = NumericRange
form_field = forms.FloatRangeField
def db_type(self, connection):
return 'numrange'
class DateTimeRangeField(RangeField):
base_field = models.DateTimeField
range_type = DateTimeTZRange
form_field = forms.DateTimeRangeField
def db_type(self, connection):
return 'tstzrange'
class DateRangeField(RangeField):
base_field = models.DateField
range_type = DateRange
form_field = forms.DateRangeField
def db_type(self, connection):
return 'daterange'
RangeField.register_lookup(lookups.DataContains)
RangeField.register_lookup(lookups.ContainedBy)
RangeField.register_lookup(lookups.Overlap)
class RangeContainedBy(models.Lookup):
lookup_name = 'contained_by'
type_mapping = {
'integer': 'int4range',
'bigint': 'int8range',
'double precision': 'numrange',
'date': 'daterange',
'timestamp with time zone': 'tstzrange',
}
def as_sql(self, qn, connection):
field = self.lhs.output_field
if isinstance(field, models.FloatField):
sql = '%s::numeric <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
else:
sql = '%s <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return sql % (lhs, rhs), params
def get_prep_lookup(self):
return RangeField().get_prep_value(self.rhs)
models.DateField.register_lookup(RangeContainedBy)
models.DateTimeField.register_lookup(RangeContainedBy)
models.IntegerField.register_lookup(RangeContainedBy)
models.BigIntegerField.register_lookup(RangeContainedBy)
models.FloatField.register_lookup(RangeContainedBy)
@RangeField.register_lookup
class FullyLessThan(lookups.PostgresSimpleLookup):
lookup_name = 'fully_lt'
operator = '<<'
@RangeField.register_lookup
class FullGreaterThan(lookups.PostgresSimpleLookup):
lookup_name = 'fully_gt'
operator = '>>'
@RangeField.register_lookup
class NotLessThan(lookups.PostgresSimpleLookup):
lookup_name = 'not_lt'
operator = '&>'
@RangeField.register_lookup
class NotGreaterThan(lookups.PostgresSimpleLookup):
lookup_name = 'not_gt'
operator = '&<'
@RangeField.register_lookup
class AdjacentToLookup(lookups.PostgresSimpleLookup):
lookup_name = 'adjacent_to'
operator = '-|-'
@RangeField.register_lookup
class RangeStartsWith(models.Transform):
lookup_name = 'startswith'
function = 'lower'
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class RangeEndsWith(models.Transform):
lookup_name = 'endswith'
function = 'upper'
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class IsEmpty(models.Transform):
lookup_name = 'isempty'
function = 'isempty'
output_field = models.BooleanField()
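# Usage sketch (hypothetical model, PostgreSQL backend assumed): range fields accept
# psycopg2 Range instances or two-element sequences, and the lookups/transforms
# registered above become available in queries:
#
#   class Event(models.Model):
#       slots = IntegerRangeField()
#
#   Event.objects.create(slots=NumericRange(5, 20))
#   Event.objects.filter(slots__contains=10)
#   Event.objects.filter(slots__overlap=NumericRange(1, 6))
#   Event.objects.filter(slots__startswith=5)  # lower bound, via the 'lower' transform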
| w1z2g3/crossbar | refs/heads/master | crossbar/router/service.py | 1 |
#####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
import json
from twisted.internet.defer import inlineCallbacks
from autobahn import wamp
from autobahn.wamp.exception import ApplicationError
from autobahn.twisted.wamp import ApplicationSession
from crossbar.router.observation import is_protected_uri
from txaio import make_logger
__all__ = ('RouterServiceSession',)
def _is_restricted_session(session):
return session._authrole is None or session._authrole == u'trusted'
# # extract schema information from WAMP-flavored Markdown
# #
# schemas = None
# if 'schemas' in realm:
# schemas = {}
# schema_pat = re.compile(r"```javascript(.*?)```", re.DOTALL)
# cnt_files = 0
# cnt_decls = 0
# for schema_file in realm.pop('schemas'):
# schema_file = os.path.join(self._cbdir, schema_file)
# self.log.info("{worker}: processing WAMP-flavored Markdown file {schema_file} for WAMP schema declarations",
# worker=worker_logname, schema_file=schema_file)
# with open(schema_file, 'r') as f:
# cnt_files += 1
# for d in schema_pat.findall(f.read()):
# try:
# o = json.loads(d)
# if isinstance(o, dict) and '$schema' in o and o['$schema'] == u'http://wamp.ws/schema#':
# uri = o['uri']
# if uri not in schemas:
# schemas[uri] = {}
# schemas[uri].update(o)
# cnt_decls += 1
# except Exception:
# self.log.failure("{worker}: WARNING - failed to process declaration in {schema_file} - {log_failure.value}",
# worker=worker_logname, schema_file=schema_file)
# self.log.info("{worker}: processed {cnt_files} files extracting {cnt_decls} schema declarations and {len_schemas} URIs",
# worker=worker_logname, cnt_files=cnt_files, cnt_decls=cnt_decls, len_schemas=len(schemas))
class RouterServiceSession(ApplicationSession):
"""
Router service session which is used internally by a router to
issue WAMP calls or publish events, and which provides WAMP meta API
procedures.
"""
log = make_logger()
def __init__(self, config, router, schemas=None):
"""
Ctor.
:param config: WAMP application component configuration.
:type config: Instance of :class:`autobahn.wamp.types.ComponentConfig`.
:param router: The router this service session is running for.
:type: router: instance of :class:`crossbar.router.session.CrossbarRouter`
:param schemas: An (optional) initial schema dictionary to load.
:type schemas: dict
"""
ApplicationSession.__init__(self, config)
self._router = router
self._schemas = {}
if schemas:
self._schemas.update(schemas)
self.log.info('initialized schemas cache with {} entries'.format(len(self._schemas)))
@inlineCallbacks
def onJoin(self, details):
self.log.debug('Router service session attached: {}'.format(details))
regs = yield self.register(self)
self.log.debug('Registered {} procedures'.format(len(regs)))
if self.config.extra and 'onready' in self.config.extra:
self.config.extra['onready'].callback(self)
def onUserError(self, failure, msg):
        # ApplicationErrors are raised deliberately to signal the peer; from our
        # side they have already been handled correctly. Anything else escaped
        # explicit processing on our side: it needs to be logged to the CB log,
        # and the CB code needs to be expanded!
if not isinstance(failure.value, ApplicationError):
super(RouterServiceSession, self).onUserError(failure, msg)
@wamp.register(u'wamp.session.list')
def session_list(self, filter_authroles=None):
"""
Get list of session IDs of sessions currently joined on the router.
:param filter_authroles: If provided, only return sessions with an authrole from this list.
:type filter_authroles: None or list
:returns: List of WAMP session IDs (order undefined).
:rtype: list
"""
assert(filter_authroles is None or type(filter_authroles) == list)
session_ids = []
for session in self._router._session_id_to_session.values():
if not _is_restricted_session(session):
if filter_authroles is None or session._session_details[u'authrole'] in filter_authroles:
session_ids.append(session._session_id)
return session_ids
@wamp.register(u'wamp.session.count')
def session_count(self, filter_authroles=None):
"""
Count sessions currently joined on the router.
:param filter_authroles: If provided, only count sessions with an authrole from this list.
:type filter_authroles: None or list
:returns: Count of joined sessions.
:rtype: int
"""
assert(filter_authroles is None or type(filter_authroles) == list)
session_count = 0
for session in self._router._session_id_to_session.values():
if not _is_restricted_session(session):
if filter_authroles is None or session._session_details[u'authrole'] in filter_authroles:
session_count += 1
return session_count
@wamp.register(u'wamp.session.get')
def session_get(self, session_id):
"""
Get details for given session.
:param session_id: The WAMP session ID to retrieve details for.
:type session_id: int
:returns: WAMP session details.
:rtype: dict or None
"""
if session_id in self._router._session_id_to_session:
session = self._router._session_id_to_session[session_id]
if not _is_restricted_session(session):
return session._session_details
raise ApplicationError(
ApplicationError.NO_SUCH_SESSION,
u'no session with ID {} exists on this router'.format(session_id),
)
@wamp.register(u'wamp.session.kill')
def session_kill(self, session_id, reason=None, message=None):
"""
Forcefully kill a session.
:param session_id: The WAMP session ID of the session to kill.
:type session_id: int
:param reason: A reason URI provided to the killed session.
:type reason: unicode or None
"""
if session_id in self._router._session_id_to_session:
session = self._router._session_id_to_session[session_id]
if not _is_restricted_session(session):
session.leave(reason=reason, message=message)
return
raise ApplicationError(
ApplicationError.NO_SUCH_SESSION,
u'no session with ID {} exists on this router'.format(session_id),
)
@wamp.register(u'wamp.registration.remove_callee')
def registration_remove_callee(self, registration_id, callee_id, reason=None):
"""
Forcefully remove callee from registration.
:param registration_id: The ID of the registration to remove the callee from.
:type registration_id: int
:param callee_id: The WAMP session ID of the callee to remove.
:type callee_id: int
"""
callee = self._router._session_id_to_session.get(callee_id, None)
if not callee:
raise ApplicationError(
ApplicationError.NO_SUCH_SESSION,
u'no session with ID {} exists on this router'.format(callee_id),
)
registration = self._router._dealer._registration_map.get_observation_by_id(registration_id)
if registration and not is_protected_uri(registration.uri):
if callee not in registration.observers:
raise ApplicationError(
ApplicationError.NO_SUCH_REGISTRATION,
u'session {} is not registered on registration {} on this dealer'.format(callee_id, registration_id),
)
self._router._dealer.removeCallee(registration, callee, reason=reason)
else:
raise ApplicationError(
ApplicationError.NO_SUCH_REGISTRATION,
u'no registration with ID {} exists on this dealer'.format(registration_id),
)
@wamp.register(u'wamp.subscription.remove_subscriber')
def subscription_remove_subscriber(self, subscription_id, subscriber_id, reason=None):
"""
Forcefully remove subscriber from subscription.
:param subscription_id: The ID of the subscription to remove the subscriber from.
:type subscription_id: int
:param subscriber_id: The WAMP session ID of the subscriber to remove.
:type subscriber_id: int
"""
subscriber = self._router._session_id_to_session.get(subscriber_id, None)
if not subscriber:
raise ApplicationError(
ApplicationError.NO_SUCH_SESSION,
message=u'no session with ID {} exists on this router'.format(subscriber_id),
)
subscription = self._router._broker._subscription_map.get_observation_by_id(subscription_id)
if subscription and not is_protected_uri(subscription.uri):
if subscriber not in subscription.observers:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'session {} is not subscribed on subscription {} on this broker'.format(subscriber_id, subscription_id),
)
self._router._broker.removeSubscriber(subscription, subscriber, reason=reason)
else:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'no subscription with ID {} exists on this broker'.format(subscription_id),
)
@wamp.register(u'wamp.registration.get')
def registration_get(self, registration_id):
"""
Get registration details.
:param registration_id: The ID of the registration to retrieve.
:type registration_id: int
:returns: The registration details.
:rtype: dict
"""
registration = self._router._dealer._registration_map.get_observation_by_id(registration_id)
if registration and not is_protected_uri(registration.uri):
registration_details = {
u'id': registration.id,
u'created': registration.created,
u'uri': registration.uri,
u'match': registration.match,
u'invoke': registration.extra.invoke,
}
return registration_details
else:
raise ApplicationError(
ApplicationError.NO_SUCH_REGISTRATION,
u'no registration with ID {} exists on this dealer'.format(registration_id),
)
@wamp.register(u'wamp.subscription.get')
def subscription_get(self, subscription_id):
"""
Get subscription details.
:param subscription_id: The ID of the subscription to retrieve.
:type subscription_id: int
:returns: The subscription details.
:rtype: dict
"""
subscription = self._router._broker._subscription_map.get_observation_by_id(subscription_id)
if subscription and not is_protected_uri(subscription.uri):
subscription_details = {
u'id': subscription.id,
u'created': subscription.created,
u'uri': subscription.uri,
u'match': subscription.match,
}
return subscription_details
else:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'no subscription with ID {} exists on this broker'.format(subscription_id),
)
@wamp.register(u'wamp.registration.list')
def registration_list(self):
"""
List current registrations.
:returns: A dictionary with three entries for the match policies 'exact', 'prefix'
and 'wildcard', with a list of registration IDs for each.
:rtype: dict
"""
registration_map = self._router._dealer._registration_map
registrations_exact = []
for registration in registration_map._observations_exact.values():
if not is_protected_uri(registration.uri):
registrations_exact.append(registration.id)
registrations_prefix = []
for registration in registration_map._observations_prefix.values():
if not is_protected_uri(registration.uri):
registrations_prefix.append(registration.id)
registrations_wildcard = []
for registration in registration_map._observations_wildcard.values():
if not is_protected_uri(registration.uri):
registrations_wildcard.append(registration.id)
return {
u'exact': registrations_exact,
u'prefix': registrations_prefix,
u'wildcard': registrations_wildcard,
}
@wamp.register(u'wamp.subscription.list')
def subscription_list(self):
"""
List current subscriptions.
:returns: A dictionary with three entries for the match policies 'exact', 'prefix'
and 'wildcard', with a list of subscription IDs for each.
:rtype: dict
"""
subscription_map = self._router._broker._subscription_map
subscriptions_exact = []
for subscription in subscription_map._observations_exact.values():
if not is_protected_uri(subscription.uri):
subscriptions_exact.append(subscription.id)
subscriptions_prefix = []
for subscription in subscription_map._observations_prefix.values():
if not is_protected_uri(subscription.uri):
subscriptions_prefix.append(subscription.id)
subscriptions_wildcard = []
# FIXME
# for subscription in subscription_map._observations_wildcard.values():
# if not is_protected_uri(subscription.uri):
# subscriptions_wildcard.append(subscription.id)
return {
u'exact': subscriptions_exact,
u'prefix': subscriptions_prefix,
u'wildcard': subscriptions_wildcard,
}
@wamp.register(u'wamp.registration.match')
def registration_match(self, procedure):
"""
Given a procedure URI, return the registration best matching the procedure.
This essentially models what a dealer does for dispatching an incoming call.
:param procedure: The procedure to match.
:type procedure: unicode
:returns: The best matching registration or ``None``.
:rtype: obj or None
"""
registration = self._router._dealer._registration_map.best_matching_observation(procedure)
if registration and not is_protected_uri(registration.uri):
return registration.id
else:
return None
@wamp.register(u'wamp.subscription.match')
def subscription_match(self, topic):
"""
Given a topic URI, returns all subscriptions matching the topic.
This essentially models what a broker does for dispatching an incoming publication.
:param topic: The topic to match.
:type topic: unicode
:returns: All matching subscriptions or ``None``.
:rtype: obj or None
"""
subscriptions = self._router._broker._subscription_map.match_observations(topic)
if subscriptions:
subscription_ids = []
for subscription in subscriptions:
if not is_protected_uri(subscription.uri):
subscription_ids.append(subscription.id)
if subscription_ids:
return subscription_ids
else:
return None
else:
return None
@wamp.register(u'wamp.registration.lookup')
def registration_lookup(self, procedure, options=None):
"""
Given a procedure URI (and options), return the registration (if any) managing the procedure.
This essentially models what a dealer does when registering for a procedure.
:param procedure: The procedure to lookup the registration for.
:type procedure: unicode
:param options: Same options as when registering a procedure.
:type options: dict or None
:returns: The ID of the registration managing the procedure or ``None``.
:rtype: int or None
"""
options = options or {}
match = options.get(u'match', u'exact')
registration = self._router._dealer._registration_map.get_observation(procedure, match)
if registration and not is_protected_uri(registration.uri):
return registration.id
else:
return None
@wamp.register(u'wamp.subscription.lookup')
def subscription_lookup(self, topic, options=None):
"""
Given a topic URI (and options), return the subscription (if any) managing the topic.
This essentially models what a broker does when subscribing for a topic.
:param topic: The topic to lookup the subscription for.
:type topic: unicode
:param options: Same options as when subscribing to a topic.
:type options: dict or None
:returns: The ID of the subscription managing the topic or ``None``.
:rtype: int or None
"""
options = options or {}
match = options.get(u'match', u'exact')
subscription = self._router._broker._subscription_map.get_observation(topic, match)
if subscription and not is_protected_uri(subscription.uri):
return subscription.id
else:
return None
@wamp.register(u'wamp.registration.list_callees')
def registration_list_callees(self, registration_id):
"""
Retrieve list of callees (WAMP session IDs) registered on (attached to) a registration.
:param registration_id: The ID of the registration to get callees for.
:type registration_id: int
:returns: A list of WAMP session IDs of callees currently attached to the registration.
:rtype: list
"""
registration = self._router._dealer._registration_map.get_observation_by_id(registration_id)
if registration and not is_protected_uri(registration.uri):
session_ids = []
for callee in registration.observers:
session_ids.append(callee._session_id)
return session_ids
else:
raise ApplicationError(
ApplicationError.NO_SUCH_REGISTRATION,
u'no registration with ID {} exists on this dealer'.format(registration_id),
)
@wamp.register(u'wamp.subscription.list_subscribers')
def subscription_list_subscribers(self, subscription_id):
"""
Retrieve list of subscribers (WAMP session IDs) subscribed on (attached to) a subscription.
:param subscription_id: The ID of the subscription to get subscribers for.
:type subscription_id: int
:returns: A list of WAMP session IDs of subscribers currently attached to the subscription.
:rtype: list
"""
subscription = self._router._broker._subscription_map.get_observation_by_id(subscription_id)
if subscription and not is_protected_uri(subscription.uri):
session_ids = []
for subscriber in subscription.observers:
session_ids.append(subscriber._session_id)
return session_ids
else:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'no subscription with ID {} exists on this broker'.format(subscription_id),
)
@wamp.register(u'wamp.registration.count_callees')
def registration_count_callees(self, registration_id):
"""
Retrieve number of callees registered on (attached to) a registration.
:param registration_id: The ID of the registration to get the number of callees for.
:type registration_id: int
:returns: Number of callees currently attached to the registration.
:rtype: int
"""
registration = self._router._dealer._registration_map.get_observation_by_id(registration_id)
if registration and not is_protected_uri(registration.uri):
return len(registration.observers)
else:
raise ApplicationError(
ApplicationError.NO_SUCH_REGISTRATION,
u'no registration with ID {} exists on this dealer'.format(registration_id),
)
@wamp.register(u'wamp.subscription.count_subscribers')
def subscription_count_subscribers(self, subscription_id):
"""
Retrieve number of subscribers subscribed on (attached to) a subscription.
:param subscription_id: The ID of the subscription to get the number subscribers for.
:type subscription_id: int
:returns: Number of subscribers currently attached to the subscription.
:rtype: int
"""
subscription = self._router._broker._subscription_map.get_observation_by_id(subscription_id)
if subscription and not is_protected_uri(subscription.uri):
return len(subscription.observers)
else:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'no subscription with ID {} exists on this broker'.format(subscription_id),
)
@wamp.register(u'wamp.subscription.get_events')
def subscription_get_events(self, subscription_id, limit=10):
"""
Return history of events for given subscription.
:param subscription_id: The ID of the subscription to get events for.
:type subscription_id: int
:param limit: Return at most this many events.
:type limit: int
:returns: List of events.
:rtype: list
"""
self.log.debug('subscription_get_events({subscription_id}, {limit})', subscription_id=subscription_id, limit=limit)
if not self._router._broker._event_store:
raise ApplicationError(
u'wamp.error.history_unavailable',
message=u'event history not available or enabled',
)
subscription = self._router._broker._subscription_map.get_observation_by_id(subscription_id)
if subscription and not is_protected_uri(subscription.uri):
events = self._router._broker._event_store.get_events(subscription_id, limit)
if events is None:
                # a return value of None above signals that event history really
                # is not available/enabled (which is different from an empty history!)
raise ApplicationError(
u'wamp.error.history_unavailable',
message=u'event history for the given subscription is not available or enabled',
)
else:
return events
else:
raise ApplicationError(
ApplicationError.NO_SUCH_SUBSCRIPTION,
u'no subscription with ID {} exists on this broker'.format(subscription_id),
)
@wamp.register(u'wamp.test.exception')
def test_exception(self):
raise ApplicationError(u'wamp.error.history_unavailable')
@wamp.register(u'wamp.schema.describe')
def schema_describe(self, uri=None):
"""
Describe a given URI or all URIs.
:param uri: The URI to describe or ``None`` to retrieve all declarations.
:type uri: unicode
        :returns: The declaration for the given URI (``None`` if not defined), or a
            map of all declarations if no URI was given.
        :rtype: dict or None
"""
if uri:
return self._schemas.get(uri, None)
else:
return self._schemas
@wamp.register(u'wamp.schema.define')
def schema_define(self, uri, schema):
"""
Declare metadata for a given URI.
:param uri: The URI for which to declare metadata.
:type uri: unicode
:param schema: The WAMP schema declaration for
the URI or `None` to remove any declarations for the URI.
:type schema: dict
:returns: ``None`` if declaration was unchanged, ``True`` if
declaration was new, ``False`` if declaration existed, but was modified.
:rtype: bool or None
"""
if not schema:
if uri in self._schemas:
                del self._schemas[uri]
self.publish(u'wamp.schema.on_undefine', uri)
return uri
else:
return None
if uri not in self._schemas:
was_new = True
was_modified = False
else:
was_new = False
if json.dumps(schema) != json.dumps(self._schemas[uri]):
was_modified = True
else:
was_modified = False
if was_new or was_modified:
self._schemas[uri] = schema
self.publish(u'wamp.schema.on_define', uri, schema, was_new)
return was_new
else:
return None
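# Usage sketch (hypothetical client session): the meta API registered above is
# reachable from any authorized session as ordinary WAMP calls:
#
#   from twisted.internet.defer import inlineCallbacks
#   from autobahn.twisted.wamp import ApplicationSession
#
#   class Monitor(ApplicationSession):
#       @inlineCallbacks
#       def onJoin(self, details):
#           count = yield self.call(u'wamp.session.count')
#           subs = yield self.call(u'wamp.subscription.list')
#           self.log.info('{n} sessions, {e} exact subscriptions',
#                         n=count, e=len(subs[u'exact']))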
| Frogging101/ArchiveBot | refs/heads/master | test/integration_runner.py | 3 |
import atexit
import glob
import logging
import os
import random
import re
import signal
import subprocess
import sys
import time
import irc.client
class Client(irc.client.SimpleIRCClient):
def __init__(self):
irc.client.SimpleIRCClient.__init__(self)
self.flags = {
'queued': False,
'finished': False,
'ident': None,
}
def on_nicknameinuse(self, connection, event):
connection.nick('{}{}'.format(connection.get_nickname(),
random.randint(0, 99))
)
def on_welcome(self, connection, event):
connection.join('#atbot-test')
def on_join(self, connection, event):
channel = event.target
nickname = event.source.nick
if nickname == 'atbot':
connection.privmsg(
channel,
'{}?{}'.format('!ao http://localhost:8866',
random.randint(0, 1000))
)
def on_part(self, connection, event):
channel = event.target
nickname = event.source.nick
def on_quit(self, connection, event):
nickname = event.source.nick
def on_kick(self, connection, event):
channel = event.target
nickname = self.get_nick_if_possible(event.source)
kicked_nickname = event.arguments[0]
def on_mode(self, connection, event):
channel = event.target
modes_str = ' '.join(event.arguments)
nickname = self.get_nick_if_possible(event.source)
def on_pubmsg(self, connection, event):
channel = event.target
if not irc.client.is_channel(channel):
return
text = event.arguments[0]
nickname = self.get_nick_if_possible(event.source)
if 'Queued' in text:
self.flags['queued'] = True
elif 'finished' in text:
self.flags['finished'] = True
elif '!status' in text:
match = re.search(r'!status ([a-z0-9]+)', text)
self.flags['ident'] = match.group(1)
def on_pubnotice(self, connection, event):
channel = event.target
if not irc.client.is_channel(channel):
return
text = event.arguments[0]
nickname = self.get_nick_if_possible(event.source)
def on_topic(self, connection, event):
channel = event.target
nickname = self.get_nick_if_possible(event.source)
text = event.arguments[0]
def on_nick(self, connection, event):
nickname = event.source.nick
text = event.arguments[0]
@classmethod
def get_nick_if_possible(cls, source):
try:
return source.nick
except AttributeError:
return source
def main():
logging.basicConfig(level=logging.DEBUG)
script_dir = os.path.dirname(__file__)
bot_script = os.path.join(script_dir, 'run_bot.sh')
firehose_script = os.path.join(script_dir, 'run_firehose.sh')
dashboard_script = os.path.join(script_dir, 'run_dashboard.sh')
pipeline_script = os.path.join(script_dir, 'run_pipeline.sh')
cogs_script = os.path.join(script_dir, 'run_cogs.sh')
irc_client = Client()
irc_client.connect('127.0.0.1', 6667, 'obsessive')
print('Wait to avoid reconnect flooding')
for dummy in range(100):
irc_client.reactor.process_once(timeout=0.1)
time.sleep(0.1)
print('.', end='')
sys.stdout.flush()
print()
bot_proc = subprocess.Popen([bot_script], preexec_fn=os.setpgrp)
firehose_proc = subprocess.Popen([firehose_script], preexec_fn=os.setpgrp)
dashboard_proc = subprocess.Popen([dashboard_script], preexec_fn=os.setpgrp)
pipeline_proc = subprocess.Popen([pipeline_script], preexec_fn=os.setpgrp)
cogs_proc = subprocess.Popen([cogs_script], preexec_fn=os.setpgrp)
web_proc = subprocess.Popen(
['python3.4', '-m', 'huhhttp', '--port', '8866'],
preexec_fn=os.setpgrp
)
all_procs = [bot_proc, firehose_proc, dashboard_proc, pipeline_proc, cogs_proc, web_proc]
@atexit.register
def cleanup():
for proc in all_procs:
print('Terminate', proc)
try:
os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
except OSError as error:
print(error)
time.sleep(1)
for proc in all_procs:
print('Kill', proc)
try:
os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
except OSError as error:
print(error)
def check_alive():
bot_proc.poll()
dashboard_proc.poll()
pipeline_proc.poll()
web_proc.poll()
cogs_proc.poll()
assert bot_proc.returncode is None, bot_proc.returncode
assert firehose_proc.returncode is None, firehose_proc.returncode
assert dashboard_proc.returncode is None, dashboard_proc.returncode
assert pipeline_proc.returncode is None, pipeline_proc.returncode
assert web_proc.returncode is None, web_proc.returncode
assert cogs_proc.returncode is None, cogs_proc.returncode
time.sleep(2)
check_alive()
start_time = time.time()
while True:
irc_client.reactor.process_once(timeout=0.2)
time_now = time.time()
if time_now - start_time > 5 * 60:
break
if all(irc_client.flags.values()):
break
flags = irc_client.flags
short_ident = flags['ident'][:5]
flags['warc_dir'] = tuple(
glob.glob('/tmp/warc/*{}*.gz'.format(short_ident))
)
flags['rsync_dir'] = tuple(
glob.glob('/tmp/rsync/*{}*.json'.format(short_ident))
)
print('---FIN---')
print(flags)
if not all(flags.values()):
print('FAIL!')
sys.exit(42)
check_alive()
if __name__ == '__main__':
main()
| arbrandes/edx-platform | refs/heads/master | lms/djangoapps/certificates/migrations/0004_certificategenerationhistory.py | 4 |
import django.utils.timezone
import model_utils.fields
from django.conf import settings
from django.db import migrations, models
from opaque_keys.edx.django.models import CourseKeyField
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('instructor_task', '0001_initial'),
('certificates', '0003_data__default_modes'),
]
operations = [
migrations.CreateModel(
name='CertificateGenerationHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
('course_id', CourseKeyField(max_length=255)),
('is_regeneration', models.BooleanField(default=False)),
('generated_by', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
('instructor_task', models.ForeignKey(to='instructor_task.InstructorTask', on_delete=models.CASCADE)),
],
),
]
| gurneyalex/OpenUpgrade | refs/heads/master | addons/purchase/edi/purchase_order.py | 439 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
from openerp.addons.edi import EDIMixin
PURCHASE_ORDER_LINE_EDI_STRUCT = {
'name': True,
'date_planned': True,
'product_id': True,
'product_uom': True,
'price_unit': True,
'product_qty': True,
# fields used for web preview only - discarded on import
'price_subtotal': True,
}
PURCHASE_ORDER_EDI_STRUCT = {
'company_id': True, # -> to be changed into partner
'name': True,
'partner_ref': True,
'origin': True,
'date_order': True,
'partner_id': True,
#custom: 'partner_address',
'notes': True,
'order_line': PURCHASE_ORDER_LINE_EDI_STRUCT,
#custom: currency_id
# fields used for web preview only - discarded on import
'amount_total': True,
'amount_untaxed': True,
'amount_tax': True,
'state':True,
}
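# Illustrative shape of an exported document (hypothetical values; the real export
# below also adds EDI metadata keys and the extra address/currency entries):
#
#   {
#       'name': 'PO00042',
#       'partner_ref': 'SO0031',
#       'date_order': '2012-01-31',
#       '__import_model': 'sale.order',
#       '__import_module': 'sale',
#       'order_line': [{'name': 'Widget', 'product_qty': 5.0, 'price_unit': 2.5}],
#   }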
class purchase_order(osv.osv, EDIMixin):
_inherit = 'purchase.order'
def edi_export(self, cr, uid, records, edi_struct=None, context=None):
"""Exports a purchase order"""
edi_struct = dict(edi_struct or PURCHASE_ORDER_EDI_STRUCT)
res_company = self.pool.get('res.company')
res_partner_obj = self.pool.get('res.partner')
edi_doc_list = []
for order in records:
# generate the main report
self._edi_generate_report_attachment(cr, uid, order, context=context)
# Get EDI doc based on struct. The result will also contain all metadata fields and attachments.
edi_doc = super(purchase_order,self).edi_export(cr, uid, [order], edi_struct, context)[0]
edi_doc.update({
                # force trans-typing to sale.order upon import
                '__import_model': 'sale.order',
'__import_module': 'sale',
'company_address': res_company.edi_export_address(cr, uid, order.company_id, context=context),
'partner_address': res_partner_obj.edi_export(cr, uid, [order.partner_id], context=context)[0],
'currency': self.pool.get('res.currency').edi_export(cr, uid, [order.pricelist_id.currency_id],
context=context)[0],
})
if edi_doc.get('order_line'):
for line in edi_doc['order_line']:
line['__import_model'] = 'sale.order.line'
edi_doc_list.append(edi_doc)
return edi_doc_list
def edi_import_company(self, cr, uid, edi_document, context=None):
# TODO: for multi-company setups, we currently import the document in the
# user's current company, but we should perhaps foresee a way to select
# the desired company among the user's allowed companies
self._edi_requires_attributes(('company_id','company_address'), edi_document)
res_partner = self.pool.get('res.partner')
xid, company_name = edi_document.pop('company_id')
# Retrofit address info into a unified partner info (changed in v7 - used to keep them separate)
company_address_edi = edi_document.pop('company_address')
company_address_edi['name'] = company_name
company_address_edi['is_company'] = True
company_address_edi['__import_model'] = 'res.partner'
company_address_edi['__id'] = xid # override address ID, as of v7 they should be the same anyway
if company_address_edi.get('logo'):
company_address_edi['image'] = company_address_edi.pop('logo')
company_address_edi['supplier'] = True
partner_id = res_partner.edi_import(cr, uid, company_address_edi, context=context)
# modify edi_document to refer to new partner
partner = res_partner.browse(cr, uid, partner_id, context=context)
partner_edi_m2o = self.edi_m2o(cr, uid, partner, context=context)
edi_document['partner_id'] = partner_edi_m2o
edi_document.pop('partner_address', None) # ignored, that's supposed to be our own address!
return partner_id
def _edi_get_pricelist(self, cr, uid, partner_id, currency, context=None):
# TODO: refactor into common place for purchase/sale, e.g. into product module
partner_model = self.pool.get('res.partner')
partner = partner_model.browse(cr, uid, partner_id, context=context)
pricelist = partner.property_product_pricelist_purchase
if not pricelist:
pricelist = self.pool.get('ir.model.data').get_object(cr, uid, 'purchase', 'list0', context=context)
if not pricelist.currency_id == currency:
# look for a pricelist with the right type and currency, or make a new one
pricelist_type = 'purchase'
product_pricelist = self.pool.get('product.pricelist')
match_pricelist_ids = product_pricelist.search(cr, uid,[('type','=',pricelist_type),
('currency_id','=',currency.id)])
if match_pricelist_ids:
pricelist_id = match_pricelist_ids[0]
else:
pricelist_name = _('EDI Pricelist (%s)') % (currency.name,)
pricelist_id = product_pricelist.create(cr, uid, {'name': pricelist_name,
'type': pricelist_type,
'currency_id': currency.id,
})
self.pool.get('product.pricelist.version').create(cr, uid, {'name': pricelist_name,
'pricelist_id': pricelist_id})
pricelist = product_pricelist.browse(cr, uid, pricelist_id)
return self.edi_m2o(cr, uid, pricelist, context=context)
def _edi_get_location(self, cr, uid, partner_id, context=None):
partner_model = self.pool.get('res.partner')
partner = partner_model.browse(cr, uid, partner_id, context=context)
location = partner.property_stock_customer
if not location:
location = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'stock_location_stock', context=context)
return self.edi_m2o(cr, uid, location, context=context)
def edi_import(self, cr, uid, edi_document, context=None):
self._edi_requires_attributes(('company_id','company_address','order_line','date_order','currency'), edi_document)
#import company as a new partner
partner_id = self.edi_import_company(cr, uid, edi_document, context=context)
# currency for rounding the discount calculations and for the pricelist
res_currency = self.pool.get('res.currency')
currency_info = edi_document.pop('currency')
currency_id = res_currency.edi_import(cr, uid, currency_info, context=context)
order_currency = res_currency.browse(cr, uid, currency_id)
partner_ref = edi_document.pop('partner_ref', False)
edi_document['partner_ref'] = edi_document['name']
edi_document['name'] = partner_ref or edi_document['name']
edi_document['pricelist_id'] = self._edi_get_pricelist(cr, uid, partner_id, order_currency, context=context)
edi_document['location_id'] = self._edi_get_location(cr, uid, partner_id, context=context)
# discard web preview fields, if present
edi_document.pop('amount_total', None)
edi_document.pop('amount_tax', None)
edi_document.pop('amount_untaxed', None)
edi_document.pop('payment_term', None)
edi_document.pop('order_policy', None)
edi_document.pop('user_id', None)
for order_line in edi_document['order_line']:
self._edi_requires_attributes(('date_planned', 'product_id', 'product_uom', 'product_qty', 'price_unit'), order_line)
# original sale order contains unit price and discount, but not final line price
discount = order_line.pop('discount', 0.0)
if discount:
order_line['price_unit'] = res_currency.round(cr, uid, order_currency,
(order_line['price_unit'] * (1 - (discount or 0.0) / 100.0)))
# sale order lines have sequence numbers, not purchase order lines
order_line.pop('sequence', None)
# discard web preview fields, if present
order_line.pop('price_subtotal', None)
return super(purchase_order,self).edi_import(cr, uid, edi_document, context=context)
class purchase_order_line(osv.osv, EDIMixin):
_inherit='purchase.order.line'
| steedos/odoo7 | refs/heads/master | openerp/addons/account_analytic_plans/wizard/analytic_plan_create_model.py | 52 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class analytic_plan_create_model(osv.osv_memory):
_name = "analytic.plan.create.model"
_description = "analytic.plan.create.model"
def activate(self, cr, uid, ids, context=None):
plan_obj = self.pool.get('account.analytic.plan.instance')
mod_obj = self.pool.get('ir.model.data')
        analytic_plan_obj = self.pool.get('account.analytic.plan')
if context is None:
context = {}
if 'active_id' in context and context['active_id']:
plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
if (not plan.name) or (not plan.code):
                raise osv.except_osv(_('Error!'), _('Please set a name and a code before saving the model.'))
            pids = analytic_plan_obj.search(cr, uid, [], context=context)
if not pids:
raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
return {
'name': _('Distribution Model Saved'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'analytic.plan.create.model',
'views': [(resource_id,'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
else:
return {'type': 'ir.actions.act_window_close'}
analytic_plan_create_model()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
polaris/boids
|
refs/heads/master
|
lib/googletest-82b11b8/googletest/scripts/fuse_gtest_files.py
|
2577
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gtest_files.py v0.2.0
Fuses Google Test source code into a .h file and a .cc file.
SYNOPSIS
fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
Scans GTEST_ROOT_DIR for Google Test source code, and generates
two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
Then you can build your tests by adding OUTPUT_DIR to the include
search path and linking with OUTPUT_DIR/gtest/gtest-all.cc. These
two files contain everything you need to use Google Test. Hence
you can "install" Google Test by copying them to wherever you want.
GTEST_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gtest_files.py fused_gtest
./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Test headers. Please report any
problems to googletestframework@googlegroups.com. You can read
http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
more information.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sys
# We assume that this file is in the scripts/ directory in the Google
# Test root directory.
DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
# Regex for matching '#include "gtest/..."'.
INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gtest/.+)"')
# Regex for matching '#include "src/..."'.
INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')
# Where to find the source seed files.
GTEST_H_SEED = 'include/gtest/gtest.h'
GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
GTEST_ALL_CC_SEED = 'src/gtest-all.cc'
# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
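

# A minimal, hypothetical sanity check of the two include regexes above; it is
# not called by this script and exists only as executable documentation.
def _CheckIncludeRegexes():
  assert INCLUDE_GTEST_FILE_REGEX.match('#include "gtest/gtest-spi.h"').group(1) == 'gtest/gtest-spi.h'
  assert INCLUDE_SRC_FILE_REGEX.match('  #  include "src/gtest-all.cc"').group(1) == 'src/gtest-all.cc'
  # Angle-bracket includes are deliberately not matched.
  assert INCLUDE_GTEST_FILE_REGEX.match('#include <gtest/gtest.h>') is None
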
def VerifyFileExists(directory, relative_path):
"""Verifies that the given file exists; aborts on failure.
relative_path is the file path relative to the given directory.
"""
if not os.path.isfile(os.path.join(directory, relative_path)):
print 'ERROR: Cannot find %s in directory %s.' % (relative_path,
directory)
print ('Please either specify a valid project root directory '
'or omit it on the command line.')
sys.exit(1)
def ValidateGTestRootDir(gtest_root):
"""Makes sure gtest_root points to a valid gtest root directory.
The function aborts the program on failure.
"""
VerifyFileExists(gtest_root, GTEST_H_SEED)
VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)
def VerifyOutputFile(output_dir, relative_path):
"""Verifies that the given output file path is valid.
relative_path is relative to the output_dir directory.
"""
# Makes sure the output file either doesn't exist or can be overwritten.
output_file = os.path.join(output_dir, relative_path)
if os.path.exists(output_file):
# TODO(wan@google.com): The following user-interaction doesn't
# work with automated processes. We should provide a way for the
# Makefile to force overwriting the files.
print ('%s already exists in directory %s - overwrite it? (y/N) ' %
(relative_path, output_dir))
answer = sys.stdin.readline().strip()
if answer not in ['y', 'Y']:
print 'ABORTED.'
sys.exit(1)
# Makes sure the directory holding the output file exists; creates
# it and all its ancestors if necessary.
parent_directory = os.path.dirname(output_file)
if not os.path.isdir(parent_directory):
os.makedirs(parent_directory)
def ValidateOutputDir(output_dir):
"""Makes sure output_dir points to a valid output directory.
The function aborts the program on failure.
"""
VerifyOutputFile(output_dir, GTEST_H_OUTPUT)
VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)
def FuseGTestH(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
output_file = file(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
  processed_files = set()  # Holds all gtest headers we've processed.
def ProcessFile(gtest_header_path):
"""Processes the given gtest header file."""
# We don't process the same header twice.
if gtest_header_path in processed_files:
return
processed_files.add(gtest_header_path)
# Reads each line in the given gtest header.
for line in file(os.path.join(gtest_root, gtest_header_path), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
# It's '#include "gtest/..."' - let's process it recursively.
ProcessFile('include/' + m.group(1))
else:
# Otherwise we copy the line unchanged to the output file.
output_file.write(line)
ProcessFile(GTEST_H_SEED)
output_file.close()
def FuseGTestAllCcToFile(gtest_root, output_file):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
  processed_files = set()
def ProcessFile(gtest_source_file):
"""Processes the given gtest source file."""
# We don't process the same #included file twice.
if gtest_source_file in processed_files:
return
processed_files.add(gtest_source_file)
# Reads each line in the given gtest source file.
for line in file(os.path.join(gtest_root, gtest_source_file), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
# It's '#include "gtest/gtest-spi.h"'. This file is not
# #included by "gtest/gtest.h", so we need to process it.
ProcessFile(GTEST_SPI_H_SEED)
else:
# It's '#include "gtest/foo.h"' where foo is not gtest-spi.
# We treat it as '#include "gtest/gtest.h"', as all other
# gtest headers are being fused into gtest.h and cannot be
# #included directly.
# There is no need to #include "gtest/gtest.h" more than once.
if not GTEST_H_SEED in processed_files:
processed_files.add(GTEST_H_SEED)
output_file.write('#include "%s"\n' % (GTEST_H_OUTPUT,))
else:
m = INCLUDE_SRC_FILE_REGEX.match(line)
if m:
# It's '#include "src/foo"' - let's process it recursively.
ProcessFile(m.group(1))
else:
output_file.write(line)
ProcessFile(GTEST_ALL_CC_SEED)
def FuseGTestAllCc(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
output_file = file(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
FuseGTestAllCcToFile(gtest_root, output_file)
output_file.close()
def FuseGTest(gtest_root, output_dir):
"""Fuses gtest.h and gtest-all.cc."""
ValidateGTestRootDir(gtest_root)
ValidateOutputDir(output_dir)
FuseGTestH(gtest_root, output_dir)
FuseGTestAllCc(gtest_root, output_dir)
def main():
argc = len(sys.argv)
if argc == 2:
# fuse_gtest_files.py OUTPUT_DIR
FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
elif argc == 3:
# fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
FuseGTest(sys.argv[1], sys.argv[2])
else:
print __doc__
sys.exit(1)
if __name__ == '__main__':
main()
|
jaddison/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/subelements.py
|
103
|
# (c) 2013, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import *
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.boolean import boolean
FLAGS = ('skip_missing',)
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
def _raise_terms_error(msg=""):
raise AnsibleError(
"subelements lookup expects a list of two or three items, "
+ msg)
        # check lookup terms - check number of terms before touching terms[0],
        # so a malformed input gets the friendly error rather than a TypeError
        if not isinstance(terms, list) or not 2 <= len(terms) <= 3:
            _raise_terms_error()

        terms[0] = listify_lookup_plugin_terms(terms[0], templar=self._templar, loader=self._loader)
# first term should be a list (or dict), second a string holding the subkey
if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], basestring):
_raise_terms_error("first a dict or a list, second a string pointing to the subkey")
subelements = terms[1].split(".")
if isinstance(terms[0], dict): # convert to list:
if terms[0].get('skipped', False) is not False:
# the registered result was completely skipped
return []
elementlist = []
for key in terms[0].iterkeys():
elementlist.append(terms[0][key])
else:
elementlist = terms[0]
# check for optional flags in third term
flags = {}
if len(terms) == 3:
flags = terms[2]
            if not isinstance(flags, dict) or not all(isinstance(key, basestring) and key in FLAGS for key in flags):
_raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)
# build_items
ret = []
for item0 in elementlist:
if not isinstance(item0, dict):
raise AnsibleError("subelements lookup expects a dictionary, got '%s'" % item0)
if item0.get('skipped', False) is not False:
# this particular item is to be skipped
continue
skip_missing = boolean(flags.get('skip_missing', False))
subvalue = item0
lastsubkey = False
sublist = []
for subkey in subelements:
if subkey == subelements[-1]:
lastsubkey = True
                if subkey not in subvalue:
if skip_missing:
continue
else:
raise AnsibleError("could not find '%s' key in iterated item '%s'" % (subkey, subvalue))
if not lastsubkey:
if not isinstance(subvalue[subkey], dict):
if skip_missing:
continue
else:
raise AnsibleError("the key %s should point to a dictionary, got '%s'" % (subkey, subvalue[subkey]))
else:
subvalue = subvalue[subkey]
else: # lastsubkey
if not isinstance(subvalue[subkey], list):
raise AnsibleError("the key %s should point to a list, got '%s'" % (subkey, subvalue[subkey]))
else:
sublist = subvalue.pop(subkey, [])
for item1 in sublist:
ret.append((item0, item1))
return ret
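

def _subelements_pairs_sketch(records, subkey):
    # Hypothetical standalone sketch (not used by Ansible) of the pairing done
    # in LookupModule.run() above, without flag handling or dotted-key
    # traversal: one (record, subelement) tuple per subelement, e.g.
    #   _subelements_pairs_sketch([{'name': 'alice', 'groups': ['wheel']}], 'groups')
    #   -> [({'name': 'alice', 'groups': ['wheel']}, 'wheel')]
    return [(rec, sub) for rec in records for sub in rec.get(subkey, [])]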
|
dsaraujo/circulante
|
refs/heads/master
|
django/conf/locale/sr_Latn/formats.py
|
655
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
'%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
'%Y-%m-%d', # '2006-10-25'
# '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.'
# '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.'
# '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
'%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
'%d.%m.%Y.', # '25.10.2006.'
'%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
'%d.%m.%y. %H:%M', # '25.10.06. 14:30'
'%d.%m.%y.', # '25.10.06.'
'%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
'%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
'%d. %m. %Y.', # '25. 10. 2006.'
'%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
'%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
'%d. %m. %y.', # '25. 10. 06.'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
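
# For reference: with the formats above, datetime.date(2006, 10, 25) renders
# as '25.10.2006.' under SHORT_DATE_FORMAT, and the same string parses back
# through the first DATE_INPUT_FORMATS entry ('%d.%m.%Y.').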
|
uwcirg/true_nth_usa_portal
|
refs/heads/develop
|
portal/config/config_persistence.py
|
1
|
import os
from flask import current_app
from .config import SITE_CFG
from .model_persistence import ModelPersistence
class ConfigPersistence(ModelPersistence):
def __init__(self, target_dir):
super(ConfigPersistence, self).__init__(
model_class=None, target_dir=target_dir)
def import_(self, keep_unmentioned=None):
data = self.__read__()
self.__verify_header__(data)
cfg_file = os.path.join(current_app.instance_path, SITE_CFG)
        if len(data['entry']) != 1:
            raise ValueError(
                "expecting a single {} entry in {}".format(
                    SITE_CFG, self.filename))
cfg_data = data['entry'][0]
if cfg_data.get('resourceType') != SITE_CFG:
raise ValueError(
"didn't find expected 'resourceType': {}".format(
SITE_CFG))
with open(cfg_file, 'w') as fp:
for line in cfg_data['results']:
fp.write(line)
def serialize(self):
cfg_file = os.path.join(current_app.instance_path, SITE_CFG)
with open(cfg_file, 'r') as fp:
            results = fp.readlines()
# Package like all other resourceType bundles
return [{"resourceType": SITE_CFG, "results": results}]
def export_config(target_dir):
config_persistence = ConfigPersistence(target_dir=target_dir)
return config_persistence.export()
def import_config(target_dir):
config_persistence = ConfigPersistence(target_dir=target_dir)
config_persistence.import_()
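

def _example_bundle():
    # A minimal, hypothetical persistence bundle of the shape import_()
    # expects: exactly one entry whose 'results' holds the raw lines of the
    # site config file (the header fields checked by __verify_header__ are
    # omitted here).
    return {'entry': [{
        'resourceType': SITE_CFG,
        'results': ["SECRET_KEY = 'example'\n", "DEBUG = False\n"],
    }]}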
|
leogulus/pisco_pipeline
|
refs/heads/master
|
panstarr_photometry_all.py
|
1
|
import sys, os, re, yaml, subprocess, shlex, FITS_tools
import pandas as pd
import numpy as np
import pickle
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import image
import matplotlib.cm as cm
import matplotlib.image as mpimg
from scipy.optimize import curve_fit
import scipy.integrate as integrate
from scipy import interpolate
from scipy.interpolate import interp1d
import scipy.stats
from astropy.io import fits
from astropy.table import Table, join
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.cosmology import FlatLambdaCDM
cosmo = FlatLambdaCDM(H0=71, Om0=0.3, Tcmb0=2.725)
import extra_program as ex
from PIL import Image as Image_PIL
import ebvpy #Galactic Reddening
"""
Example:
python pisco_pipeline/panstarr_photometry_all.py PKS1353 psf allslr 2mass
python pisco_pipeline/panstarr_photometry_all.py PKS1353 psf allslr no2mass
python pisco_pipeline/panstarr_photometry_all.py PKS1353 psf noslr no2mass
python pisco_pipeline/panstarr_photometry_all.py PKS1353 model noslr no2mass
python pisco_pipeline/panstarr_photometry_all.py PKS1353 auto slr 2mass
field: name of the fields
mode: psf, auto, aper, hybrid, model
allslr:
- allslr: run everything including photometry_v4, cut_frame, SLR
- slr: run just SLR and update the color
- noslr: don't run slr, just update the color with different modes
2mass
- 2mass: run SLR with 2MASS to match
- no2mass: run SLR without 2MASS
"""
###--------------------------------------------------------------------------###
def find_seeing(field,band):
df_see=pd.read_csv('/Users/taweewat/Documents/red_sequence/total_chips_field_seeing.csv',index_col=0)
if field[0:5]=='CHIPS':
seeing = df_see[df_see.chips==field]['seeing_q25_%s'%band].values[0] #_%s'%band
return seeing
elif (field[0:5]=='Field')|(field[0:3]=='PKS')|(field[0:4]=='SDSS'):
seeing = df_see[df_see.name==field]['seeing_q25_%s'%band].values[0] #_%s'%band
return seeing
def find_seeing_fits(field):
home='/Users/taweewat/Documents/pisco_code/'
dirs=['ut170103/','ut170104/','ut170619/','ut170621/','ut170624/','ut171208/','ut171209/','ut171212/']
myReg=re.compile(r'(%s_A).*'%field)
for di in dirs:
        diri = home + di
        for text in os.listdir(diri):
            if myReg.search(text) != None:
                seeing = float(fits.open(diri + myReg.search(text).group())[0].header['FWHM1'])
    seeing = 0.5  # NOTE: hard-coded override of the header FWHM above, kept from the original pipeline
    return seeing
def read_param():
with open("pisco_pipeline/params.yaml", 'r') as stream:
try:
param=yaml.load(stream)
return param
except yaml.YAMLError as exc:
print(exc)
def read_param_izp(mode):
    # Every mode currently maps to the same zeropoint file; the suffix is a
    # placeholder for per-mode files (e.g. '_model').
    mode_izp = ''
# print "/Users/taweewat/Documents/pisco_code/pisco_pipeline/params_izeropoint%s.yaml" % mode_izp
with open("/Users/taweewat/Documents/pisco_code/pisco_pipeline/params_izeropoint%s.yaml"%mode_izp, 'r') as stream:
try:
param=yaml.load(stream)
return param
except yaml.YAMLError as exc:
print(exc)
def star_galaxy_bleem(field):
sg_dir = 'star_galaxy'
if not os.path.exists(sg_dir):
os.makedirs(sg_dir)
param=read_param()
seeing=find_seeing(field,'i')
# seeing=1.5
# seeing=0.95
minarea=1.7
data, header = fits.getdata('final/coadd_c%s_i.fits'%field, header=True)
data2=data**2
pxscale=0.22
fits.writeto('final/coadd_c%s_sq_i.fits'%field, data2, header=header, overwrite=True)
cmd='sex final/coadd_c%s_i.fits -c pisco_pipeline/config.sex -PARAMETERS_NAME pisco_pipeline/%s -CATALOG_NAME %s -CATALOG_TYPE FITS_1.0 -SEEING_FWHM %s -SATUR_LEVEL %s -PHOT_APERTURES 15 -PIXEL_SCALE %s -DETECT_MINAREA %s -CHECKIMAGE_NAME checki.fits,segmenti.fits'%\
(field,'sex.param',sg_dir+'/%s_catalog.fits'%(field),str(seeing),str(param['satur_level_i_psf']),str(pxscale),str(1.1/minarea*np.pi*(seeing/pxscale)**2)); print cmd
subprocess.check_call(shlex.split(cmd))
cmd='sex final/coadd_c%s_i.fits,final/coadd_c%s_sq_i.fits -c pisco_pipeline/config.sex -PARAMETERS_NAME pisco_pipeline/%s -CATALOG_NAME %s -CATALOG_TYPE FITS_1.0 -SEEING_FWHM %s -SATUR_LEVEL %s -PHOT_APERTURES 15 -PIXEL_SCALE %s -DETECT_MINAREA %s'%\
(field,field,'sex.param',sg_dir+'/%s_sq_catalog.fits'%(field),str(seeing),str(param['satur_level_i_sq_psf']),str(pxscale),str(1.1/minarea*np.pi*(seeing/pxscale)**2)); print cmd
subprocess.check_call(shlex.split(cmd))
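
# DETECT_MINAREA above scales with the seeing disk as
# 1.1/minarea * pi * (seeing/pxscale)**2; with hypothetical numbers
# seeing=1.2", pxscale=0.22"/px and minarea=1.7 that gives
# 0.647 * pi * 5.45**2 ~ 60 px of minimum detection area.
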
def pisco_photometry_v4(field):
    def aperture_proj(field, band):
param=read_param()
# seeing=find_seeing(field,band)
# seeing=1.5
seeing=0.5
seeing_class=1.8
# saturation=9.0
saturation=54000.
data, header = fits.getdata('/Users/taweewat/Documents/red_sequence/panstar/coadd_panstar_{}_{}.fits'.format(field,band), header=True)
data2=data*6000.
fits.writeto('/Users/taweewat/Documents/red_sequence/panstar/coadd_scaled_panstar_{}_{}.fits'.format(field,band), data2, header=header, overwrite=True)
slrdir = 'slr_output'
# to_be_projected = '/Users/taweewat/Documents/red_sequence/panstar/coadd_panstar_{}_{}.fits'.format(field,band)
to_be_projected = '/Users/taweewat/Documents/red_sequence/panstar/coadd_scaled_panstar_{}_{}.fits'.format(field,band)
reference_fits = '/Users/taweewat/Documents/red_sequence/panstar/coadd_panstar_{}_i.fits'.format(field)
im1,im2, header = FITS_tools.match_fits(to_be_projected,reference_fits,return_header=True)
outname = 'final/proj_coadd_panstar_%s_%s.fits'%(field,band)
        print 'projecting %s band onto the i-band frame: ' % band + outname
fits.writeto(outname, im1, header, overwrite=True)
minarea=1.7 #1.7
pxscale=0.25
cmd='sex /Users/taweewat/Documents/red_sequence/panstar/coadd_scaled_panstar_%s_%s.fits -c pisco_pipeline/config.sex -PARAMETERS_NAME pisco_pipeline/%s -CATALOG_NAME %s -SEEING_FWHM %s -SATUR_LEVEL %s -PHOT_APERTURES 23 -PIXEL_SCALE %s -DETECT_MINAREA %s -CHECKIMAGE_NAME check_panstar_psf_%s.fits,segment_panstar_psf_%s.fits'%\
(field,band,'sex_fwhm_psf.param','psfex_output/psf_%s_%s.fits'%(field,band),str(seeing_class),str(saturation),str(pxscale),str(1.1/minarea*np.pi*(seeing/pxscale)**2), band, band)
print cmd
subprocess.check_call(shlex.split(cmd))
Tf=Table(fits.open('psfex_output/psf_%s_%s.fits'%(field,band))[2].data)
Tf=Tf[(Tf['FLUX_APER']>0)]
df0 = pd.read_csv('/Users/taweewat/Documents/red_sequence/{}_star_list.csv'.format(field),index_col=0)
x=np.array([286.0227455650082,285.9411038202907,286.0569138078614,285.9817436730952,286.00556207826133,286.01921620713756])
real=np.array([287.7272544,287.8089409,287.6931021,287.7682687,287.7444376,287.7307988])
p=np.poly1d(np.polyfit(x,real,1))
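        # The two hard-coded arrays above pair measured SExtractor RAs (x) with
        # their catalogued counterparts (real); p() is the resulting linear
        # astrometric correction applied to every ALPHA_J2000 in this band.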
# c0 = SkyCoord(ra=df0['raMean'].values*u.degree, dec=df0['decMean'].values*u.degree)
# csex = SkyCoord(ra=p(np.array(Tf['ALPHA_J2000']))*u.degree, dec=np.array(Tf['DELTA_J2000'])*u.degree)
# idxn, d2dn, d3dn=csex.match_to_catalog_sky(c0)
# Tfcut=Tf[d2dn.to(u.arcsecond).value<2]
Tfcut=Tf
print "len of Tfcut after 2 arcsecond: {}".format(len(Tfcut))
vignet_bad=[]
for i in range(len(Tfcut)):
vignet_bad.append(np.sum(Tfcut['VIGNET'][i].ravel()<-9e+29))
Tfcut['VIGNET_bad']=vignet_bad
# Tfcut=Tfcut[(Tfcut['FLAGS'] == 0) & (Tfcut['VIGNET_bad'] < 20)]# & (Tfcut['FLUX_APER'] > 300)].copy()
Tfcut = Tfcut[(Tfcut['VIGNET_bad'] < 20)].copy() #(Tfcut['CLASS_STAR'] > 0.70) & (Tfcut['FLAGS'] < 4) &
Tfcut = Tfcut[(-2.5*np.log10(Tfcut['FLUX_APER'])<-16.)] #
Tfcut_edge=Tfcut#[(Tfcut['XWIN_IMAGE']<np.max(Tfcut['XWIN_IMAGE'])-60)&(Tfcut['XWIN_IMAGE']>np.min(Tfcut['XWIN_IMAGE'])+60)&\
#(Tfcut['YWIN_IMAGE']<np.max(Tfcut['YWIN_IMAGE'])-60)&(Tfcut['YWIN_IMAGE']>np.min(Tfcut['YWIN_IMAGE'])+60)].copy()
Tfcut_more=Tfcut_edge[(np.abs(Tfcut_edge['FLUX_RADIUS']-np.mean(Tfcut_edge['FLUX_RADIUS']))<2*np.std(Tfcut_edge['FLUX_RADIUS']))]
Tfcut_more2=Tfcut_more[(np.abs(Tfcut_more['ELONGATION']-np.mean(Tfcut_more['ELONGATION']))<2*np.std(Tfcut_more['ELONGATION']))].copy()
print "length of Tf: all: {}, cut: {}, edges: {}, flux_radius: {}, elong: {}".format(len(Tf), len(Tfcut), len(Tfcut_edge), len(Tfcut_more), len(Tfcut_more2))
hdu = fits.open('psfex_output/psf_%s_%s.fits'%(field,band))
hdu[2].data = hdu[2].data[Tfcut_more2['NUMBER']-1]
# hdu[2].data = hdu[2].data[Tfcut['NUMBER']-1]
hdu.writeto('psfex_output/psf_%s_%s.fits'%(field,band), overwrite=True)
cmd='psfex %s -c pisco_pipeline/panstarr.psfex' % ('psfex_output/psf_%s_%s.fits'%(field,band))
print cmd
subprocess.check_call(shlex.split(cmd))
cmd='sex /Users/taweewat/Documents/red_sequence/panstar/coadd_scaled_panstar_%s_i.fits,final/proj_coadd_panstar_%s_%s.fits -c pisco_pipeline/config.sex -PSF_NAME %s -PARAMETERS_NAME pisco_pipeline/%s -CATALOG_NAME %s -SEEING_FWHM %s -SATUR_LEVEL %s -PIXEL_SCALE %s -CATALOG_TYPE FITS_1.0 -PHOT_APERTURES 23 -DETECT_MINAREA %s -CHECKIMAGE_NAME check%s.fits,segment%s.fits'%\
(field, field, band, 'psfex_output/psf_%s_%s.psf' % (field, band), 'sex_after_psf.param', '%s/a_psf_%s_%s.fits' % (slrdir, field, band),
str(seeing_class), str(saturation), str(pxscale), str(1.1 / minarea * np.pi * (seeing / pxscale)**2), band, band)
print cmd
subprocess.check_call(shlex.split(cmd))
table=Table.read('%s/a_psf_%s_%s.fits'%(slrdir,field,band))
table['ALPHA_J2000']=p(np.array(table['ALPHA_J2000']))
for name in table.colnames[:]:
table.rename_column(name, name + '_%s' % band)
return table
# return Tf
slrdir = 'slr_output'
if not os.path.exists(slrdir):
os.makedirs(slrdir)
    tableg = aperture_proj(field, 'g')
    tablei = aperture_proj(field, 'i')
    tabler = aperture_proj(field, 'r')
    # tablez = aperture_proj(field, 'z')
print 'len of all table: {}, {}, {}'.format(len(tableg), len(tablei), len(tabler))
ci=SkyCoord(ra=np.array(tablei['ALPHA_J2000_i'])*u.degree, dec=np.array(tablei['DELTA_J2000_i'])*u.degree)# print len(ci)
cg=SkyCoord(ra=np.array(tableg['ALPHA_J2000_g'])*u.degree, dec=np.array(tableg['DELTA_J2000_g'])*u.degree)# print len(cg)
cr=SkyCoord(ra=np.array(tabler['ALPHA_J2000_r'])*u.degree, dec=np.array(tabler['DELTA_J2000_r'])*u.degree)# print len(cr)
# cz=SkyCoord(ra=np.array(tablez['ALPHA_J2000_z'])*u.degree, dec=np.array(tablez['DELTA_J2000_z'])*u.degree)# print len(cz)
idxn, d2dn, d3dn=cg.match_to_catalog_sky(ci)
Table_I=tablei[idxn][['NUMBER_i','XWIN_IMAGE_i','YWIN_IMAGE_i','ALPHA_J2000_i','DELTA_J2000_i','MAG_APER_i','MAGERR_APER_i','MAG_AUTO_i','MAGERR_AUTO_i','MAG_SPHEROID_i','MAGERR_SPHEROID_i',\
'CLASS_STAR_i','FLAGS_i','MAG_PSF_i','MAGERR_PSF_i','MAG_MODEL_i','MAGERR_MODEL_i','SPREAD_MODEL_i','SPREADERR_MODEL_i']]
Table_I.rename_column('ALPHA_J2000_i','ALPHA_J2000')
Table_I.rename_column('DELTA_J2000_i','DELTA_J2000')
idxn, d2dn, d3dn=cg.match_to_catalog_sky(cr)
Table_R=tabler[idxn][['NUMBER_r','ALPHA_J2000_r','DELTA_J2000_r','MAG_APER_r','MAGERR_APER_r','MAG_AUTO_r','MAGERR_AUTO_r','MAG_SPHEROID_r','MAGERR_SPHEROID_r',\
'CLASS_STAR_r','FLAGS_r','MAG_PSF_r','MAGERR_PSF_r','MAG_MODEL_r','MAGERR_MODEL_r','SPREAD_MODEL_r','SPREADERR_MODEL_r']]
Table_R.rename_column('ALPHA_J2000_r','ALPHA_J2000')
Table_R.rename_column('DELTA_J2000_r','DELTA_J2000')
Table_G = tableg[['NUMBER_g', 'ALPHA_J2000_g', 'DELTA_J2000_g', 'MAG_APER_g', 'MAGERR_APER_g', 'MAG_AUTO_g', 'MAGERR_AUTO_g', 'MAG_SPHEROID_g', 'MAGERR_SPHEROID_g',
'CLASS_STAR_g','FLAGS_g','MAG_PSF_g','MAGERR_PSF_g','MAG_MODEL_g','MAGERR_MODEL_g','SPREAD_MODEL_g','SPREADERR_MODEL_g']]
Table_G.rename_column('ALPHA_J2000_g','ALPHA_J2000')
Table_G.rename_column('DELTA_J2000_g','DELTA_J2000')
print 'len of all new table', len(Table_G), len(Table_I), len(Table_R)
total=join(join(Table_I,Table_G,keys=['ALPHA_J2000','DELTA_J2000']),Table_R,keys=['ALPHA_J2000','DELTA_J2000'])
total.write(os.path.join(slrdir, 'total0_psf_%s.csv' % field), overwrite=True)
total2=total[['ALPHA_J2000','DELTA_J2000','NUMBER_i','NUMBER_r','NUMBER_g','XWIN_IMAGE_i','YWIN_IMAGE_i',\
'MAG_APER_i','MAGERR_APER_i','MAG_APER_g','MAGERR_APER_g','MAG_APER_r','MAGERR_APER_r','MAG_AUTO_i',\
'MAGERR_AUTO_i','MAG_AUTO_g','MAGERR_AUTO_g','MAG_AUTO_r','MAGERR_AUTO_r','MAG_SPHEROID_i',\
'MAGERR_SPHEROID_i','MAG_SPHEROID_g','MAGERR_SPHEROID_g','MAG_SPHEROID_r','MAGERR_SPHEROID_r',\
'CLASS_STAR_i','CLASS_STAR_g','CLASS_STAR_r','FLAGS_g','FLAGS_r','FLAGS_i','MAG_PSF_g',\
'MAG_PSF_r','MAG_PSF_i','MAGERR_PSF_g','MAGERR_PSF_r','MAGERR_PSF_i','MAG_MODEL_g','MAG_MODEL_r',\
'MAG_MODEL_i','MAGERR_MODEL_g','MAGERR_MODEL_r','MAGERR_MODEL_i','SPREAD_MODEL_g','SPREAD_MODEL_r',\
'SPREAD_MODEL_i','SPREADERR_MODEL_g','SPREADERR_MODEL_r','SPREADERR_MODEL_i']]
total2.write(os.path.join(slrdir, 'total_psf_%s.csv' % field), overwrite=True)
def pisco_cut_star(field,c_a,c_b,c_d,c_delta):
seeing=find_seeing_fits(field)
true_seeing=find_seeing(field,'i')
df_i=Table(fits.open('/Users/taweewat/Documents/pisco_code/star_galaxy/%s_catalog.fits'%field)[1].data).to_pandas()
df_isq=Table(fits.open('/Users/taweewat/Documents/pisco_code/star_galaxy/%s_sq_catalog.fits'%field)[1].data).to_pandas()
    # Cut the catalogs so the squared-image list and the PSF-magnitude list
    # contain the same set of objects.
fname = "/Users/taweewat/Documents/pisco_code/slr_output/total_psf_%s.csv"%field
df0 = pd.read_csv(fname)
df0['NUMBER'] = np.arange(0, len(df0), 1).tolist()
cf_i=SkyCoord(ra=np.array(df_i['ALPHA_J2000'])*u.degree, dec=np.array(df_i['DELTA_J2000'])*u.degree)
cf_isq=SkyCoord(ra=np.array(df_isq['ALPHA_J2000'])*u.degree, dec=np.array(df_isq['DELTA_J2000'])*u.degree)
cf0=SkyCoord(ra=np.array(df0['ALPHA_J2000'])*u.degree, dec=np.array(df0['DELTA_J2000'])*u.degree)
df0.rename(columns={'ALPHA_J2000': 'ALPHA_J2000_i'}, inplace=True)
df0.rename(columns={'DELTA_J2000': 'DELTA_J2000_i'}, inplace=True)
idxn, d2dn, d3dn=cf0.match_to_catalog_sky(cf_i)
df_i_cut0=df_i.loc[idxn].copy()
df_i_cut0['NUMBER']=np.arange(0,len(df0),1).tolist()
df_i_cut=pd.merge(df_i_cut0,df0,on='NUMBER')
idxn, d2dn, d3dn=cf0.match_to_catalog_sky(cf_isq)
df_isq_cut0=df_isq.loc[idxn].copy()
df_isq_cut0['NUMBER']=np.arange(0,len(df0),1).tolist()
df_isq_cut=pd.merge(df_isq_cut0,df0,on='NUMBER')
fig,ax=plt.subplots(2,3,figsize=(15,10))
df_i0=df_i_cut[(df_i_cut.MAG_APER<0)&(df_isq_cut.MAG_APER<0)]
df_isq0=df_isq_cut[(df_i_cut.MAG_APER<0)&(df_isq_cut.MAG_APER<0)]# print len(df_i), len(df_isq)
# c_d=-7.5
df_i2=df_i0[(df_i0.CLASS_STAR>c_a) & (df_i0.MAG_APER<c_d)]# & (df_i0.MAG_APER>c_c)]
df_isq2=df_isq0[(df_i0.CLASS_STAR>c_a) & (df_i0.MAG_APER<c_d)]# & (df_i0.MAG_APER>c_c)];# print len(df_i2), len(df_isq2)
icut_per=np.percentile(df_i2.MAG_APER,35) #35
df_i3=df_i2[df_i2.MAG_APER>icut_per]
df_isq3=df_isq2[df_i2.MAG_APER>icut_per]
fit=np.polyfit(df_i3.MAG_APER, df_i3.MAG_APER-df_isq3.MAG_APER, 1)
f=np.poly1d(fit)
ax[0,0].plot(df_i2.MAG_APER,f(df_i2.MAG_APER),'--')
res=(df_i3.MAG_APER-df_isq3.MAG_APER)-f(df_i3.MAG_APER)
aa=np.abs(res)<1.5*np.std(res)
# outl=np.abs(res)>=1.5*np.std(res)
fit=np.polyfit(df_i3.MAG_APER[aa], df_i3.MAG_APER[aa]-df_isq3.MAG_APER[aa], 1)
f=np.poly1d(fit)
ax[0,0].axvline(icut_per,color='blue',label='35th quantile')
ax[0,0].errorbar(df_i2.MAG_APER,df_i2.MAG_APER-df_isq2.MAG_APER,yerr=np.sqrt(df_i2.MAGERR_APER**2+df_isq2.MAGERR_APER**2),fmt='o')
ax[0,0].set_title('only for star')
ax[0,0].plot(df_i2.MAG_APER,f(df_i2.MAG_APER),'--',label='no outlier')
ax[0,0].set_ylabel('MAG_APER-MAG_APER_sq')
ax[0,0].set_xlabel('MAG APER i')
    # New bright-end cut: the brightest star whose residual from the fitted
    # line stays within 0.1 mag (0.1 default; 0.2 also tried).  #edit10/30 (previous 0.1)
    c_c = np.min(df_i2[f(df_i2.MAG_APER) - (df_i2.MAG_APER - df_isq2.MAG_APER) < 0.1]['MAG_APER'].values)
ax[0,0].axvline(c_c,color='red',label='new upper cut')
ax[0,0].legend(loc='best')
# color_axis='CLASS_STAR'
color_axis='SPREAD_MODEL_i'
ax[0,1].scatter(df_i0.MAG_APER,df_i0.MAG_APER-df_isq0.MAG_APER,marker='.',c=df_i0[color_axis],vmin=0., vmax=0.005)
ax[0,1].plot(df_i3.MAG_APER,df_i3.MAG_APER-df_isq3.MAG_APER,'x')
ax[0,1].set_title('for all objects')
ax[0,1].set_ylabel('MAG_APER-MAG_APER_sq')
ax[0,1].set_xlabel('MAG APER i')
ax[0,1].axvline(c_b,ls='--')
ax[0,1].axvline(c_c,ls='--')
delta=(df_i0.MAG_APER-df_isq0.MAG_APER) - f(df_i0.MAG_APER)
ax[0,2].scatter(df_i0.MAG_APER,delta,marker='.',c=df_i0[color_axis],vmin=0., vmax=0.005)
ax[0,2].axhline(0,ls='--')
ax[0,2].axvline(c_c,ls='--')
ax[0,2].axvline(c_b,ls='--')
ax[0,2].set_ylabel('Delta')
ax[0,2].set_xlabel('MAG APER i')
ax[0,2].set_ylim(0.5,-1.2)
df_i1=df_i0[(df_i0.MAG_APER>c_c)&(df_i0.MAG_APER<c_b)].copy()
df_isq1=df_isq0[(df_i0.MAG_APER>c_c)&(df_i0.MAG_APER<c_b)].copy()
delta1=(df_i1.MAG_APER-df_isq1.MAG_APER) - f(df_i1.MAG_APER)
ax[1,0].scatter(df_i1.MAG_APER, delta1, marker='o', c=df_i1[color_axis],vmin=0., vmax=0.005)
ax[1,0].axhline(0,ls='--')
ax[1,0].axhline(c_delta, ls='--')
ax[1,0].set_ylabel('Delta')
ax[1,0].set_xlabel('MAG APER i')
ax[1,0].set_ylim(0.5,-2)
# deltag=delta1[delta1<c_delta] #galaxy 0.1, 0.2 (0.005), 0.5 ()
deltas=delta1[(delta1>=c_delta)&(delta1<3.)] #star
def gauss(x, *p):
A, mu, sigma = p
return A*np.exp(-(x-mu)**2/(2.*sigma**2))
p0 = [1., 0., 0.1]
# def gauss(x, *p):
# A, sigma = p
# return A*np.exp(-(x-0)**2/(2.*sigma**2))
# p0 = [1., 0.1]
#galaxy
# hist, bin_edges = np.histogram(deltag,bins=np.arange(-1.2,0.5,0.02))
hist, bin_edges = np.histogram(delta1,bins=np.arange(-1.2,0.5,0.02))
bin_centres = (bin_edges[:-1] + bin_edges[1:])/2
ax[1,1].plot(bin_centres, hist, label='galaxies',linestyle='steps')
#stars
hist, bin_edges = np.histogram(deltas,bins=np.arange(-1,0.5,0.02)) #(0 vs -1,0.5,0.02)
# hist, bin_edges = np.histogram(delta1, bins=np.arange(c_delta, 0.5, 0.02))
bin_centres = (bin_edges[:-1] + bin_edges[1:])/2
coeff2, var_matrix = curve_fit(gauss, bin_centres, hist, p0=p0)
ax[1,1].plot(bin_centres, hist, label='stars',linestyle='steps')
# hist, bin_edges = np.histogram(delta1,bins=np.arange(-1.2,0.5,0.02)) #added for right gaussian fitting
# bin_centres = (bin_edges[:-1] + bin_edges[1:])/2 # added for right gaussian fitting
x=np.arange(-1.25,0.5,0.02)
hist_fit2 = gauss(x, *coeff2)
hist_fit3 = gauss(x, *coeff2)/np.max(gauss(x, *coeff2)) #added for right gaussian fitting
ax[1,1].plot(x, hist_fit2, label='stars_fit')
ax[1,1].plot(x, hist_fit3, label='stars_fit_norm') #added for right gaussian fitting
ax[1,1].axvline(x[hist_fit3>star_cut][0],c='tab:pink',label='cut:%.3f'%x[hist_fit3>star_cut][0]) #added for right gaussian fitting
ax[1,1].legend(loc='best')
ax[1,1].set_xlabel('Delta')
ax[1,1].set_ylabel('Histogram')
ax[0,2].axhline(x[hist_fit3>star_cut][0],c='tab:pink') #added for right gaussian fitting
ax[1,0].axhline(x[hist_fit3>star_cut][0],c='tab:pink') #added for right gaussian fitting
ax[1,2].axhline(star_cut, c='tab:red') # added for right gaussian fitting
maxi=np.max(gauss(delta,*coeff2))
def prob_SG(delta,maxi,*coeff2):
if delta>0.:
return 0.
elif delta<=0.:
return 1. - (gauss(delta, *coeff2) / maxi)
vprob_SG= np.vectorize(prob_SG)
SG=1.-vprob_SG(delta1,maxi,*coeff2)
df_i1.loc[:,'SG']=SG
param_izp=read_param_izp('psf')
mag0=param_izp['i_zp_day9']#%dir_dict[find_fits_dir(field)[-9:]]]
axi = ax[1, 2].scatter(df_i1.MAG_APER + mag0, SG,
marker='.', c=df_i1[color_axis], vmin=0., vmax=0.005)
ax[1,2].axvline(aper_cut, ls='--', c='tab:blue')
ax[1,2].axhline(SG_upper, ls='--', c='tab:blue')
ax[1,2].set_ylim(-0.02,1.02)
ax[1,2].set_xlabel('MAG APER i')
ax[1,2].set_ylabel('SG (probability to be a star)')
plt.suptitle(field+' seeing vs true_seeing: '+str(seeing)+','+str(true_seeing))
fig.colorbar(axi)
plt.tight_layout(rect=[0, 0., 1, 0.98])
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/star_galaxy_sep_12_all%s.png' % field, dpi=120)
plt.close(fig)
return df_i_cut, df_i1
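

def _sg_sketch(delta, coeff, maxi):
    # Hypothetical standalone restatement (not called by the pipeline) of the
    # SG weighting computed in pisco_cut_star() above: objects at positive
    # delta are treated as certain stars (SG = 1); at delta <= 0 the star
    # probability falls off with the fitted stellar Gaussian, normalised by
    # its peak value maxi.
    A, mu, sigma = coeff
    if delta > 0.:
        return 1.
    return A * np.exp(-(delta - mu) ** 2 / (2. * sigma ** 2)) / maxi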
def pisco_cut_frame(field):
# df_i=Table(fits.open('/Users/taweewat/Documents/pisco_code/star_galaxy/'+
# '%s_catalog.fits'%field)[1].data).to_pandas()
"""
c_a: CLASS_STAR lower limit for stars used for the linear fit
c_b, c_c: upper and lower limit for all objects selection
c_c can be moved with the for loop to include more objects until the confusion limit
c_d: Faintest magnitude for stars used for the linear fit
c_delta: lower limit for Delta to consider stars before fitting the gaussian and find SG (Star/Galaxy) factor
"""
global star_cut
global aper_cut
global SG_upper
star_cut=0.95
aper_cut=21.5
# aper_cut=22.0
SG_upper=0.02
seeing=find_seeing_fits(field)
true_seeing=find_seeing(field,'i')
if field=='Field179':
true_seeing=1.12
if field=='CHIPS1011-0505':
# c_a,c_b,c_c,c_d,c_delta=[0.95,-9.,-12.5,-9.2,-0.25]
c_a,c_b,c_c,c_d,c_delta=[0.95,-12.,-12.5,-13,-0.1]
elif field=='CHIPS2317-1443':
c_a,c_b,c_c,c_d,c_delta=[0.95,-11.,-12.5,-14,-0.05]
elif (field == 'Field137') or (field == 'Field071') or (field == 'Field109'):
# c_a,c_b,c_c,c_d,c_delta=[0.95,-8.,-11.5,-8.5,-0.2]
c_a,c_b,c_c,c_d,c_delta=[0.95,-12.,-12.5,-13,-0.15]
elif (field[0:3]=='PKS') or (field[0:4]=='SDSS'):
# c_a,c_b,c_c,c_d,c_delta=[0.95,-7,-12.2,-8.5,-0.1]
c_a, c_b, c_c, c_d, c_delta = [0.95, -11, -13, -13, -0.1]
elif field[0:5]=='CHIPS':
# c_a,c_b,c_c,c_d,c_delta=[0.95,-9.,-12.5,-11.8,-0.1] #-0.15 (default), -10
c_a,c_b,c_c,c_d,c_delta=[0.95,-11.,-12.2,-12.2,-0.1] #-0.15 (default), -10
elif field[0:5]=='Field':
# c_a,c_b,c_c,c_d,c_delta=[0.95,-8.,-11.5,-8.5,-0.1] #-8.5, -0.1 (Default)
c_a,c_b,c_c,c_d,c_delta=[0.95,-11,-12.5,-12.5,-0.1]
df_i_cut, df_i1=pisco_cut_star(field,c_a,c_b,c_d,c_delta)
while len(df_i1[(df_i1['MAG_APER']<c_b)&(df_i1['MAG_APER']>c_b-0.5)\
&(df_i1['SG']>0.2)&(df_i1['SG']<0.8)])<25: #8, 10 (default)
len_df_i1=len(df_i1)
c_b=c_b+0.5
df_i_cut, df_i1 = pisco_cut_star(field,c_a,c_b,c_d,c_delta)
if len_df_i1==len(df_i1):
break
def SG_cut(SG, aper, aper0):
if aper<aper0:
if SG <= SG_upper:
return True
else:
return False
else:
return True
param_izp=read_param_izp('psf')
mag0=param_izp['i_zp_day9']#%dir_dict[find_fits_dir(field)[-9:]]]
vSG_cut = np.vectorize(SG_cut)
vSGcut = vSG_cut(df_i1['SG'].values, df_i1['MAG_APER'].values+mag0, aper_cut)
dff=df_i1[vSGcut]
# dff=df_i1[df_i1['CLASS_STAR']<0.5]
# dff=df_i1[df_i1['SG']<star_cut] #0.8
vSGcut_star = vSG_cut(0.01, df_i1['MAG_APER'].values+mag0, aper_cut)
# dff_star = df_i1[(df_i1['SG'] > 0.9)] # no_2mass cut
##After running pisco_photometry_v4.fits, but before running
##pisco_photometry_psf_v4.fits, and then 19_pisco_tilt_resequence.ipynb
# fname = "/Users/taweewat/Documents/pisco_code/slr_output/total_psf_%s.csv"%field
# df0 = pd.read_csv(fname)
# df0['NUMBER']=np.arange(0,len(df0),1).tolist() #add NUMBER parameter to match between cut catalog and the total catalog
# df0.rename(columns={'ALPHA_J2000': 'ALPHA_J2000_i'}, inplace=True)
# df0.rename(columns={'DELTA_J2000': 'DELTA_J2000_i'}, inplace=True)
# print len(df0), len(df_i_cut), len(dff), len(dff_star), '=', seeing, 'vs [NEW]', true_seeing
# if len(df0)!=len(df_i_cut):
# raise ValueError('the two tables do not have the same length')
# dff0=pd.merge(dff,df0,on='NUMBER')
    ## Using SPREAD_MODEL to separate stars from galaxies
fname = "/Users/taweewat/Documents/pisco_code/slr_output/total_psf_%s.csv"%field
df0 = pd.read_csv(fname)
df0['NUMBER'] = np.arange(0, len(df0), 1).tolist()
df0.rename(columns={'ALPHA_J2000': 'ALPHA_J2000_i'}, inplace=True)
df0.rename(columns={'DELTA_J2000': 'DELTA_J2000_i'}, inplace=True)
#EXTENDED_COADD: 0 star, 1 likely star, 2 mostly galaxies, 3 galaxies
# df0['EXTENDED_COADD']=np.array(((df0['SPREAD_MODEL_i']+ 3*df0['SPREADERR_MODEL_i'])>0.005).values, dtype=int)+\
# np.array(((df0['SPREAD_MODEL_i']+df0['SPREADERR_MODEL_i'])>0.003).values, dtype=int)+\
# np.array(((df0['SPREAD_MODEL_i']-df0['SPREADERR_MODEL_i'])>0.003).values, dtype=int)
# dff=df0[df0['EXTENDED_COADD']>1]
# dff_star=df0[df0['EXTENDED_COADD']<2]
dff=df0[(df0['SPREAD_MODEL_i'])>0.005]
dff_star=df0[np.abs(df0['SPREAD_MODEL_i'])<0.004]
# dff_star=df0[np.abs(df0['SPREAD_MODEL_i'])<0.004] #+5/3.*df0['SPREADERR_MODEL_i'] <0.002
dff0=dff
dff0.to_csv("/Users/taweewat/Documents/pisco_code/slr_output/"+\
"galaxy_psf_total_%s.csv"%field)
# dff_star0=pd.merge(dff_star, df0, on='NUMBER') # for non-SPREAD_MODEL
dff_star0=dff_star #for SPREAD_MODEL
dff_star0.to_csv("/Users/taweewat/Documents/pisco_code/slr_output/"+\
"star_psf_total_%s.csv"%field)
def panstar_cut_star(field):
    ## Using SPREAD_MODEL to separate stars from galaxies
fname = "/Users/taweewat/Documents/pisco_code/slr_output/total_psf_%s.csv"%field
df0 = pd.read_csv(fname)
df0['NUMBER'] = np.arange(0, len(df0), 1).tolist()
df0.rename(columns={'ALPHA_J2000': 'ALPHA_J2000_i'}, inplace=True)
df0.rename(columns={'DELTA_J2000': 'DELTA_J2000_i'}, inplace=True)
dfi=df0[df0['MAG_AUTO_i']<-16]
x=dfi['MAG_AUTO_i']
y=dfi['SPREAD_MODEL_i']
p_spread=np.poly1d(np.polyfit(x,y,1))
xs=np.arange(np.min(df0['MAG_AUTO_i']),np.max(df0['MAG_AUTO_i']),0.01)
fig=plt.figure(figsize=(8,4))
plt.subplot(1,2,1)
plt.plot(df0['MAG_AUTO_i'],df0['SPREAD_MODEL_i'],'.',alpha=0.5)
plt.plot(x,y,'.',alpha=0.5)
plt.plot(xs,p_spread(xs))
plt.axhline(0.005,color='tab:orange')
plt.ylim(-0.1, 0.1)
plt.subplot(1,2,2)
plt.plot(df0['MAG_AUTO_i'],df0['SPREAD_MODEL_i']-p_spread(df0['MAG_AUTO_i']),'.',alpha=0.5)
plt.axhline(0.005,color='tab:orange')
plt.ylim(-0.1, 0.1)
plt.tight_layout()
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/spread_model_i_fit_%s_%s.png' %
(mode, field), dpi=120)
plt.close(fig)
df0['SPREAD_MODEL_i2']=df0['SPREAD_MODEL_i']-p_spread(df0['MAG_AUTO_i'])
#EXTENDED_COADD: 0 star, 1 likely star, 2 mostly galaxies, 3 galaxies
# df0['EXTENDED_COADD']=np.array(((df0['SPREAD_MODEL_i']+ 3*df0['SPREADERR_MODEL_i'])>0.005).values, dtype=int)+\
# np.array(((df0['SPREAD_MODEL_i']+df0['SPREADERR_MODEL_i'])>0.003).values, dtype=int)+\
# np.array(((df0['SPREAD_MODEL_i']-df0['SPREADERR_MODEL_i'])>0.003).values, dtype=int)
# dff=df0[df0['EXTENDED_COADD']>1]
# dff_star=df0[df0['EXTENDED_COADD']<2]
df1=df0[df0['FLAGS_i']<4].copy()
dff=df1[(df1['SPREAD_MODEL_i2'])>0.0035]
# dff_star=df0[(df0['MAG_AUTO_i']<-8) & (df0['SPREAD_MODEL_i']<0.10)] #+5/3.*df0['SPREADERR_MODEL_i'] <0.002
dff_star=df0[(df0['SPREAD_MODEL_i2']<0.004)&(df0['MAG_AUTO_i']<-16)&(df0['MAG_AUTO_i']>-18.5)]
# dff_star=df0[np.abs(df0['SPREAD_MODEL_i2'])<0.003]
# dff_star=df0[df0['CLASS_STAR_i']>0.9]
dff.to_csv("/Users/taweewat/Documents/pisco_code/slr_output/galaxy_psf_total_%s.csv"%field)
dff_star.to_csv("/Users/taweewat/Documents/pisco_code/slr_output/star_psf_total_%s.csv"%field)
def pisco_photometry_psf_v4(field, mode='psf', mode2mass='', slr=True): #mode2mass: '' vs '_no2mass'
def slr_running_psf(field, infile="None", mode="psf", mode2mass='', bigmacs="pisco_pipeline/big-macs-calibrate-master"):
"""
slr_running: running SLR script from github.com/patkel/big-macs-calibrate to get a calibrated magnitude
INPUT:
- field: object of interset e.g., 'Field026'
- bigmacs: the location for "big-macs-calibrate" directoty
OUTPUT:
- a new table with added columns with name MAG_g,...,MAGERR_g,...
"""
slrdir = 'slr_output'
pyfile = os.path.join(bigmacs, 'fit_locus.py')
# cmd = "python %s --file %s --columns %s --extension 1 --bootstrap 15 -l -r ALPHA_J2000_i -d DELTA_J2000_i -j --plot=PLOTS_%s_%s" \
# % (pyfile, infile, os.path.join(bigmacs, "coadd_mag_sex_%s%s.columns"%(mode,'')), mode, field)
if mode2mass=='':
cmd = "python %s --file %s --columns %s --extension 1 --bootstrap 15 -l -r ALPHA_J2000_i -d DELTA_J2000_i -j --plot=PLOTS_%s_%s" \
% (pyfile, infile, os.path.join(bigmacs, "coadd_mag_sex_%s%s.columns"%(mode,mode2mass)), mode, field) #'' vs '_no2mass'
elif mode2mass=='_no2mass':
cmd = "python %s --file %s --columns %s --extension 1 --bootstrap 15 -l -r ALPHA_J2000_i -d DELTA_J2000_i --plot=PLOTS_%s_%s" \
% (pyfile, infile, os.path.join(bigmacs, "coadd_mag_sex_%s%s.columns"%(mode,mode2mass)), mode, field) #'' vs '_no2mass'
print cmd
subprocess.check_call(shlex.split(cmd))
def update_color(fname, table, mode='psf'):
"""
update_color: using the output from SLR, update to the correct magnitude
INPUT:
- fname: input file from SLR output (...offsets.list)
- table: the table that we want to update the value (from column magg,etc to MAG_g,etc)
OUTPUT:
- a new table with added columns with name MAG_g,...,MAGERR_g,...
"""
print fname
with open(fname) as f:
content = f.readlines()
content = [x.strip() for x in content]
# print content
# if len(content)==8:
# red_content=content[4:]
# elif len(content)==10:
# red_content=content[5:-1]
if len(content)==7:
red_content=content[4:]
elif len(content)==9:
red_content=content[5:-1]
band = [x.split(' ')[0][-1] for x in red_content]
corr = [float(x.split(' ')[1]) for x in red_content]
ecorr = [float(x.split(' ')[3]) for x in red_content]
print 'bands = ', band
if mode=='psf':
MODE1='PSF'
elif mode=='model':
MODE1='MODEL'
elif mode=='auto':
MODE1='AUTO'
elif mode=='aper':
MODE1='APER'
elif mode=='hybrid':
MODE1='HYBRID'
table['MAG_' + band[0]] = table['MAG_%s_'%MODE1 + band[0]] + corr[0]
table['MAG_' + band[1]] = table['MAG_%s_'%MODE1 + band[1]] + corr[1]
table['MAG_' + band[2]] = table['MAG_%s_'%MODE1 + band[2]] + corr[2]
# table['MAG_' + band[3]] = table['MAG_%s_'%MODE1 + band[3]] + corr[3]
table['MAGERR_' + band[0]] = (table['MAGERR_%s_'%MODE1 + band[0]]**2)**0.5# + ecorr[0]**2)**0.5
table['MAGERR_' + band[1]] = (table['MAGERR_%s_'%MODE1 + band[1]]**2)**0.5# + ecorr[1]**2)**0.5
table['MAGERR_' + band[2]] = (table['MAGERR_%s_'%MODE1 + band[2]]**2)**0.5# + ecorr[2]**2)**0.5
# table['MAGERR_' + band[3]] = (table['MAGERR_%s_'%MODE1 + band[3]]**2)# + ecorr[3]**2)**0.5
# table['MAGERR_' + band[0]] = (table['MAGERR_%s_'%MODE1 + band[0]]**2)**0.5
# table['MAGERR_' + band[1]] = (table['MAGERR_%s_'%MODE1 + band[1]]**2)**0.5
# table['MAGERR_' + band[2]] = (table['MAGERR_%s_'%MODE1 + band[2]]**2)**0.5
# table['MAGERR_' + band[3]] = (table['MAGERR_%s_'%MODE1 + band[3]]**2)**0.5
return table
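    # For reference, the offsets.list lines parsed by update_color() look
    # roughly like (hypothetical values):
    #   MAG_PSF_g 0.123 +- 0.045
    # where the last character of the first token is the band, the second
    # token is the offset and the fourth token its error.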
slrdir = 'slr_output'
total3 = Table.from_pandas(pd.read_csv(
"/Users/taweewat/Documents/pisco_code/slr_output/star_psf_total_%s.csv" % field)) # star_psf_total_gaia
total3=total3[['NUMBER','ALPHA_J2000_i','DELTA_J2000_i','XWIN_IMAGE_i','YWIN_IMAGE_i',\
'MAG_APER_i','MAGERR_APER_i','MAG_APER_g','MAGERR_APER_g','MAG_APER_r',\
'MAGERR_APER_r','MAG_AUTO_i','MAGERR_AUTO_i',\
'MAG_AUTO_g','MAGERR_AUTO_g','MAG_AUTO_r','MAGERR_AUTO_r','MAG_SPHEROID_i','MAGERR_SPHEROID_i','MAG_SPHEROID_g',\
'MAGERR_SPHEROID_g','MAG_SPHEROID_r','MAGERR_SPHEROID_r','CLASS_STAR_i','CLASS_STAR_g','CLASS_STAR_r',\
'FLAGS_g','FLAGS_r','FLAGS_i','MAG_PSF_g',\
'MAG_PSF_r','MAG_PSF_i','MAGERR_PSF_g','MAGERR_PSF_r',\
'MAGERR_PSF_i','MAG_MODEL_g','MAG_MODEL_r',\
'MAG_MODEL_i','MAGERR_MODEL_g','MAGERR_MODEL_r',\
'MAGERR_MODEL_i','SPREAD_MODEL_g','SPREAD_MODEL_r',\
'SPREAD_MODEL_i','SPREADERR_MODEL_g','SPREADERR_MODEL_r',\
'SPREADERR_MODEL_i']]
print 'number of stars =', len(total3)
if (mode2mass==''):# and (mode=='psf'):
starpsfmode = '_psf'
# elif (mode2mass=='') and (mode=='model'):
# starpsfmode = '_psf' #'_model'
elif (mode2mass=='_no2mass'):# and (mode=='model'):
starpsfmode ='_no2mass' #'_model_no2mass'
# elif (mode2mass == '_no2mass') and (mode=='psf'):
# starpsfmode = '_no2mass'
# total3.write(slrdir+'/star_psf%s_%s_%i.fits' % ('_psf',field,0), overwrite=True) #with 2MASS stars: star_psf_psf_%s_%i.fits
total3.write(slrdir + '/star_psf%s_%s_%i.fits' % (starpsfmode, field, 0),
overwrite=True) # no 2MASS star mode vs , '_psf' vs '_no2mass'
if slr:
slr_running_psf(field, infile=slrdir + '/star_psf%s_%s_%i.fits' %
(starpsfmode, field, 0), mode='psf', mode2mass=mode2mass) # '_psf' vs '_no2mass'
total_gal=Table.from_pandas(pd.read_csv("/Users/taweewat/Documents/pisco_code/slr_output/galaxy_psf_total_%s.csv"%(field)))
print 'mode=', mode, '/star_psf%s_%s_%i.fits.offsets.list' % (starpsfmode, field, 0)
ntotal_gal = update_color(slrdir + '/star_psf%s_%s_%i.fits.offsets.list' %
(starpsfmode, field, 0), total_gal, mode=mode) # '' vs '_no2mass
ntotal_gal.write(os.path.join(
slrdir, 'galaxy_%s%s_ntotal_%s.csv' % (mode, mode2mass, field)), overwrite=True) # '' vs '_no2mass'
def make_images(field,ax=None):
dir='/Users/taweewat/Documents/pisco_code/Chips_images/'
try:
ax.imshow(image.imread(dir+"aplpy_panstar_%s_img4.jpeg"%field))
    except IOError:  # fall back to the non-img4 image when the first is missing
        ax.imshow(image.imread(dir+"aplpy_panstar_%s_img.jpeg"%field))
# ax.imshow(image.imread(dir+"aplpy4_%s_img4.jpeg"%field))
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
ax.axis('off')
return None
# def sur_pro(r): #Mpc
# def fn(x):
# if x>=1:
# return 1.-(2/np.sqrt(x**2-1)*np.arctan(np.sqrt((x-1.)/(x+1.))))
# elif x<1:
# return 1.-(2/np.sqrt(1-x**2)*np.arctanh(np.sqrt((1.-x)/(x+1.))))
# rs=0.15/0.71 #Mpc
# if r>=(0.1/0.71):
# return 1/((r/rs)**2-1)*fn(r/rs)
# elif r<(0.1/0.71):
# return 1./(((0.1/0.71)/rs)**2-1)*fn((0.1/0.71)/rs)
# def k_NFW():
# def integrated(y):
# return 1./integrate.quad(lambda r: 2*np.pi*r*sur_pro(r),0,y)[0]
# xy=np.logspace(-3,3,num=30)
# X = np.log(xy)
# Y = np.log([integrated(np.e**(y)) for y in X])
# Z=np.polyfit(X,Y,6)
# k_NFW = np.poly1d(Z)
# return k_NFW
# def sur_pro_prob(r,rc,k_NFW): #(Mpc,Mpc) # Weighted based on the distance from the center (Rykoff+12)
# return np.e**(k_NFW(np.log(rc)))*sur_pro(r)
def sur_pro(r,rc): #(arcmin)
def fn(x):
if x>=1:
return 1.-(2/np.sqrt(x**2-1)*np.arctan(np.sqrt((x-1.)/(x+1.))))
elif x<1:
return 1.-(2/np.sqrt(1-x**2)*np.arctanh(np.sqrt((1.-x)/(x+1.))))
rs=0.15/0.71 #Mpc
if r>=rc:
return 1/((r/rs)**2-1)*fn(r/rs)
elif r<rc:
return 1./((rc/rs)**2-1)*fn(rc/rs)
def sur_pro_prob_ang(r,rc):
return sur_pro(r,rc)/sur_pro(0.2,rc)
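

def _nfw_weight_demo():
    # Minimal demonstration (hypothetical radii, in the arcmin units the
    # callers below use) of the weighting above: a flat core inside rc and
    # the projected-NFW fall-off outside it, normalised at 0.2'.
    return [sur_pro_prob_ang(r, 1.) for r in (0.1, 0.5, 1.0, 2.0, 5.0)]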
# def sur_pro_prob_ang(r,r_c):
# if r < r_c:
# return 1.
# else:
# return 0.
# 'ezmodel2_bc03_zf3.0_chab_0.02_exp_0.1.txt', 'ezmodel2_c09_zf3.0_chab_0.02_exp_0.1.txt'
name=['z','dist','age','mass','Abs_g','App_g','kcorr_g','Abs_r',\
'App_r','kcorr_r','Abs_i','App_i','kcorr_i','Abs_z','App_z','kcorr_z']
df=pd.read_csv('/Users/taweewat/Documents/red_sequence/rsz/model/'+\
# 'ezmodel2_bc03_zf2.5_chab_0.016_exp_0.1.txt',
'ezmodel2_bc03_zf2.5_chab_0.02_exp_0.1.txt',
# 'ezmodel2_c09_zf3.0_chab_0.02_exp_0.1.txt',
skiprows=27,delim_whitespace=True,names=name)
df=df[(df.z>=0.1) & (df.z<1.)]
z_new=np.arange(0.1, 0.95, 0.0025)
Appi_new = interpolate.splev(z_new, interpolate.splrep(df.z, df.App_i, s=0), der=0)
Appi_f = interpolate.interp1d(df.z, df.App_i, kind='cubic')
#all extra options
extra_name= 'gremove_silk_zf3_c09_noebv_model_complete_gaia' #'gremove_lum_silk_zf2.5_c09_11', 'gremove_silk_zf3_c09_noebv_model_complete_no2mass'
gremove = False # remove non-detect g objects from the list
duplicate = False # remove duplicate redshift (uncertain)
colorerr = True # add redshift with color_error taken into account
transparent = True # make transparent plot for flip book
img_filp = False # make image flip from transparent
img_redshift = True # make image with redshift for each object
def linear_rmi(x0,redshift):
x=df.z[:-11] #-12
y=(df.App_r-df.App_i)[:-11] #-12
yhat = np.polyfit(x, y, 5) #5 vs 9
f_rmi = np.poly1d(yhat)
slope=-0.0222174237562*1.007
# Appi0=Appi_new[np.where(abs(z_new-redshift)<=1e-9)[0][0]]
Appi0=Appi_f(redshift)
return slope*(x0-Appi0)+f_rmi(redshift)
def linear_gmr(x0,redshift):
x=df.z[:-24] #-25
y=(df.App_g-df.App_r)[:-24] #-25
yhat = np.polyfit(x, y, 5)
f_gmr = np.poly1d(yhat)
slope=-0.0133824600874*1.646
# Appi0=Appi_new[np.where(abs(z_new-redshift)<=1e-9)[0][0]]
Appi0=Appi_f(redshift)
return slope*(x0-Appi0)+f_gmr(redshift)
def linear_gmi(x0,redshift):
x=df.z[:-9]
y=(df.App_g-df.App_i)[:-9]
yhat = np.polyfit(x, y, 5)
f_gmi = np.poly1d(yhat)
Appi0=Appi_f(redshift)
slope = -0.04589707934164738 * 1.481
return slope*(x0-Appi0)+f_gmi(redshift)
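

def _red_sequence_edges_sketch(mag_i, z, half_width=0.0175):
    # Hypothetical helper (not used by the pipeline) mirroring the binning in
    # pisco_tilt_resequence() below: the (low, high) r-i colour edges of the
    # redshift slice centred on z for an object of magnitude mag_i.
    return (linear_rmi(mag_i, round(z - half_width, 4)),
            linear_rmi(mag_i, round(z + half_width, 4)))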
def find_fits_dir(field):
home = '/Users/taweewat/Documents/pisco_code/'
dirs = ['ut170103/', 'ut170104/', 'ut170619/', 'ut170621/',\
'ut170624/', 'ut171208/', 'ut171209/', 'ut171212/']
myReg = re.compile(r'(%s_A).*' % field)
for di in dirs:
diri = home + di
for text in os.listdir(diri):
if myReg.search(text) != None:
# filename = myReg.search(text).group()
allfilename = diri
    allfilename = 'ut170103/'  # NOTE: hard-coded override of the directory found above, kept from the original pipeline
return allfilename
dir_dict = dict(zip(['ut170103/','ut170104/','ut170619/',\
'ut170621/','ut170624/','ut171208/','ut171209/','ut171212/'], np.arange(1, 9)))
def find_ra_dec(field):
if field == 'PKS1353':
RA = 209.0225
DEC = -34.3530556
redshift = 0.223
elif field == 'CHIPS2249-2808':
RA = 336.99975202151825
DEC = -43.57623068466675
redshift = -1
elif field == 'CHIPS2246-2854':
RA = 335.7855174238757
DEC = -34.934569299688185
redshift = -1
elif field[0:5] == 'Field':
base = pd.read_csv(
'/Users/taweewat/Dropbox/Documents/MIT/Observation/2017_1/all_objs.csv')
RA = base[base.name == field].ra.values[0]
DEC = base[base.name == field].dec.values[0]
redshift = base[base.name == field].redshift.values[0]
elif field[0:5] == 'CHIPS':
base = pd.read_csv(
'/Users/taweewat/Documents/red_sequence/chips_all_obj.csv', index_col=0)
RA = base[base.chips == field].ra.values[0]
DEC = base[base.chips == field].dec.values[0]
redshift = base[base.chips == field].redshift.values[0]
elif field[0:4] == 'SDSS':
base = pd.read_csv(
'/Users/taweewat/Documents/xray_project/ned-result/final_sdss_cut5.csv', index_col=0)
RA = base[base.name == field].RA.values[0]
DEC = base[base.name == field].DEC.values[0]
redshift = base[base.name == field].redshift.values[0]
return RA, DEC, redshift
def pisco_tilt_resequence(field, mode='psf', mode2mass=''):
RA, DEC, redshift = find_ra_dec(field)
if redshift!=-1:
qso_redshift=redshift
else:
qso_redshift=0.2
print 'RA', RA
print 'DEC', DEC
ebv = ebvpy.calc_ebv(ra=[RA],dec=[DEC]); print 'ebv:', ebv[0]
# ebv_g=ebvpy.calc_color_correction('g', ebv)[0]
# ebv_r=ebvpy.calc_color_correction('r', ebv)[0]
# ebv_i=ebvpy.calc_color_correction('i', ebv)[0]
# ebv_z=0.0
    ebv_g, ebv_r, ebv_i, ebv_z = 0.0, 0.0, 0.0, 0.0  # reddening correction no longer applied here; it is already included in the SLR calibration
print 'ebv_g:', ebv_g, 'ebv_r:', ebv_r, 'ebv_i:', ebv_i
param_izp=read_param_izp(mode) #i zero point
# fname = "/Users/taweewat/Documents/pisco_code/slr_output/galaxy_ntotal_%s.csv"%field
fname = "/Users/taweewat/Documents/pisco_code/slr_output/galaxy_%s%s_ntotal_%s.csv" % (
mode, mode2mass, field) # '' vs '_no2mass'
df0 = pd.read_csv(fname,index_col=0)
# gremove=True
if gremove:
nog=len(df0[df0['MAG_PSF_g'] >= 50.]); print "no g detected:", nog
df0 = df0[df0['MAG_PSF_g'] < 50.].copy() # cut out not detected objects in g band
else:
nog=0
c5 = SkyCoord(ra=df0['ALPHA_J2000_i'].values*u.degree, dec=df0['DELTA_J2000_i'].values*u.degree)
c0 = SkyCoord(ra=RA*u.degree, dec=DEC*u.degree)
sep = c5.separation(c0)
df0['sep(deg)']=sep
df0['sep(Mpc)']=sep*60.*cosmo.kpc_proper_per_arcmin(qso_redshift).value/1e3
cut=df0
dfi = cut#.drop_duplicates(subset=['XWIN_WORLD', 'YWIN_WORLD'], keep='first').copy()
    print 'rows before/after duplicate removal (currently disabled):', len(df0), len(dfi)
# Added Galactic Reddening (6/16/18)
if mode2mass == '':
dfi['MAG_i']=dfi['MAG_i']-ebv_i
dfi['MAG_g']=dfi['MAG_g']-ebv_g
dfi['MAG_r']=dfi['MAG_r']-ebv_r
# Use i Zero Point from each day and g,r zero point fron the color (6/22/18)
elif mode2mass == '_no2mass':
dfi['MAG_i']=dfi['MAG_i']-ebv_i+param_izp['i_zp_day9']#%dir_dict[find_fits_dir(field)[-9:]]]
dfi['MAG_g']=dfi['MAG_g']-ebv_g+param_izp['i_zp_day9']#%dir_dict[find_fits_dir(field)[-9:]]]
dfi['MAG_r']=dfi['MAG_r']-ebv_r+param_izp['i_zp_day9']#%dir_dict[find_fits_dir(field)[-9:]]]
# dfi['MAG_z']=dfi['MAG_z']-ebv_z+param_izp['i_zp_day%i'%dir_dict[find_fits_dir(field)[-9:]]]
# dfi['MAGERR_i']=np.sqrt(dfi['MAGERR_i']**2-(99**2))
# dfi=dfi[dfi['MAG_i']<21.5].copy()
# dfi=dfi[dfi.MAGERR_g<0.5]
# dfi=dfi[(dfi.MAG_g<100)&(dfi.MAG_i<100)&(dfi.MAG_r<100)]
# dfi=dfi[(dfi.FLAGS_g<5)&(dfi.FLAGS_r<5)&(dfi.FLAGS_i<5)&(dfi.FLAGS_z<5)]
print field, qso_redshift, df0.shape, cut.shape, dfi.shape, dfi['sep(deg)'].max(), dfi['sep(Mpc)'].max()
norm = matplotlib.colors.Normalize(vmin=0.15,vmax=0.675)
c_m = matplotlib.cm.cool
s_m = matplotlib.cm.ScalarMappable(cmap=c_m, norm=norm)
s_m.set_array([])
I=np.arange(16,24,0.01)
dfi.loc[:,"z_gmr"] = np.nan
dfi.loc[:,"z_rmi"] = np.nan
dfi.loc[:,"w_gmr"] = np.nan
dfi.loc[:,"w_rmi"] = np.nan
dfi.loc[:,"w_col_gmr"] = np.nan
dfi.loc[:,"w_col_rmi"] = np.nan
# dfi.loc[:,"z_gmi"] = np.nan
# dfi.loc[:,"w_gmi"] = np.nan
# dfi.loc[:,"w_col_gmi"] = np.nan
# k_NFW0=k_NFW()
bin_width=0.035 #0.025
bins_gmr_cen = np.arange(0.12315, 0.33315+0.01, bin_width)
bins_gmr_edge = np.arange(0.10565, 0.35065 + 0.01, bin_width)
# bins_gmr_cen = np.arange(0.15815, 0.33315+0.01, bin_width) # bins_gmr_cen = np.arange(0.15, 0.325+0.01, bin_width)
# bins_gmr_edge = np.arange(0.14065, 0.35065+0.01, bin_width) # bins_gmr_edge = np.arange(0.1325, 0.3425+0.01, bin_width)
bins_rmi_cen = np.arange(0.36815, 0.64815+0.01, bin_width) # bins_rmi_cen = np.arange(0.36, 0.675+0.01, bin_width)
bins_rmi_edge = np.arange(0.35065, 0.66565+0.01, bin_width) # bins_rmi_edge = np.arange(0.3425, 0.6925+0.01, bin_width)
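    # For each galaxy, test its color against every red-sequence model bin;
    # matches are weighted by an NFW-based radial probability (w_*) and a color
    # weight (w_col_*, currently fixed to 1).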
z_rmi,w_rmi,w_col_rmi=[],[],[]
for i, row in dfi.iterrows():
for z in bins_rmi_cen:
# if row['MAG_i'] < -18+5.*np.log10(ex.d_L(z)*1e6)-5.:
# if row['MAG_i'] < magi_cut_rmi:
# if np.sqrt(row['MAGERR_r']**2+row['MAGERR_i']**2)<0.134: #np.mean(f_rmi(x+0.07)-f_rmi(x))
if np.sqrt(row['MAGERR_r']**2+row['MAGERR_i']**2)<0.067*4.5:#1.5: #0.067*1.5
# if np.sqrt(row['MAGERR_r']**2+row['MAGERR_i']**2)<1:
rmi=row['MAG_r']-row['MAG_i']
# rmierr=np.sqrt(row['MAGERR_r']**2+row['MAGERR_i']**2)
low_edge=linear_rmi(row['MAG_i'],round(z-0.0175,4)) #0.0125
high_edge=linear_rmi(row['MAG_i'],round(z+0.0175,4)) #0.0125
if (rmi > low_edge) & (rmi <= high_edge):
# if (np.sqrt(row['MAGERR_r']**2+row['MAGERR_i']**2) < 3.5*(high_edge-low_edge)):
z_rmi.append(round(z,3))
# wrmi0=sur_pro_prob(row['sep(Mpc)'],1.,k_NFW0)
wrmi0=sur_pro_prob_ang(row['sep(deg)']*60, 1); w_rmi.append(wrmi0) #arcmin
# w_col_rmi0=scipy.stats.norm(rmi,rmierr).cdf(high_edge)-scipy.stats.norm(rmi,rmierr).cdf(low_edge); w_col_rmi.append(w_col_rmi0)
w_col_rmi0=1.; w_col_rmi.append(w_col_rmi0)
dfi.loc[i,"z_rmi"]=z
dfi.loc[i,"w_rmi"]=wrmi0
dfi.loc[i,"w_col_rmi"]=w_col_rmi0
z_gmr,w_gmr,w_col_gmr=[],[],[]
for i, row in dfi.iterrows():
for z in bins_gmr_cen:
# if row['MAG_i'] < -18+5.*np.log10(ex.d_L(z)*1e6)-5.:
# if row['MAG_i'] < magi_cut_gmr:
# if np.sqrt(row['MAGERR_g']**2+row['MAGERR_r']**2)<0.165: #np.mean(f_gmr(x+0.07)-f_gmr(x))
if np.sqrt(row['MAGERR_g']**2+row['MAGERR_r']**2)<0.0825*4.5:#1.5: #0.0825*1.5
# if np.sqrt(row['MAGERR_g']**2+row['MAGERR_r']**2)<1:
gmr=row['MAG_g']-row['MAG_r']
# gmrerr=np.sqrt((row['MAGERR_g'])**2+row['MAGERR_r']**2) #add factor 2.2 to reduce the g error to be similar to other bands
low_edge=linear_gmr(row['MAG_i'],round(z-0.0175,4)) #0.0125
high_edge=linear_gmr(row['MAG_i'],round(z+0.0175,4)) #0.0125
if (gmr > low_edge) & (gmr <= high_edge):
# if (np.sqrt(row['MAGERR_g']**2+row['MAGERR_r']**2) < 3.5*(high_edge-low_edge)):
z_gmr.append(round(z,3))
# w_col_gmr0=scipy.stats.norm(gmr,gmrerr).cdf(high_edge)-scipy.stats.norm(gmr,gmrerr).cdf(low_edge); w_col_gmr.append(w_col_gmr0)
w_col_gmr0=1.; w_col_gmr.append(w_col_gmr0)
# wgmr0=sur_pro_prob(row['sep(Mpc)'],1.,k_NFW0); w_gmr.append(wgmr0)
wgmr0 = sur_pro_prob_ang(row['sep(deg)'] * 60, 1); w_gmr.append(wgmr0) # arcmin
dfi.loc[i,"z_gmr"]=z
dfi.loc[i,"w_gmr"]=wgmr0
dfi.loc[i,"w_col_gmr"]=w_col_gmr0
# z_gmi,w_gmi,w_col_gmi=[],[],[]
# for i, row in dfi.iterrows():
# # for z in np.arange(0.15,0.35,0.025):
# for z in np.arange(0.15,0.7,0.035):
# if row['MAG_i'] < -18+5.*np.log10(ex.d_L(z)*1e6)-5.:
# gmi=row['MAG_g']-row['MAG_i']
# gmierr=np.sqrt((row['MAGERR_g']/2.2)**2+row['MAGERR_i']**2) #add factor 2.2 to reduce the g error to be similar to other bands
# low_edge=linear_gmi(row['MAG_i'],round(z-0.0175,4)) #0.0125
# high_edge=linear_gmi(row['MAG_i'],round(z+0.0175,4)) #0.0125
# if (gmi > low_edge) & (gmi <= high_edge):
# # if (np.sqrt(row['MAGERR_g']**2+row['MAGERR_r']**2) < 3.5*(high_edge-low_edge)):
# z_gmi.append(round(z,3))
# # w_col_gmi0=scipy.stats.norm(gmi,gmierr).cdf(high_edge)-scipy.stats.norm(gmi,gmierr).cdf(low_edge); w_col_gmi.append(w_col_gmi0)
# w_col_gmi0=1.; w_col_gmi.append(w_col_gmi0)
# # wgmi0=sur_pro_prob(row['sep(Mpc)'],1.,k_NFW0); w_gmi.append(wgmi0)
# wgmi0 = sur_pro_prob_ang(row['sep(deg)'] * 60, 1.); w_gmi.append(wgmi0) # arcmin
# dfi.loc[i,"z_gmi"]=z
# dfi.loc[i,"w_gmi"]=wgmi0
# dfi.loc[i,"w_col_gmi"]=w_col_gmi0
# ns1,xs1=np.histogram(z_gmr,bins=np.arange(0.125,0.35,0.025),weights=w_gmr)
ns1,xs1=np.histogram(z_gmr,bins=bins_gmr_edge,weights=np.array(w_gmr)*np.array(w_col_gmr)) #0.15-0.325
bin_cen1 = (xs1[:-1] + xs1[1:])/2
# ns2,xs2=np.histogram(z_rmi,bins=np.arange(0.325,0.7,0.025),weights=w_rmi)
ns2,xs2=np.histogram(z_rmi,bins=bins_rmi_edge,weights=np.array(w_rmi)*np.array(w_col_rmi)) #0.36-0.675
bin_cen2 = (xs2[:-1] + xs2[1:])/2
# z_total=np.append(xs1[:-1],xs2[:-1])
z_total=np.append(bin_cen1, bin_cen2)
n_total=np.append(ns1,ns2)
z_max=z_total[np.where(n_total==np.max(n_total))[0][0]]
n_median = np.median(n_total[n_total != 0])
n_mean = np.mean(n_total)
n_bkg = np.mean(sorted(n_total)[2:-2]);
z_total_added = np.insert(
np.append(z_total, z_total[-1] + bin_width), 0, z_total[0] - bin_width)
n_total_added = np.insert(np.append(n_total, 0), 0, 0) - n_bkg
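    # Pad one empty bin on each side so an edge peak still has three points for
    # the Gaussian fit below; subtract the trimmed-mean background n_bkg.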
# print 'n_total_added', n_total_added
lumfn=pd.read_csv('/Users/taweewat/Documents/red_sequence/coma_cluster_luminosity_function/schecter_fn.csv',\
names=['M_r','theta(M)Mpc^-3'])
h=0.7
x=lumfn['M_r']+5*np.log10(h);
y=lumfn['theta(M)Mpc^-3']*(h**3)
f1d=interp1d(x, y,kind='cubic')
def lum_function(M):
alpha = -1.20
Nb = np.log(10) / 2.5 * 0.002 * (70 / 50.)**3
Mb_s = -21. + 5 * np.log10(70 / 50.)
return Nb * (10.**(0.4 * (alpha + 1) * (Mb_s - M))) * np.exp(-10.**(0.4 * (Mb_s - M)))
def distance(z):
return cosmo.luminosity_distance(z).value
def abs_mag(m, z):
return m - 5 * np.log10(distance(z) * 1e6) + 5
def NFW_profile(r):
rs = 1. # Mpc
rho0 = 500.
return rho0 / (r / rs * (1 + r / rs)**2)
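    # Vectorized helper integrals: lum_fn integrates the interpolated Schechter
    # function down to the apparent-magnitude limit at z; dense_fn integrates
    # the NFW profile out to the proper distance subtended by 1 arcmin at z.
    # (Currently only referenced by the commented-out adjustment below.)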
lum_fn = lambda z: integrate.quad( f1d, -23.455, abs_mag(22.25, z))[0]
lum_vfn = np.vectorize(lum_fn)
dense_fn = lambda z: integrate.quad(NFW_profile,0.001,cosmo.kpc_proper_per_arcmin(z).value/1e3)[0]
dense_vfn = np.vectorize(dense_fn)
n_total_adj=n_total_added #/(lum_vfn(z_total_added)*dense_vfn(z_total_added)) (adjusted the peak before picking it)
print 'n_total_added:', n_total_added
print 'n_total_adj:', n_total_adj
indi = np.where(n_total_adj == np.max(n_total_adj))[0][0]
# indi = np.where(n_total_added == np.max(n_total_added))[0][0]
z_fit = z_total_added[[indi - 1, indi, indi + 1]]; print 'z_fit', z_fit
n_fit = n_total_added[[indi - 1, indi, indi + 1]]; print 'n_fit', n_fit
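    # Fit a fixed-width (sigma=0.025) Gaussian to the three bins around the
    # peak; its amplitude popt[0] is normalized into the richness signal below.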
def gaussian_func(x, a, mu):
sigma=0.025
return a * np.exp(-(x-mu)**2/(2*(sigma**2)))
if (n_fit[0]<0.) and (n_fit[2]<0.):
popt, pcov = curve_fit(gaussian_func, z_fit, [0,n_fit[1],0], p0=[n_fit[1],z_fit[1]])
else:
popt, pcov = curve_fit(gaussian_func, z_fit,
n_fit, p0=[n_fit[1], z_fit[1]])
# signal=tuple(popt)[0]
# def v_func(z):
# return (z**2+2*z)/(z**2+2*z+2)
# signal=((np.max(n_total)-np.mean(n_total))*(v_func(z_max)*(4000))**2)/5.3e6 #normalization for r~1 at z~0.15
# signal = (
# (tuple(popt)[0]) * (cosmo.luminosity_distance(tuple(popt)[1]).value)**1.5) / 5.3e5 # normalization for r~1 at z~0.15
    # lum_function, distance, abs_mag, NFW_profile, and the interpolated
    # Schechter function f1d are already defined above; reuse them unchanged.
# lum_factor = integrate.quad(lum_function, -24, abs_mag(21.60, tuple(popt)[1]))[0]
# lum_factor = cosmo.luminosity_distance(tuple(popt)[1]).value**-1.5*100
lum_factor = integrate.quad( f1d, -23.455, abs_mag(22.25, tuple(popt)[1]))[0]
#-23.455: min abs Mag from schecter_fn.csv, 22.25: median of Mag r
density_factor=integrate.quad(NFW_profile, 0.001, cosmo.kpc_proper_per_arcmin(tuple(popt)[1]).value/1e3)[0]
signal = tuple(popt)[0] / (lum_factor * density_factor)
z_max_fit = tuple(popt)[1]
print 'z_max_fit', z_max_fit
print 'lum_factor:', lum_factor
print 'density_factor', density_factor
# duplicate=False ## set duplication
if duplicate:
dff = dfi.copy()
fig=plt.figure(figsize=(10,3.5))
plt.subplot(1,3,1)
bin_width=0.035
ns1, xs1 = np.histogram(dff['z_gmr'].dropna(), bins=bins_gmr_edge, weights=np.array(dff['w_gmr'].dropna()))
bin_cen1 = (xs1[:-1] + xs1[1:])/2
ns2,xs2=np.histogram(dff['z_rmi'].dropna(),bins=bins_rmi_edge,weights=np.array(dff['w_rmi'].dropna()))
bin_cen2 = (xs2[:-1] + xs2[1:])/2
plt.bar(bin_cen1, ns1, width=0.035, color='#ff7f0e')
plt.bar(bin_cen2, ns2, width=0.035, color='#1f77b4')
if np.max(np.append(ns1,ns2))<30:
plt.ylim(0,30)
plt.xlabel('all objects')
plt.xlim(0.1, 0.7)
plt.subplot(1,3,2)
dff_dup=dff.dropna()
ns_dup1,xs_dup1=np.histogram(dff_dup['z_gmr'],bins=bins_gmr_edge,weights=np.array(dff_dup['w_gmr']))
bin_cen1 = (xs1[:-1] + xs1[1:])/2
ns_dup2,xs_dup2=np.histogram(dff_dup['z_rmi'],bins=bins_rmi_edge,weights=np.array(dff_dup['w_rmi']))
bin_cen2 = (xs2[:-1] + xs2[1:])/2
plt.bar(bin_cen1, ns_dup1, width=0.035, color='#ff7f0e')
plt.bar(bin_cen2, ns_dup2, width=0.035, color='#1f77b4')
if np.max(np.append(ns_dup1,ns_dup2))<30:
plt.ylim(0,30)
plt.xlabel('duplicate')
plt.xlim(0.1,0.7)
n_total_dup=np.append(ns1-ns_dup1,ns2-ns_dup2)
n_bkg_dup = np.mean(sorted(n_total_dup)[2:-2])
z_total_added = np.insert(
np.append(z_total, z_total[-1] + bin_width), 0, z_total[0] - bin_width)
n_total_added = np.insert(np.append(n_total_dup, 0), 0, 0) - n_bkg_dup
indi = np.where(n_total_added == np.max(n_total_added))[0][0]
z_fit_dup = z_total_added[[indi - 1, indi, indi + 1]]
n_fit_dup = n_total_added[[indi - 1, indi, indi + 1]]
popt_dup, pcov_dup = curve_fit(gaussian_func, z_fit_dup, n_fit_dup, p0=[n_fit_dup[1],z_fit_dup[1]])
signal_dup= tuple(popt_dup)[0] / (lum_factor * density_factor)
z_max_fit=tuple(popt_dup)[1]
plt.subplot(1, 3, 3)
plt.bar(bin_cen1, ns1 - ns_dup1, width=0.035, color='#ff7f0e')
plt.bar(bin_cen2, ns2 - ns_dup2, width=0.035, color='#1f77b4')
if np.max(n_total_dup)<30:
plt.ylim(0,30)
plt.xlabel('all objects-duplicate')
plt.xlim(0.1, 0.7)
plt.axvline(z_max_fit,ls='--',color='purple',label='z_max:%.2f'%z_max_fit)
plt.axvline(redshift,color='red',label='z:%.2f'%redshift)
plt.legend(loc='best')
plt.tight_layout()
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_dup_%s_all_%.3f_%s_tilted.png' %
(mode, signal_dup, field), dpi=120)
plt.close(fig)
else:
n_total_dup=0
## Plot the figure
cmap=matplotlib.cm.RdYlGn
if duplicate or colorerr:
fig,ax=plt.subplots(1,5,figsize=(25,5))
else:
fig,ax=plt.subplots(1,4,figsize=(20,5))
make_images(field,ax[0])
norm = matplotlib.colors.Normalize(vmin=0.01,vmax=2)
dfi_ri=dfi.loc[dfi['z_rmi'].dropna().index]
ax[1].scatter(dfi['MAG_i'],dfi['MAG_r']-dfi['MAG_i'],c='black',alpha=0.1)#dfi['w_rmi'],cmap=cmap)
ax[1].scatter(dfi_ri['MAG_i'],dfi_ri['MAG_r']-dfi_ri['MAG_i'],c=dfi_ri['w_rmi'],cmap=cmap)#,norm=norm)
ax[1].errorbar(dfi_ri['MAG_i'],dfi_ri['MAG_r']-dfi_ri['MAG_i'],xerr=dfi_ri['MAGERR_i'],yerr=np.sqrt(dfi_ri['MAGERR_r']**2+dfi_ri['MAGERR_i']**2),fmt='none',c='k',alpha=0.05)
# plt.plot(df.App_i,df.App_r-df.App_i,'.')
# ax[1].axhline(xs[:-1][(xs[:-1]<1.33) & (xs[:-1]>0.6)][0],lw=0.7,color='green')
for z in bins_rmi_cen:
ax[1].plot(I,linear_rmi(I,round(z,4)),color=s_m.to_rgba(z))
ax[1].set_ylim(0.25,1.5)
ax[1].set_xlim(16,24)
# cbar=plt.colorbar(s_m)
ax[1].set_xlabel('I')
ax[1].set_ylabel('R-I')
ax[1].set_title('z=0.35-0.675')#, icut:'+str(magi_cut_rmi))
# plt.plot([corr_f(z) for z in df.z.values[5:-12]],df.App_r[5:-12]-df.App_i[5:-12],'-')
dfi_gr=dfi.loc[dfi['z_gmr'].dropna().index]
ax[2].scatter(dfi['MAG_i'],dfi['MAG_g']-dfi['MAG_r'],c='black',alpha=0.1)#,c=dfi['w_gmr'],cmap=cmap)
ax[2].scatter(dfi_gr['MAG_i'],dfi_gr['MAG_g']-dfi_gr['MAG_r'],c=dfi_gr['w_gmr'],cmap=cmap)#,norm=norm)
ax[2].errorbar(dfi_gr['MAG_i'],dfi_gr['MAG_g']-dfi_gr['MAG_r'],xerr=dfi_gr['MAGERR_i'],yerr=np.sqrt(dfi_gr['MAGERR_g']**2+dfi_gr['MAGERR_r']**2),fmt='none',c='k',alpha=0.05)
# plt.plot(df.App_i,df.App_g-df.App_r,'.')
# ax[2].axhline(xs[:-1][(xs[:-1]<1.65) & (xs[:-1]>np.min(x2))][0],lw=0.7,color='green')
for z in bins_gmr_cen:
ax[2].plot(I,linear_gmr(I,round(z,4)),color=s_m.to_rgba(z))
ax[2].set_ylim(0.75,2)
ax[2].set_xlim(16,24)
# cbar=plt.colorbar(s_m)
ax[2].set_xlabel('I')
ax[2].set_ylabel('G-R')
ax[2].set_title('z=0.15-0.325')
# plt.plot([corr_f(z) for z in df.z.values[:-25]],df.App_g[:-25]-df.App_r[:-25],'-')
xs=np.arange(np.min(z_fit)-0.1,np.max(z_fit)+0.1,0.001)
    ax[3].bar(bin_cen2, ns2, width=bin_width, color='#1f77b4') #width = 0.025
ax[3].bar(bin_cen1, ns1, width=bin_width, color='#ff7f0e') #width = 0.025
ax[3].axvline(z_max,ls='--',color='purple',label='z_max:%.2f'%z_max)
ax[3].axvline(redshift,color='red',label='z:%.2f'%redshift)
ax[3].plot(z_fit,n_fit+n_bkg,'o',c='tab:purple')
ax[3].plot(xs, gaussian_func(xs, *popt)+n_bkg, c='tab:green', ls='--', label='fit: a=%.2f, mu=%.4f'% tuple(popt))
ax[3].axhline(n_median,color='tab:green',label='median:%.2f'%n_median)
ax[3].axhline(n_mean,color='tab:red',label='mean:%.2f'%n_mean)
ax[3].legend(loc='best')
ax[3].set_xlabel('z')
ax[3].set_xlim(0.1,0.7)
ax[3].set_title('ebv:%.3f,ebv_g-r:-%.3f,ebv_r-i:-%.3f'%(ebv[0],ebv_g-ebv_r,ebv_r-ebv_i))
if np.max(n_total)<30:
ax[3].set_ylim(0,30)
if duplicate:
xs = np.arange(np.min(z_fit_dup) - 0.1, np.max(z_fit_dup) + 0.1, 0.001)
        ax[4].bar(bin_cen2, ns2-ns_dup2, width=bin_width, color='#1f77b4') #width = 0.025
ax[4].bar(bin_cen1, ns1-ns_dup1, width=bin_width, color='#ff7f0e') #width = 0.025
ax[4].axvline(z_max,ls='--',color='purple',label='z_max:%.2f'%z_max)
ax[4].axvline(redshift,color='red',label='z:%.2f'%redshift)
ax[4].plot(z_fit_dup,n_fit_dup+n_bkg_dup,'o',c='tab:purple')
        ax[4].plot(xs, gaussian_func(xs, *popt_dup)+n_bkg_dup, c='tab:green', ls='--', label='fit: a=%.2f, mu=%.4f'% tuple(popt_dup))
ax[4].legend(loc='best')
ax[4].set_xlabel('z')
ax[4].set_xlim(0.1,0.7)
if np.max(n_total)<30:
ax[4].set_ylim(0,30)
if colorerr:
dfi_rmi = dfi[~np.isnan(dfi['z_rmi'])]
dfi_gmr = dfi[~np.isnan(dfi['z_gmr'])]
zs_gmr = np.arange(0.11, 0.3425, 0.002)
zs_rmi = np.arange(0.3425, 0.65, 0.002)
ntot_rmi = np.repeat(0, len(zs_rmi))
ntot_gmr = np.repeat(0, len(zs_gmr))
for i, row in dfi_rmi.iterrows():
# for i, row in dfi.iterrows():
i0 = row['MAG_i']
rmi = row['MAG_r'] - row['MAG_i']
rmierr = np.sqrt((row['MAGERR_r'])**2 + row['MAGERR_i']**2)
ntot_rmi0 = scipy.stats.norm(rmi, rmierr).pdf(
linear_rmi(i0, zs_rmi))
ntot_rmi = ntot_rmi + ntot_rmi0 * row['w_rmi']
ax[4].plot(zs_rmi,ntot_rmi0*row['w_rmi'],'-',color='tab:red',alpha=0.2)
for i, row in dfi_gmr.iterrows():
# for i, row in dfi.iterrows():
i0 = row['MAG_i']
gmr = row['MAG_g'] - row['MAG_r']
gmrerr = np.sqrt((row['MAGERR_g'])**2 + row['MAGERR_r']**2)
ntot_gmr0 = scipy.stats.norm(gmr, gmrerr).pdf(
linear_gmr(i0, zs_gmr))
ntot_gmr = ntot_gmr + ntot_gmr0 * row['w_gmr']
ax[4].plot(zs_gmr,ntot_gmr0*row['w_gmr'],'-',color='tab:cyan',alpha=0.2)
ax[4].plot(zs_gmr, ntot_gmr, '.')
ax[4].plot(zs_rmi, ntot_rmi, '.')
ax[4].axvline(z_max,ls='--',color='purple',label='z_max:%.2f'%z_max)
ax[4].axvline(redshift,color='red',label='z:%.2f'%redshift)
ax[4].legend(loc='best')
ax[4].set_xlabel('z')
ax[4].set_xlim(0.1, 0.7)
if np.max(np.append(ntot_gmr,ntot_rmi)) < 200:
ax[4].set_ylim(0, 200)
n_total_cerr=np.append(ntot_gmr,ntot_rmi)
else:
n_total_cerr=0
signal_final = signal_dup if duplicate else signal
plt.tight_layout(rect=[0, 0., 1, 0.98])
purge('/Users/taweewat/Documents/red_sequence/pisco_color_plots/',
'redsq_richg%s_%s_all_.*_%s_tilted.png' % ('', mode, field))
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_richg%s_%s_all_%.3f_%s_tilted.png' % ('',mode,signal_final,field), dpi=120)
plt.close(fig)
# fig,ax=plt.subplots(1,4,figsize=(20,5))
# make_images(field,ax[0])
# dfi_gmi=dfi[~np.isnan(dfi['z_gmi'])]
# zs_gmi=np.arange(0.115,0.69,0.002)
# ntot_gmi=np.repeat(0,len(zs_gmi))
# for i, row in dfi_gmi.iterrows():
# i0 = row['MAG_i']
# gmi = row['MAG_g'] - row['MAG_i']
# gmierr = np.sqrt((row['MAGERR_g'])**2 + row['MAGERR_i']**2)
# ntot_gmi0 = scipy.stats.norm(gmi, gmierr).pdf(
# linear_gmi(i0, zs_gmi))
# ntot_gmi = ntot_gmi + ntot_gmi0 * row['w_gmi']
# ax[3].plot(zs_gmi,ntot_gmi0*row['w_gmi'],'-',color='tab:cyan',alpha=0.2)
# ax[1].scatter(dfi['MAG_i'],dfi['MAG_g']-dfi['MAG_i'],c='black',alpha=0.1)#dfi['w_rmi'],cmap=cmap)
# ax[1].scatter(dfi_gmi['MAG_i'],dfi_gmi['MAG_g']-dfi_gmi['MAG_i'],c=dfi_gmi['w_gmi'],cmap=cmap)
# ax[1].errorbar(dfi_gmi['MAG_i'], dfi_gmi['MAG_g'] - dfi_gmi['MAG_i'], xerr=dfi_gmi['MAGERR_i'],
# yerr=np.sqrt(dfi_gmi['MAGERR_g']**2 + dfi_gmi['MAGERR_i']**2), fmt='none', c='k', alpha=0.05)
# for z in np.arange(0.15, 0.71, bin_width):
# ax[1].plot(I,linear_gmi(I,z),color=s_m.to_rgba(z))
# ax[1].set_ylim(1.0,3.5)
# ax[1].set_xlim(16,24)
# ax[1].set_xlabel('I')
# ax[1].set_ylabel('G-I')
# ax[1].set_title('z=0.15-0.675')
# ns3,xs3=np.histogram(z_gmi,bins=np.arange(0.1325,0.7,0.035),weights=np.array(w_gmi)*np.array(w_col_gmi))
# bin_cen3 = (xs3[:-1] + xs3[1:])/2
# z_max_gmi = bin_cen3[np.where(ns3 == np.max(ns3))[0][0]]
# n_bkg = np.mean(sorted(ns3)[2:-2]);
# z_total_added = np.insert(
# np.append(bin_cen3, bin_cen3[-1] + bin_width), 0, bin_cen3[0] - bin_width)
# n_total_added = np.insert(np.append(ns3, 0), 0, 0) - n_bkg
# indi = np.where(n_total_added == np.max(n_total_added))[0][0]
# z_fit = z_total_added[[indi - 1, indi, indi + 1]]; print 'z_fit', z_fit
# n_fit = n_total_added[[indi - 1, indi, indi + 1]]; print 'n_fit', n_fit
# if (n_fit[0]<0.) and (n_fit[2]<0.):
# popt_gmi, pcov_gmi = curve_fit(gaussian_func, z_fit, [0,n_fit[1],0], p0=[n_fit[1],z_fit[1]])
# else:
# popt_gmi, pcov_gmi = curve_fit(gaussian_func, z_fit,
# n_fit, p0=[n_fit[1], z_fit[1]])
# lum_factor2 = integrate.quad( f1d, -23.455, abs_mag(22.25, tuple(popt_gmi)[1]))[0]
# density_factor2=integrate.quad(NFW_profile,0.001,cosmo.kpc_proper_per_arcmin(tuple(popt_gmi)[1]).value/1e3)[0]
# signal_gmi = tuple(popt_gmi)[0] / (lum_factor2 * density_factor2)
# z_max_fit_gmi = tuple(popt_gmi)[1]
# ax[2].bar(bin_cen3, ns3, width = 0.035, color='#1f77b4')#, alpha=0.5)
# ax[2].axvline(z_max_gmi, ls='--', color='purple',
# label='z_max=%.3f'%z_max_gmi)
# ax[2].axvline(z_max_fit_gmi, ls='--', color='tab:green',
# label='z_max_fit=%.3f'%z_max_fit_gmi)
# ax[2].axvline(redshift,color='red',label='z:%.3f'%redshift)
# ax[2].plot(z_fit,n_fit+n_bkg,'o',c='tab:purple')
# xs=np.arange(np.min(z_fit)-0.1,np.max(z_fit)+0.1,0.001)
# ax[2].plot(xs, gaussian_func(xs, *popt_gmi) + n_bkg, c='tab:green',
# ls='--', label='fit: a=%.2f, mu=%.4f' % tuple(popt_gmi))
# ax[2].legend(loc='best')
# ax[2].set_xlabel('z')
# ax[2].set_xlim(0.1,0.7)
# if np.max(n_total)<30:
# ax[2].set_ylim(0,30)
# ax[3].plot(zs_gmi,ntot_gmi,'.')
# ax[3].set_xlabel('z')
# ax[3].set_xlim(0.1,0.7)
# ax[3].axvline(z_max_fit_gmi,ls='--',color='purple',label='z_max_fit:%.2f'%z_max_fit_gmi)
# ax[3].axvline(redshift,color='red',label='z:%.2f'%redshift)
# if np.max(ntot_gmi)<70:
# ax[3].set_ylim(0,70)
# ntot_gmi_max=np.max(ntot_gmi)
# zs_gmi_max=zs_gmi[np.argmax(ntot_gmi)]
# ax[3].axvline(zs_gmi_max,ls='--',color='pink',label='zs_gmi_max:%.2f'%zs_gmi_max)
# plt.tight_layout(rect=[0, 0., 1, 0.98])
# plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_gmi_%s_all_%.3f_%s_tilted.png' %
# (mode, signal_gmi, field), dpi=120)
# plt.close(fig)
# transparent=False
if transparent:
fig,ax=plt.subplots(figsize=(7,4))
        ax.bar(bin_cen2, ns2, width=0.035, color='#1f77b4') #width = 0.025
ax.bar(bin_cen1, ns1, width = 0.035, color='#ff7f0e') #width = 0.025
ax.axvline(z_max,ls='--',color='purple',label='z_max:%.2f'%z_max)
ax.set_xlabel('z')
ax.set_xlim(0.1,0.7)
if np.max(n_total)<30:
ax.set_ylim(0,30)
for axp in ax.spines:
ax.spines[axp].set_color('white')
ax.xaxis.label.set_color('white')
ax.yaxis.label.set_color('white')
ax.tick_params(axis='x', colors='white')
ax.tick_params(axis='y', colors='white')
purge('/Users/taweewat/Documents/red_sequence/pisco_color_plots/',
'redsq_transparent_%.3f_%s_tilted.png' % (signal_final,field))
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_transparent_%.3f_%s_tilted.png' % (signal_final,field), dpi=120, transparent=True)
plt.close(fig)
red_dir='/Users/taweewat/Documents/red_sequence/'
rich_filename = 'all_richness_%s.csv'%extra_name
if not os.path.isfile(red_dir + rich_filename):
os.system("cp %s %s"%(red_dir+'all_richness_gremove_lum_silk_zf2.5.csv',red_dir+rich_filename))
df_richness=pd.read_csv(red_dir+rich_filename)
df_richness=df_richness.copy()
df_richness.loc[df_richness['name'] == field, 'Nmax'] = np.max(n_total)
df_richness.loc[df_richness['name'] == field, 'Nbkg_mean'] = np.mean(n_total)
df_richness.loc[df_richness['name'] == field, 'Nbkg_median'] = np.median(n_total)
df_richness.loc[df_richness['name'] == field, 'zmax'] = z_max
df_richness.loc[df_richness['name'] == field, 'amp'] = signal_final
df_richness.loc[df_richness['name'] == field, 'zmax_fit'] = z_max_fit
df_richness.loc[df_richness['name'] == field, 'gremove'] = nog
df_richness.loc[df_richness['name'] == field, 'lum_factor'] = lum_factor
df_richness.loc[df_richness['name'] == field, 'density_factor'] = density_factor
# df_richness.loc[df_richness['name'] == field, 'amp_gmi'] = signal_gmi
# df_richness.loc[df_richness['name'] == field, 'z_max_fit_gmi'] = z_max_fit_gmi
# df_richness.loc[df_richness['name']==field,'distance[Mpc]']=z_max*(4000)
# df_richness.loc[df_richness['name']==field,'R']=(np.max(n_total)-np.mean(n_total))*(z_max*4000)**2
df_richness.to_csv(red_dir+rich_filename,index=0)
dfi.to_csv("/Users/taweewat/Documents/pisco_code/slr_output/galaxy_%s_final_%s.csv"%(mode,field))
    # get member redshift in the figure
if img_redshift:
image_redshift(field,signal,tuple(popt)[1],mode)
# get total images with red-sequence
if img_filp:
image_flip(field,signal,tuple(popt)[1],mode)
if colorerr:
return z_total, n_total, n_total_cerr
else:
return z_total, n_total, n_total_dup
def pisco_combine_imgs(field, mode='psf', mode2mass=''):
dir1='/Users/taweewat/Documents/red_sequence/pisco_color_plots/psf_est/'
dir2='/Users/taweewat/Documents/red_sequence/pisco_color_plots/'
dir3='/Users/taweewat/Documents/red_sequence/pisco_color_plots/'
dirout='/Users/taweewat/Documents/red_sequence/pisco_all/'
myReg = re.compile(r'(redsq_richg_%s_all_.*%s.*png)' % (mode, field))
myReg2=re.compile(r'(\d{1,3}\.\d{1,3})')
names=[]
for text in os.listdir(dir3):
if myReg.search(text) != None:
names.append(myReg.search(text).group())
    if names==[]:
        print 'no files', field
        return
img1=dir1+'psf_est3_'+field+'_i.png'
img2=dir2+'star_galaxy_sep_12_all'+field+'.png'
img3=dir3+names[0]
images_list=[img1, img2, img3]
imgs=[]
    try:
        imgs = [ Image_PIL.open(i) for i in images_list ]
    except IOError:
        print 'no image file', field
        return
mw = imgs[2].width/2
h = imgs[0].height+imgs[1].height/2+imgs[2].height/2
result = Image_PIL.new("RGBA", (mw, h))
y,index=0,0
for i in imgs:
if (index==2) or (index==1):
i=i.resize((i.width/2,i.height/2))
result.paste(i, (0, y))
y += i.size[1]
index+=1
# result.save(dirout + 'all_combine%s_%s_%s_%s.png' %
# (mode2mass, field, mode, myReg2.search(names[0]).group()))
# result.save(dirout + 'all_combine_%s_%s_%s_%s_%s.png' %
# (extra_name, mode2mass, myReg2.search(names[0]).group(), mode, field))
def purge(dir, pattern):
for f in os.listdir(dir):
if re.search(pattern, f):
print 'remove', f
os.remove(os.path.join(dir, f))
def image_redshift(field,signal,redshift,mode):
df_total=pd.read_csv('/Users/taweewat/Documents/pisco_code/slr_output/galaxy_%s_final_%s.csv'%(mode,field),index_col=0)
df_star=pd.read_csv('/Users/taweewat/Documents/pisco_code/slr_output/star_psf_total_%s.csv'%field,index_col=0)
# df_star=df_star[df_star['SG']>0.95]
hdu=fits.open('/Users/taweewat/Documents/red_sequence/panstar/coadd_panstar_%s_i.fits'%field)
img=hdu[0].data.astype(float)
img-=np.median(img.ravel()[~np.isnan(img.ravel())])
def redshift_f(row):
if not np.isnan(row['z_gmr']):
redshift=row['z_gmr']
if not np.isnan(row['z_rmi']):
redshift=row['z_rmi']
if np.isnan(row['z_rmi']) and np.isnan(row['z_gmr']):
redshift=0
return redshift
df_total['redshift_m']=df_total.apply(lambda row: redshift_f(row), axis=1)
def size_f(row):
if not np.isnan(row['w_gmr']):
size=row['w_gmr']
if not np.isnan(row['w_rmi']):
size=row['w_rmi']
if np.isnan(row['w_rmi']) and np.isnan(row['w_gmr']):
size=0
return size
df_total['size_m']=df_total.apply(lambda row: size_f(row), axis=1)
df_total0=df_total.copy()
df_total=df_total[df_total['redshift_m'] > 0].copy()
norm = matplotlib.colors.Normalize(vmin=-2,vmax=4)
c_m = matplotlib.cm.Greys_r
s_m = matplotlib.cm.ScalarMappable(cmap=c_m, norm=norm)
s_m.set_array([])
normalize = matplotlib.colors.Normalize(vmin=0.1, vmax=0.7)
fig, (a0, a1) = plt.subplots(1,2, figsize=(30,18), gridspec_kw = {'width_ratios':[0.8, 1]})
# a0.imshow(img, cmap=c_m, norm=norm, origin='lower')
# a0.scatter(df_star['XWIN_IMAGE_i'].values,df_star['YWIN_IMAGE_i'].values,s=100, marker='*', facecolors='none', edgecolors='yellow', label='star')
# df1i=df_total[df_total['w_rmi']>0.1]
# df2i=df_total[df_total['w_rmi']<=0.1]
# # a0.scatter(df1i['XWIN_IMAGE_i'].values,df1i['YWIN_IMAGE_i'].values,s=100, facecolors='none', edgecolors='blue')
# a0.scatter(df1i['XWIN_IMAGE_i'].values, df1i['YWIN_IMAGE_i'].values, s=100, c=df1i['size_m'].values, cmap='RdYlGn')
# a0.scatter(df2i['XWIN_IMAGE_i'].values,df2i['YWIN_IMAGE_i'].values, s=100, facecolors='none', edgecolors='white')
# a0.set_xlim(0,1600)
# a0.set_ylim(0, 2250)
try:
img2 = mpimg.imread('/Users/taweewat/Documents/pisco_code/Chips_images/aplpy_panstar_%s_img.jpeg' % field)
except:
img2 = mpimg.imread('/Users/taweewat/Documents/pisco_code/Chips_images/aplpy_panstar_%s_img4.jpeg' % field)
imgplot = a0.imshow(img2)
a0.axis('off')
a0.annotate('Redshift: %.3f\nRichness: %.2f' %
(redshift, signal), xy=(150, 100), color='white')
a1.imshow(img, cmap=c_m, norm=norm, origin='lower')
a1.scatter(df_star['XWIN_IMAGE_i'].values,df_star['YWIN_IMAGE_i'].values, s=300,edgecolor='orange', facecolor='none',lw=3)
a1.scatter(df_total0['XWIN_IMAGE_i'].values,df_total0['YWIN_IMAGE_i'].values, s=150,edgecolor='tab:blue', facecolor='none',lw=1,alpha=0.5)
#,s=100, marker='*', facecolors='none', edgecolors='yellow', label='star')
axi = a1.scatter(df_total['XWIN_IMAGE_i'].values, df_total['YWIN_IMAGE_i'].values,
s=(df_total['size_m'].values * 200)+30, c=df_total['redshift_m'].values, cmap='tab20b', norm=normalize)
plt.colorbar(axi) # df_total['size_m'].values*300
# a1.set_xlim(0, 1600)
# a1.set_ylim(0, 2250)
plt.tight_layout()
left, bottom, width, height = [0.05, 0.24, 0.3, 0.2]
ax2 = fig.add_axes([left, bottom, width, height])
ax2.imshow(mpimg.imread(
'/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_transparent_%.3f_%s_tilted.png' % (signal, field)))
ax2.axes.get_xaxis().set_visible(False)
ax2.axes.get_yaxis().set_visible(False)
ax2.axis('off')
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_image_redshift/img_redshift_%s_%.3f_%s.png' %
(mode, signal,field), dpi=50)
plt.close(fig)
def image_flip(field, signal, redshift, mode):
img = mpimg.imread(
'/Users/taweewat/Documents/pisco_code/Chips_images/aplpy4_%s_img.jpeg' % field)
fig, ax = plt.subplots(figsize=(7, 7))
imgplot = ax.imshow(img)
ax.axis('off')
ax.annotate('Redshift: %.3f\nRichness: %.2f' %
(redshift, signal), xy=(150, 100), color='white')
left, bottom, width, height = [0.2, 0.18, 0.3, 0.2]
ax2 = fig.add_axes([left, bottom, width, height])
ax2.imshow(mpimg.imread(
'/Users/taweewat/Documents/red_sequence/pisco_color_plots/redsq_transparent_%.3f_%s_tilted.png' % (signal, field)))
ax2.axes.get_xaxis().set_visible(False)
ax2.axes.get_yaxis().set_visible(False)
ax2.axis('off')
# plt.tight_layout()
plt.savefig('/Users/taweewat/Documents/red_sequence/pisco_image_redshift/image_flip_%s_%.3f_%s.png' %
(mode, signal, field), dpi=200)
plt.close(fig)
if __name__ == "__main__":
"""
execute:
python pisco_pipeline/pisco_photometry_all.py CHIPS111 psf slr
    # updated version with the no2mass option, which skips the comparison with known 2MASS stars
python pisco_pipeline/pisco_photometry_all.py CHIPS111 psf allslr no2mass
"""
print 'Number of arguments:', len(sys.argv), 'arguments.'
print 'Argument List:', str(sys.argv)
field = str(sys.argv[1])
mode = str(sys.argv[2]) #aper, psf, auto, hybrid
all_argv=sys.argv[3:] #allslr, slr, noslr
    if (all_argv[0]=='allslr') or (all_argv[0]=='slr'):
slr=str(all_argv[0])
slr_param=True
elif all_argv[0]=='noslr':
slr='no_slr'
slr_param=False
    if len(all_argv) < 2 or all_argv[1]=='2mass':
        mode2mass=''
    elif all_argv[1]=='no2mass':
        mode2mass='_no2mass'
home='/Users/taweewat/Documents/pisco_code/' #09, 171208
# dirs=['ut170103/','ut170104/','ut170619/','ut170621/','ut170624/','ut171208/','ut171209/','ut171212/']
# 'ut171208/', 'ut171209/','ut171212/', 'ut170621/', 'ut170624/'
dirs = ['ut170619/']
# dirs = ['ut171209/']
names=[]
myReg=re.compile(r'(CHIPS\d{4}[+-]\d{4})|(Field\d{3})')
for di in dirs:
dir=home+di
for text in os.listdir(dir):
if myReg.search(text) != None:
names.append(myReg.search(text).group())
all_fields=list(set(names))
exception = ['CHIPS0525-6938', 'Field234']
z_total_all,n_total_all,n_total_dup_all=[],[],[]
# all_fields=['CHIPS1911+4455']
all_fields=['CHIPS1011-0505']
all_fields_cut = all_fields[:]
notgoflag=True
for index, field in enumerate(all_fields_cut):
print field, '%i/%i' % (index, len(all_fields_cut))
# if field == 'CHIPS0122-2646':
# notgoflag = False; continue
# if notgoflag:
# continue
if field in exception:
continue
if slr=='allslr':
# star_galaxy_bleem(field)
pisco_photometry_v4(field)
# pisco_cut_frame(field)
elif slr=='slr':
# star_galaxy_bleem(field)
# pisco_photometry_v4(field)
panstar_cut_star(field)
# pisco_cut_frame(field)
pisco_photometry_psf_v4(field, mode=mode, mode2mass=mode2mass, slr=slr_param)
purge('/Users/taweewat/Documents/red_sequence/pisco_color_plots/'\
,r'(redsq_%s_all_.*%s.*png)'%(mode,field))
z_total,n_total,n_total_dup=pisco_tilt_resequence(field, mode=mode, mode2mass=mode2mass)
z_total_all.append(z_total)
n_total_all.append(n_total)
n_total_dup_all.append(n_total_dup)
# pisco_combine_imgs(field, mode=mode, mode2mass=mode2mass)
pickle.dump( [z_total_all,n_total_all,n_total_dup_all], open( "pickle_all_richness_%s.pickle"%extra_name, "wb" ) )
            print 'save pickle file at', "pickle_all_richness_%s.pickle" % extra_name
elif slr == 'no_slr':
# pisco_cut_frame(field)
panstar_cut_star(field)
pisco_photometry_psf_v4(field, mode=mode, mode2mass=mode2mass, slr=slr_param)
purge('/Users/taweewat/Documents/red_sequence/pisco_color_plots/'\
,r'(redsq_%s_all_.*%s.*png)'%(mode,field))
z_total,n_total,n_total_dup=pisco_tilt_resequence(field, mode=mode, mode2mass=mode2mass)
z_total_all.append(z_total)
n_total_all.append(n_total)
n_total_dup_all.append(n_total_dup)
# pisco_combine_imgs(field, mode=mode, mode2mass=mode2mass)
pickle.dump( [z_total_all,n_total_all,n_total_dup_all], open( "pickle_all_richness_%s.pickle"%extra_name, "wb" ) )
            print 'save pickle file at', "pickle_all_richness_%s.pickle" % extra_name
purge('final', "proj_coadd_c%s_.*\.fits" % field)
purge('.', "proto_psf_%s_.*\.fits" % field)
purge('.', "samp_psf_%s_.*\.fits" % field)
purge('.', "resi_psf_%s_.*\.fits" % field)
purge('.', "snap_psf_%s_.*\.fits" % field)
purge('.', "chi_psf_%s_.*\.fits" % field)
# purge('psfex_output', "psf_%s_.*\.fits" % field)
# purge('slr_output', "a_psf_%s_.*\.fits" % field)
purge('final', "coadd_c%s_sq_.*\.fits" % field)
|
lasote/conan
|
refs/heads/develop
|
conans/test/generators/cmake_test.py
|
1
|
import re
import unittest
from collections import namedtuple
from conans.client.generators.cmake_multi import CMakeMultiGenerator
from conans.model.settings import Settings
from conans.model.conan_file import ConanFile
from conans.client.generators.cmake import CMakeGenerator
from conans.model.build_info import CppInfo
from conans.model.ref import ConanFileReference
from conans.client.conf import default_settings_yml
class CMakeGeneratorTest(unittest.TestCase):
def _extract_macro(self, name, text):
pattern = ".*(macro\(%s\).*?endmacro\(\)).*" % name
return re.sub(pattern, r"\1", text, flags=re.DOTALL)
def variables_setup_test(self):
conanfile = ConanFile(None, None, Settings({}), None)
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo("dummy_root_folder1")
cpp_info.defines = ["MYDEFINE1"]
conanfile.deps_cpp_info.update(cpp_info, ref.name)
ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables")
cpp_info = CppInfo("dummy_root_folder2")
cpp_info.defines = ["MYDEFINE2"]
conanfile.deps_cpp_info.update(cpp_info, ref.name)
conanfile.deps_user_info["LIB1"].myvar = "myvalue"
conanfile.deps_user_info["LIB1"].myvar2 = "myvalue2"
conanfile.deps_user_info["lib2"].MYVAR2 = "myvalue4"
generator = CMakeGenerator(conanfile)
content = generator.content
cmake_lines = content.splitlines()
self.assertIn("set(CONAN_DEFINES_MYPKG -DMYDEFINE1)", cmake_lines)
self.assertIn("set(CONAN_DEFINES_MYPKG2 -DMYDEFINE2)", cmake_lines)
self.assertIn("set(CONAN_COMPILE_DEFINITIONS_MYPKG MYDEFINE1)", cmake_lines)
self.assertIn("set(CONAN_COMPILE_DEFINITIONS_MYPKG2 MYDEFINE2)", cmake_lines)
self.assertIn('set(CONAN_USER_LIB1_myvar "myvalue")', cmake_lines)
self.assertIn('set(CONAN_USER_LIB1_myvar2 "myvalue2")', cmake_lines)
self.assertIn('set(CONAN_USER_LIB2_MYVAR2 "myvalue4")', cmake_lines)
def variables_cmake_multi_user_vars_test(self):
settings_mock = namedtuple("Settings", "build_type, constraint")
conanfile = ConanFile(None, None, settings_mock("Release", lambda x: x), None)
conanfile.deps_user_info["LIB1"].myvar = "myvalue"
conanfile.deps_user_info["LIB1"].myvar2 = "myvalue2"
conanfile.deps_user_info["lib2"].MYVAR2 = "myvalue4"
generator = CMakeMultiGenerator(conanfile)
content = generator.content["conanbuildinfo_multi.cmake"]
cmake_lines = content.splitlines()
self.assertIn('set(CONAN_USER_LIB1_myvar "myvalue")', cmake_lines)
self.assertIn('set(CONAN_USER_LIB1_myvar2 "myvalue2")', cmake_lines)
self.assertIn('set(CONAN_USER_LIB2_MYVAR2 "myvalue4")', cmake_lines)
def variables_cmake_multi_user_vars_escape_test(self):
settings_mock = namedtuple("Settings", "build_type, constraint")
conanfile = ConanFile(None, None, settings_mock("Release", lambda x: x), None)
conanfile.deps_user_info["FOO"].myvar = 'my"value"'
conanfile.deps_user_info["FOO"].myvar2 = 'my${value}'
conanfile.deps_user_info["FOO"].myvar3 = 'my\\value'
generator = CMakeMultiGenerator(conanfile)
content = generator.content["conanbuildinfo_multi.cmake"]
cmake_lines = content.splitlines()
self.assertIn(r'set(CONAN_USER_FOO_myvar "my\"value\"")', cmake_lines)
self.assertIn(r'set(CONAN_USER_FOO_myvar2 "my\${value}")', cmake_lines)
self.assertIn(r'set(CONAN_USER_FOO_myvar3 "my\\value")', cmake_lines)
def multi_flag_test(self):
conanfile = ConanFile(None, None, Settings({}), None)
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo("dummy_root_folder1")
cpp_info.includedirs.append("other_include_dir")
cpp_info.cppflags = ["-DGTEST_USE_OWN_TR1_TUPLE=1", "-DGTEST_LINKED_AS_SHARED_LIBRARY=1"]
conanfile.deps_cpp_info.update(cpp_info, ref.name)
ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables")
cpp_info = CppInfo("dummy_root_folder2")
cpp_info.cflags = ["-DSOMEFLAG=1"]
conanfile.deps_cpp_info.update(cpp_info, ref.name)
generator = CMakeGenerator(conanfile)
content = generator.content
cmake_lines = content.splitlines()
self.assertIn('set(CONAN_C_FLAGS_MYPKG2 "-DSOMEFLAG=1")', cmake_lines)
self.assertIn('set(CONAN_CXX_FLAGS_MYPKG "-DGTEST_USE_OWN_TR1_TUPLE=1'
' -DGTEST_LINKED_AS_SHARED_LIBRARY=1")', cmake_lines)
self.assertIn('set(CONAN_C_FLAGS "-DSOMEFLAG=1 ${CONAN_C_FLAGS}")', cmake_lines)
self.assertIn('set(CONAN_CXX_FLAGS "-DGTEST_USE_OWN_TR1_TUPLE=1'
' -DGTEST_LINKED_AS_SHARED_LIBRARY=1 ${CONAN_CXX_FLAGS}")', cmake_lines)
def aux_cmake_test_setup_test(self):
conanfile = ConanFile(None, None, Settings({}), None)
generator = CMakeGenerator(conanfile)
aux_cmake_test_setup = generator.content
# extract the conan_basic_setup macro
macro = self._extract_macro("conan_basic_setup", aux_cmake_test_setup)
self.assertEqual("""macro(conan_basic_setup)
set(options TARGETS NO_OUTPUT_DIRS SKIP_RPATH)
cmake_parse_arguments(ARGUMENTS "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} )
if(CONAN_EXPORTED)
message(STATUS "Conan: called by CMake conan helper")
endif()
conan_check_compiler()
if(NOT ARGUMENTS_NO_OUTPUT_DIRS)
conan_output_dirs_setup()
endif()
conan_set_find_library_paths()
if(NOT ARGUMENTS_TARGETS)
message(STATUS "Conan: Using cmake global configuration")
conan_global_flags()
else()
message(STATUS "Conan: Using cmake targets configuration")
conan_define_targets()
endif()
if(NOT ARGUMENTS_SKIP_RPATH)
message(STATUS "Conan: Adjusting default RPATHs Conan policies")
conan_set_rpath()
endif()
conan_set_vs_runtime()
conan_set_libcxx()
conan_set_find_paths()
endmacro()""", macro)
# extract the conan_set_find_paths macro
macro = self._extract_macro("conan_set_find_paths", aux_cmake_test_setup)
self.assertEqual("""macro(conan_set_find_paths)
# CMAKE_MODULE_PATH does not have Debug/Release config, but there are variables
# CONAN_CMAKE_MODULE_PATH_DEBUG to be used by the consumer
# CMake can find findXXX.cmake files in the root of packages
set(CMAKE_MODULE_PATH ${CONAN_CMAKE_MODULE_PATH} ${CMAKE_MODULE_PATH})
# Make find_package() to work
set(CMAKE_PREFIX_PATH ${CONAN_CMAKE_MODULE_PATH} ${CMAKE_PREFIX_PATH})
# Set the find root path (cross build)
set(CMAKE_FIND_ROOT_PATH ${CONAN_CMAKE_FIND_ROOT_PATH} ${CMAKE_FIND_ROOT_PATH})
if(CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM)
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ${CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM})
endif()
if(CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ${CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY})
endif()
if(CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ${CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE})
endif()
endmacro()""", macro)
def name_and_version_are_generated_test(self):
conanfile = ConanFile(None, None, Settings({}), None)
conanfile.name = "MyPkg"
conanfile.version = "1.1.0"
generator = CMakeGenerator(conanfile)
content = generator.content
cmake_lines = content.splitlines()
self.assertIn('set(CONAN_PACKAGE_NAME MyPkg)', cmake_lines)
self.assertIn('set(CONAN_PACKAGE_VERSION 1.1.0)', cmake_lines)
def settings_are_generated_tests(self):
settings = Settings.loads(default_settings_yml)
settings.os = "Windows"
settings.compiler = "Visual Studio"
settings.compiler.version = "12"
settings.compiler.runtime = "MD"
settings.arch = "x86"
settings.build_type = "Debug"
conanfile = ConanFile(None, None, Settings({}), None)
conanfile.settings = settings
generator = CMakeGenerator(conanfile)
content = generator.content
cmake_lines = content.splitlines()
self.assertIn('set(CONAN_SETTINGS_BUILD_TYPE "Debug")', cmake_lines)
self.assertIn('set(CONAN_SETTINGS_ARCH "x86")', cmake_lines)
self.assertIn('set(CONAN_SETTINGS_COMPILER "Visual Studio")', cmake_lines)
self.assertIn('set(CONAN_SETTINGS_COMPILER_VERSION "12")', cmake_lines)
self.assertIn('set(CONAN_SETTINGS_COMPILER_RUNTIME "MD")', cmake_lines)
self.assertIn('set(CONAN_SETTINGS_OS "Windows")', cmake_lines)
|
levilucio/SyVOLT
|
refs/heads/master
|
UMLRT2Kiltera_MM/trace_link.py
|
1
|
"""
__trace_link.py_____________________________________________________
Automatically generated AToM3 syntactic object (DO NOT MODIFY DIRECTLY)
Author: gehan
Modified: Sat Aug 30 18:23:40 2014
____________________________________________________________________
"""
from ASGNode import *
from ATOM3Type import *
from graph_trace_link import *
class trace_link(ASGNode, ATOM3Type):
def __init__(self, parent = None):
ASGNode.__init__(self)
ATOM3Type.__init__(self)
self.superTypes = []
self.graphClass_ = graph_trace_link
self.isGraphObjectVisual = True
if(hasattr(self, '_setHierarchicalLink')):
self._setHierarchicalLink(False)
if(hasattr(self, '_setHierarchicalNode')):
self._setHierarchicalNode(False)
self.parent = parent
self.generatedAttributes = { }
self.realOrder = []
self.directEditing = []
def clone(self):
cloneObject = trace_link( self.parent )
for atr in self.realOrder:
cloneObject.setAttrValue(atr, self.getAttrValue(atr).clone() )
ASGNode.cloneActions(self, cloneObject)
return cloneObject
def copy(self, other):
ATOM3Type.copy(self, other)
for atr in self.realOrder:
self.setAttrValue(atr, other.getAttrValue(atr) )
ASGNode.copy(self, other)
def preCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.preCondition(actionID, params)
else: return None
def postCondition (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postCondition(actionID, params)
else: return None
def preAction (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.preAction(actionID, params)
else: return None
def postAction (self, actionID, * params):
if self.graphObject_:
return self.graphObject_.postAction(actionID, params)
else: return None
def QOCA(self, params):
"""
QOCA Constraint Template
NOTE: DO NOT select a POST/PRE action trigger
Constraints will be added/removed in a logical manner by other mechanisms.
"""
return # <--- Remove this if you want to use QOCA
# Get the high level constraint helper and solver
from Qoca.atom3constraints.OffsetConstraints import OffsetConstraints
oc = OffsetConstraints(self.parent.qocaSolver)
# Constraint only makes sense if there exists 2 objects connected to this link
if(not (self.in_connections_ and self.out_connections_)): return
# Get the graphical objects (subclass of graphEntity/graphLink)
graphicalObjectLink = self.graphObject_
graphicalObjectSource = self.in_connections_[0].graphObject_
graphicalObjectTarget = self.out_connections_[0].graphObject_
objTuple = (graphicalObjectSource, graphicalObjectTarget, graphicalObjectLink)
"""
Example constraint, see Kernel/QOCA/atom3constraints/OffsetConstraints.py
For more types of constraints
"""
oc.LeftExactDistance(objTuple, 20)
oc.resolve() # Resolve immediately after creating entity & constraint
|
Hackplayers/Empire-mod-Hpys-tests
|
refs/heads/master
|
lib/modules/powershell/credentials/Invoke-WCMDump.py
|
2
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Invoke-WCMDump',
# list of one or more authors for the module
'Author': ['Barrett Adams (@peewpw)'],
# more verbose multi-line description of the module
            'Description': ('Invoke-WCMDump enumerates Windows credentials in the Credential Manager '
                            'and then extracts available information about each one. Passwords '
                            'are retrieved for "Generic" type credentials, but cannot be '
                            'retrieved by the same method for "Domain" type credentials. '
                            'Credentials are only returned for the current user.'),
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : None,
# True if the module needs admin rights to run
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : False,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
# list of any references/other comments
            'Comments': [
                'https://github.com/peewpw/Invoke-WCMDump'
            ]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
                'Description'   :   'Agent to run the module on.',
'Required' : True,
'Value' : ''
},
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# the PowerShell script itself, with the command to invoke
# for execution appended to the end. Scripts should output
# everything to the pipeline for proper parsing.
#
# the script should be stripped of comments, with a link to any
# original reference script included in the comments.
script = """
"""
# if you're reading in a large, external script that might be updates,
# use the pattern below
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/credentials/Invoke-WCMDump.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
script += "Invoke-WCMDump"
# add any arguments to the end execution of the script
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
script += " -" + str(option) + " " + str(values['Value'])
return script
|
vvv1559/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnresolvedReferencesInspection/UnusedImportBeforeStarDunderAll/p1/m1.py
|
80
|
import b
__all__ = []
|
josherick/bokeh
|
refs/heads/master
|
sphinx/source/docs/user_guide/source_examples/chart_timeseries_with_legend.py
|
30
|
import pandas as pd
from bokeh.charts import TimeSeries, show, output_file
# read in some stock data from the Yahoo Finance API
AAPL = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=AAPL&a=0&b=1&c=2000&d=0&e=1&f=2010",
parse_dates=['Date'])
MSFT = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=MSFT&a=0&b=1&c=2000&d=0&e=1&f=2010",
parse_dates=['Date'])
IBM = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=IBM&a=0&b=1&c=2000&d=0&e=1&f=2010",
parse_dates=['Date'])
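# Collect the adjusted closes into one DataFrame keyed on Date, so TimeSeries
# draws one line per remaining column.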
xyvalues = pd.DataFrame(dict(
AAPL=AAPL['Adj Close'],
Date=AAPL['Date'],
MSFT=MSFT['Adj Close'],
IBM=IBM['Adj Close'],
))
output_file("stocks_timeseries.html")
p = TimeSeries(xyvalues, index='Date', legend=True,
title="Stocks", ylabel='Stock Prices')
show(p)
|
devs1991/test_edx_docmode
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/contrib/auth/migrations/0002_alter_permission_name_max_length.py
|
586
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='permission',
name='name',
field=models.CharField(max_length=255, verbose_name='name'),
),
]
|
gardster/omim
|
refs/heads/master
|
3party/protobuf/python/google/protobuf/internal/api_implementation.py
|
80
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Determine which implementation of the protobuf API is used in this process.
"""
import os
import sys
try:
# pylint: disable=g-import-not-at-top
from google.protobuf.internal import _api_implementation
# The compile-time constants in the _api_implementation module can be used to
# switch to a certain implementation of the Python API at build time.
_api_version = _api_implementation.api_version
del _api_implementation
except ImportError:
_api_version = 0
_default_implementation_type = (
'python' if _api_version == 0 else 'cpp')
_default_version_str = (
'1' if _api_version <= 1 else '2')
# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python' and 'cpp' are valid
# values. Any other value will be ignored.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
_default_implementation_type)
if _implementation_type != 'python':
_implementation_type = 'cpp'
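# For example, running a process with PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
# set in the environment forces the pure-Python implementation regardless of
# the compile-time default.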
# This environment variable can be used to switch between the two
# 'cpp' implementations, overriding the compile-time constants in the
# _api_implementation module. Right now only 1 and 2 are valid values. Any other
# value will be ignored.
_implementation_version_str = os.getenv(
'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
_default_version_str)
if _implementation_version_str not in ('1', '2'):
raise ValueError(
"unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
_implementation_version_str + "' (supported versions: 1, 2)"
)
_implementation_version = int(_implementation_version_str)
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
return _implementation_type
# See comment on 'Type' above.
def Version():
return _implementation_version
|
mailund/CoaSim
|
refs/heads/master
|
Python/modules/CoaSim/diseaseModelling.py
|
1
|
__doc__ = '''
CoaSim/Python -- Python bindings for Coasim
Copyright (C) 2006 by Thomas Mailund <mailund@mailund.dk>
This module contains functions for split sequences into affected and
unaffected individuals, based on various disease models.
'''
from random import uniform as _uniform
def _prj(indices, lst):
'''Extract values in indices in lst (a projection to those dimensions).'''
return [lst[i] for i in indices]
def _cut(indices, lst):
'''Remove indices from lst and create a new list with the
remaining values.'''
return [lst[i] for i in xrange(len(lst)) if i not in indices]
def _splitSequences(predicate, indices, sequences, keepIndices):
'''Split the list of sequences into two lists, the first list
containing the sequences where predicate evaluates to True the
other the sequences where predicate evaluates to False. The
function predicate is called with the values indexed by indices
the individual sequences only, not the full sequences. Unless
keepIndices is set to True, the indices passed to predicate is
removed from the resulting lists.'''
if keepIndices: c = lambda i,x: x
else: c = _cut
isTrue = [] ; isFalse = []
for seq in sequences:
if predicate(*_prj(indices,seq)): isTrue.append(c(indices,seq))
else: isFalse.append(c(indices,seq))
return (isTrue,isFalse)
def _haploidToDiploid(sequences):
'''Translates an even number of haploid sequences into a list of
diploid sequences by combining them pairwise.'''
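    # e.g. [[0,0,1],[1,0,1]] -> [[(0,1),(0,0),(1,1)]]: haplotypes 2i and 2i+1
    # are zipped site-wise into one diploid sequence of allele pairs.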
assert len(sequences) % 2 == 0
result = []
for i in xrange(len(sequences)/2):
h1 = sequences[2*i]
h2 = sequences[2*i+1]
result.append(zip(h1,h2))
return result
def _diploidToHaploid(sequences):
'''Translates a list of diploid sequences into a list of twice as
many haploid sequences.'''
result = []
for s in sequences:
result.append([x[0] for x in s])
result.append([x[1] for x in s])
return result
class DiseaseModel(object):
'''Abstract class for disease modelling.'''
HAPLOTYPE_MODEL = 1
GENOTYPE_MODEL = 2
def __init__(self, indices, model=HAPLOTYPE_MODEL, predicate=None):
'''Creates a disease model where the indices determines the
disease affecting markers and the mode is either
HAPLOTYPE_MODEL (for haploid data) or GENOTYPE_MODEL (for
diploid data).
If predicate is given, it must be a function that determines
disease status (returns True or False), dependent on the
alleles (or pairs of alleles for GENOTYPE_MODEL) passed to it
for each sequence.'''
if isinstance(indices,int): self.indices = [indices]
else: self.indices = indices
self.model = model
self.predicate = predicate
def __call__(self, *args):
if self.predicate is not None:
return self.predicate(*args)
raise NotImplementedError()
def singleMarkerDisease(markerIndex,
wildTypeRisk=0.0, mutantRisk=1.0,
homozygoteWildTypeRisk=0.0,
heterozygoteRisk=0.5,
homozygoteMutantRisk=1.0,
model=DiseaseModel.HAPLOTYPE_MODEL):
'''Builds a simple disease model, where only a single marker
affects the disease status, and where 0 assumed to be the
wild-type allele and any other allele a mutant allele (all with
the same disease risk).
The disease affecting marker is given by markerIndex, and where
the wild-type and mutant allelic risks are given by probabilities
of disease.
If the sequences are haploid (model is
DiseaseModel.HAPLOTYPE_MODEL), the probabilities wildTypeRisk and
mutantRisk are used; if the sequences are diploid (model is
DiseaseModel.GENOTYPE_MODEL), the probabilities
homozygoteWildTypeRisk, heterozygoteRisk, and homozygoteMutantRisk
are used.
'''
class Model(DiseaseModel):
def __init__(self):
DiseaseModel.__init__(self,[markerIndex],model)
if model == DiseaseModel.HAPLOTYPE_MODEL:
def __call__(self, allele):
if allele==0: return _uniform(0.0,1.0) < wildTypeRisk
else: return _uniform(0.0,1.0) < mutantRisk
else:
def __call__(self, (a1,a2)):
if (a1,a2) == (0,0):
return _uniform(0.0,1.0) < homozygoteWildTypeRisk
elif a1 == 0 or a2 == 0:
return _uniform(0.0,1.0) < heterozygoteRisk
else:
return _uniform(0.0,1.0) < homozygoteMutantRisk
return Model()
def dominantModel(markerIndex):
'''Builds a dominant diploid disease model for a single marker.'''
return singleMarkerDisease(markerIndex,
model=DiseaseModel.GENOTYPE_MODEL,
homozygoteWildTypeRisk=0,
heterozygoteRisk=1,
homozygoteMutantRisk=1)
def recessiveModel(markerIndex):
'''Builds a recessive diploid disease model for a single marker.'''
return singleMarkerDisease(markerIndex,
model=DiseaseModel.GENOTYPE_MODEL,
homozygoteWildTypeRisk=0,
heterozygoteRisk=0,
homozygoteMutantRisk=1)
def split(diseaseModel, sequences, keepIndices=False):
'''Split the list of sequences into two lists, based on the
disease model, the first list consisting of affected individuals,
the second on unaffected.
Unless keepIndices is set to True, the disease affecting markers,
as determined by diseaseModel, are removed from the resulting
sequences.'''
if diseaseModel.model == DiseaseModel.GENOTYPE_MODEL:
diploid = _haploidToDiploid(sequences)
cases,controls = _splitSequences(diseaseModel, diseaseModel.indices,
diploid, keepIndices)
return _diploidToHaploid(cases),_diploidToHaploid(controls)
else:
return _splitSequences(diseaseModel, diseaseModel.indices,
sequences, keepIndices)
if __name__ == '__main__':
lst = range(10)
assert _prj((0,2,4),lst) == [0,2,4]
assert _cut((0,2,4),lst) == [1,3,5,6,7,8,9]
seqs = [[0,0,1,1], [1,0,1,1], [0,0,0,0], [1,0,0,0]]
lst1,lst2 = _splitSequences(lambda a0: a0==1, [0], seqs, True)
assert lst1 == [seqs[1],seqs[3]]
assert lst2 == [seqs[0],seqs[2]]
lst1,lst2 = _splitSequences(lambda a0: a0==1, [0], seqs, False)
assert lst1 == [_cut([0],seqs[1]),_cut([0],seqs[3])]
assert lst2 == [_cut([0],seqs[0]),_cut([0],seqs[2])]
lst1,lst2 = _splitSequences(lambda a0,a2: a0==a2, (0,2), seqs, True)
assert lst1 == [seqs[1],seqs[2]]
assert lst2 == [seqs[0],seqs[3]]
lst1,lst2 = _splitSequences(lambda a0,a2: a0==a2, (0,2), seqs, False)
assert lst1 == [_cut((0,2),seqs[1]),_cut((0,2),seqs[2])]
assert lst2 == [_cut((0,2),seqs[0]),_cut((0,2),seqs[3])]
assert _haploidToDiploid(seqs) == [[(0,1), (0,0), (1,1), (1,1)],
[(0,1), (0,0), (0,0), (0,0)]]
assert seqs == _diploidToHaploid(_haploidToDiploid(seqs))
model = singleMarkerDisease(0)
lst1,lst2 = _splitSequences(model, [0], seqs, True)
assert lst1 == [seqs[1],seqs[3]]
assert lst2 == [seqs[0],seqs[2]]
lst1,lst2 = _splitSequences(model, [0], seqs, False)
assert lst1 == [_cut([0],seqs[1]),_cut([0],seqs[3])]
assert lst2 == [_cut([0],seqs[0]),_cut([0],seqs[2])]
lst1,lst2 = split(model, seqs, True)
assert lst1 == [seqs[1],seqs[3]]
assert lst2 == [seqs[0],seqs[2]]
lst1,lst2 = split(model, seqs)
assert lst1 == [_cut([0],seqs[1]),_cut([0],seqs[3])]
assert lst2 == [_cut([0],seqs[0]),_cut([0],seqs[2])]
lst1,lst2 = split(dominantModel(0),seqs,True)
assert lst1 == seqs
assert lst2 == []
lst1,lst2 = split(recessiveModel(3),seqs,True)
assert lst1 == seqs[:2]
assert lst2 == seqs[2:]
try:
split(DiseaseModel(0), seqs)
assert False
except NotImplementedError:
pass
af, unaf = split(DiseaseModel(0,predicate=lambda a: True), seqs, True)
assert af == seqs
assert unaf == []
af, unaf = split(DiseaseModel([0,1],predicate=lambda a,b: True), seqs, True)
assert af == seqs
assert unaf == []
def pred(a0,a2): return (a0,a2) == (1,1)
af, unaf = split(DiseaseModel([0,2],predicate=pred), seqs)
assert af == [[0,1]]
assert unaf == [[0,1],[0,0],[0,0]]
class DM(DiseaseModel):
def __call__(self, a0, a2):
return (a0,a2) == (1,1)
af, unaf = split(DM([0,2]), seqs)
assert af == [[0,1]]
assert unaf == [[0,1],[0,0],[0,0]]
def pred(p0,p2):
a00, a02 = p0
a20, a22 = p2
return a00!=a20 and a02==a22
dm = DiseaseModel([0,2],predicate=pred,
model=DiseaseModel.GENOTYPE_MODEL)
af, unaf = split(dm, seqs)
assert af == [[0,1],[0,1]]
assert unaf == [[0,0],[0,0]]
class DM(DiseaseModel):
def __init__(self, indices):
DiseaseModel.__init__(self,indices,
model=DiseaseModel.GENOTYPE_MODEL)
def __call__(self, p0, p2):
a00, a02 = p0
a20, a22 = p2
return a00!=a20 and a02==a22
af, unaf = split(DM([0,2]), seqs)
assert af == [[0,1],[0,1]]
assert unaf == [[0,0],[0,0]]
|
nive-cms/nive
|
refs/heads/master
|
nive/helper.py
|
1
|
#----------------------------------------------------------------------
# Copyright 2012, 2013 Arndt Droullier, Nive GmbH. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#----------------------------------------------------------------------
__doc__ = ""
import json
import os
from datetime import datetime
from pyramid.path import DottedNameResolver
from pyramid.path import AssetResolver
from pyramid.path import caller_package
from nive.definitions import IAppConf, IDatabaseConf, IFieldConf, IRootConf, IObjectConf, IViewModuleConf
from nive.definitions import IViewConf, IToolConf, IPortalConf, IGroupConf, ICategoryConf, IModuleConf
from nive.definitions import IWidgetConf, IWfProcessConf, IWfStateConf, IWfTransitionConf, IConf
from nive.definitions import baseConf
from nive.definitions import implements, ConfigurationError
from nive.utils.dataPool2.files import File
from nive.utils.utils import LoadFromFile
def ResolveName(name, base=None, raiseExcp=True):
"""
Lookup python object by dotted python name.
Wraps pyramid.DottedNameResolver.
returns object or None
"""
if not name:
return None
if not isinstance(name, basestring):
return name
if not base:
base = caller_package()
if not raiseExcp:
d = DottedNameResolver(base)
return d.maybe_resolve(name)
d = DottedNameResolver(base)
return d.resolve(name)
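# Example (sketch): resolving a dotted name to the object it refers to.
# Non-string inputs are simply passed through, as handled above.
#
#   join = ResolveName("os.path.join")   # -> the os.path.join function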
def ResolveAsset(name, base=None, raiseExcp=True):
"""
Lookup asset path (template, json or any other file) and returns asset
descriptor object or None.
"""
if not name:
return None
if not isinstance(name, basestring):
return name
if not base:
base = caller_package()
if name.startswith("./"):
# use relative file system path
name = os.getcwd()+name[1:]
if not raiseExcp:
try:
d = AssetResolver(base)
return d.resolve(name)
except:
return None
d = AssetResolver(base)
return d.resolve(name)
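# Example (hedged sketch): "mypackage:templates/main.pt" is a hypothetical
# package-relative asset spec; paths starting with "./" are resolved
# against the current working directory, as handled above.
#
#   asset = ResolveAsset("mypackage:templates/main.pt")
#   path = asset.abspath()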
def ResolveConfiguration(conf, base=None):
"""
Lookup configuration object by dotted python name. Returns interface and configuration object.
Extends pyramid.DottedNameResolver with .json file support for configuration objects.
Supports the following cases:
- Path and file name to a .json file. Requires `type` set to one of the
configuration types:
*AppConf, FieldConf, DatabaseConf, RootConf, ObjectConf, ViewModuleConf, ViewConf, ToolConf,
GroupConf, CategoryConf*
- Dotted python name for configuration object including attribute name of configuration instance.
- Dotted python name for object. Uses the convention to load the configuration from the
'configuration' attribute of the referenced object.
- Configuration instance. Will just return it.
returns Interface, configuration
"""
# string instance
if isinstance(conf, basestring):
if not base:
base = caller_package()
# json file
if conf.find(".json")!= -1:
path = ResolveAsset(conf, base)
s = LoadFromFile(path.abspath())
conf = json.loads(s)
# resolve attribute name
elif conf:
c = ResolveName(conf, base=base)
if hasattr(c, "configuration"):
conf = c.configuration
else:
conf = c
# dict instance
if isinstance(conf, dict):
# load by interface
if not "type" in conf:
raise TypeError, "Configuration type not defined"
c = ResolveName(conf["type"], base="nive")
del conf["type"]
conf = c(**conf)
# module and not configuration
if not IConf.providedBy(conf):
if hasattr(conf, "configuration"):
conf = conf.configuration
# object instance
if IAppConf.providedBy(conf): return IAppConf, conf
if IDatabaseConf.providedBy(conf): return IDatabaseConf, conf
if IFieldConf.providedBy(conf): return IFieldConf, conf
if IRootConf.providedBy(conf): return IRootConf, conf
if IObjectConf.providedBy(conf): return IObjectConf, conf
if IViewModuleConf.providedBy(conf): return IViewModuleConf, conf
if IViewConf.providedBy(conf): return IViewConf, conf
if IToolConf.providedBy(conf): return IToolConf, conf
if IPortalConf.providedBy(conf): return IPortalConf, conf
if IGroupConf.providedBy(conf): return IGroupConf, conf
if ICategoryConf.providedBy(conf): return ICategoryConf, conf
if IModuleConf.providedBy(conf): return IModuleConf, conf
if IWidgetConf.providedBy(conf): return IWidgetConf, conf
if IWfProcessConf.providedBy(conf): return IWfProcessConf, conf
if IWfStateConf.providedBy(conf): return IWfStateConf, conf
if IWfTransitionConf.providedBy(conf): return IWfTransitionConf, conf
if IConf.providedBy(conf): return IConf, conf
return None, conf
#raise TypeError, "Unknown configuration object: %s" % (str(conf))
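# Example (hedged sketch): assuming a hypothetical module `myapp.root`
# that exposes a module-level `configuration` attribute, both calls
# below return an (interface, configuration) pair:
#
#   iface, conf = ResolveConfiguration("myapp.root")
#   iface, conf = ResolveConfiguration("myapp/root.json")  # file needs a "type" key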
def LoadConfiguration(conf, base=None):
"""
Same as ResolveConfiguration, except that only the configuration
object is returned.
"""
if not base:
base = caller_package()
i,c = ResolveConfiguration(conf, base)
return c
def FormatConfTestFailure(report, fmt="text"):
"""
Format configuration test() failure
returns string
"""
v = []
for r in report:
v.append(u"-----------------------------------------------------------------------------------\r\n")
v.append(str(r[0]) + " " + r[1] + "\r\n")
v.append(u"-----------------------------------------------------------------------------------\r\n")
for d in r[2].__dict__.items():
a = d[1]
if a is None:
try:
a = r[2].parent.get(d[0])
except:
pass
v.append(str(d[0]) + u": " + str(a) + u"\r\n")
v.append(u"\r\n")
return "".join(v)
class ConfEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, baseConf):
values = {}
for k in obj:
values[k] = obj[k]
return values
return json.JSONEncoder.default(self, obj)
class ConfDecoder(object):
def decode(self, jsonstring):
def object_hook(obj):
if isinstance(obj, dict):
try:
confclass = obj["ccc"]
except:
return obj
if not confclass:
raise ConfigurationError, "Configuration class not found (ccc)"
conf = ResolveName(confclass, base="nive")(**obj)
return conf
return obj
return json.JSONDecoder(object_hook=object_hook).decode(jsonstring)
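# Example (sketch): serializing a baseConf-derived object through the
# standard json machinery and decoding it again. Decoding only restores a
# configuration instance if the data carries a "ccc" class marker:
#
#   data = json.dumps(conf, cls=ConfEncoder)
#   conf2 = ConfDecoder().decode(data)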
def DumpJSONConf(conf):
# dump configuration to json
values = {}
for k in conf:
v = conf[k]
if isinstance(v, baseConf):
values[k] = DumpJSONConf(v)
elif isinstance(v, datetime):
values[k] = str(v)
else:
values[k] = v
return json.dumps(values)
def LoadJSONConf(jsondata, default=None):
# jsondata must be a json string or dictionary
# load from json
# default: the default configuration class to be used if the json values do not
# specify the class as `ccc`
if isinstance(jsondata, basestring):
try:
jsondata = json.loads(jsondata)
except:
return jsondata
if not isinstance(jsondata, dict):
return jsondata
for k,v in jsondata.items():
jsondata[k] = LoadJSONConf(v, default=default)
confclass = jsondata.get("ccc")
if not confclass:
if not default:
raise ConfigurationError, "Configuration class not found (ccc)"
return default(**jsondata)
conf = ResolveName(confclass, base="nive")(**jsondata)
return conf
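# Round-trip sketch (hedged): DumpJSONConf serializes nested baseConf
# values to JSON; LoadJSONConf restores them, picking the configuration
# class from the "ccc" value or falling back to `default`:
#
#   data = DumpJSONConf(conf)
#   conf2 = LoadJSONConf(data, default=conf.__class__)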
def ClassFactory(configuration, reloadClass=False, raiseError=True, base=None):
"""
Creates a python class reference from configuration. Uses configuration.context as class
and dynamically adds classes listed as configuration.extensions as base classes.
configuration requires
- configuration.context
- configuration.extensions [optional]
If reloadClass = False the class is cached as configuration._v_class.
"""
if not reloadClass:
try:
return configuration._v_class
except:
pass
tag = configuration.context
if "extensions" in configuration:
bases = configuration.extensions
else:
bases = None
cls = GetClassRef(tag, reloadClass, raiseError, base)
if not cls:
return None
def cacheCls(configuration, cls):
# store type() class
lock = configuration.locked
if lock:
configuration.unlock()
configuration._v_class = cls
if lock:
configuration.lock()
if not bases:
cacheCls(configuration, cls)
return cls
# load extensions
b = [cls]
#opt
for r in bases:
r = GetClassRef(r, reloadClass, raiseError, base)
if not r:
continue
b.append(r)
if len(b)==1:
return cls
# create new class with name configuration.context
cls = type("_factory_"+cls.__name__, tuple(b), {})
cacheCls(configuration, cls)
return cls
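# Example (hedged sketch): for a hypothetical configuration with
# context "myapp.page.Page" and extensions ("myapp.ext.Ratings",),
# ClassFactory effectively builds
#   type("_factory_Page", (Page, Ratings), {})
# and caches it on the configuration as _v_class:
#
#   cls = ClassFactory(configuration)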
def GetClassRef(tag, reloadClass=False, raiseError=True, base=None):
"""
Resolve class reference from python dotted string.
"""
if isinstance(tag, basestring):
if raiseError:
classRef = ResolveName(tag, base=base)
else:
try:
classRef = ResolveName(tag, base=base)
except ImportError, e:
return None
if not classRef:
return None
#if reloadClass:
# reload(classRef)
return classRef
# tag is class ref
return tag
# test request and response --------------------------------------
class Response(object):
headerlist = []
class Request(object):
POST = {}
GET = {}
url = ""
username = ""
response = Response()
environ = {}
class Event(object):
request = Request()
class FakeLocalizer(object):
def translate(self, text):
try:
if text.mapping:
v = unicode(text)
for k in text.mapping:
v = v.replace(u"${%s}"%k, unicode(text.mapping[k]))
return v
except:
pass
return text
|
sergeii/swat-motd
|
refs/heads/master
|
fabfile/server.py
|
12
|
# -*- coding: utf-8 -*-
from fabric.api import *
from .settings import env
from .utils import here, checkout, edit_ini
_kits = env.kits.keys()
@task
@roles('server')
def all():
"""Set up the compiled packages on a clean server, then launch it."""
setup()
install()
launch()
@task
@roles('server')
def setup():
"""Set up a SWAT 4 test server."""
checkout(env.server['git'], env.server['path'])
@task
@roles('server')
def install(kits=_kits):
"""Install the compiled packages on a test server."""
with quiet():
# configure separate servers for every listed kit
for kit in kits:
with cd(env.server['path'].child(env.kits[kit]['content'], 'System')):
# transfer compiled packages
for package, _ in env.ucc['packages']:
put(env.paths['compiled'].child(kit, '{}.u'.format(package)), '.')
# edit Swat4DedicatedServer.ini
with edit_ini(env.kits[kit]['ini']) as ini:
for section, lines in env.server['settings'].items():
# append extra lines to a section
if section[0] == '+':
ini.append_unique(section[1:], *lines)
# set/replace section
else:
ini.replace(section, *lines)
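# Settings sketch (hypothetical section and option names): a key prefixed
# with '+' appends lines to an ini section, any other key replaces the
# section wholesale:
#
#   env.server['settings'] = {
#       '+Engine.GameEngine': ('ServerActors=Foo.Bar',),
#       'Engine.GameReplicationInfo': ('ServerName="test server"',),
#   }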
@task
@roles('server')
def launch(kits=_kits):
"""Run a swat demo server."""
# configure a separate server for every listed kit
for kit in kits:
puts('Starting {}'.format(env.kits[kit]['server']))
with cd(env.server['path'].child(env.kits[kit]['content'], 'System')):
run('DISPLAY=:0 screen -d -m wine {}'.format(env.kits[kit]['server']))
if prompt('Stop the servers?', default='y').lower().startswith('y'):
kill(kits)
@task
@roles('server')
def kill(kits=_kits):
"""Stop all Swat4DedicatedServer(X).exe processes."""
for kit in kits:
puts('Stopping {}'.format(env.kits[kit]['server']))
with quiet():
run('killall {}'.format(env.kits[kit]['server']))
|
shollen/evennia
|
refs/heads/master
|
evennia/comms/models.py
|
2
|
"""
Models for the in-game communication system.
The comm system could take the form of channels, but can also be
adopted for storing tells or in-game mail.
The comsystem's main component is the Message (Msg), which carries the
actual information between two parties. Msgs are stored in the
database and usually not deleted. A Msg always have one sender (a
user), but can have any number targets, both users and channels.
For non-persistent (and slightly faster) use one can also use the
TempMsg, which mimics the Msg API but without actually saving to the
database.
Channels are central objects that act as targets for Msgs. Players can
connect to channels by use of a ChannelConnect object (this object is
necessary to easily be able to delete connections on the fly).
"""
from builtins import object
from django.conf import settings
from django.utils import timezone
from django.db import models
from evennia.typeclasses.models import TypedObject
from evennia.typeclasses.tags import Tag, TagHandler
from evennia.utils.idmapper.models import SharedMemoryModel
from evennia.comms import managers
from evennia.locks.lockhandler import LockHandler
from evennia.utils.utils import crop, make_iter, lazy_property
__all__ = ("Msg", "TempMsg", "ChannelDB")
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
#------------------------------------------------------------
#
# Msg
#
#------------------------------------------------------------
class Msg(SharedMemoryModel):
"""
A single message. This model describes all OOC messages
sent in-game, both to channels and between players.
The Msg class defines the following database fields (all
accessed via specific handler methods):
- db_sender_players: Player senders
- db_sender_objects: Object senders
- db_sender_external: External senders (defined as string names)
- db_receivers_players: Receiving players
- db_receivers_objects: Receiving objects
- db_receivers_channels: Receiving channels
- db_header: Header text
- db_message: The actual message text
- db_date_sent: time message was sent
- db_hide_from_sender: bool if message should be hidden from sender
- db_hide_from_receivers: list of receiver objects to hide message from
- db_hide_from_channels: list of channels objects to hide message from
- db_lock_storage: Internal storage of lock strings.
"""
#
# Msg database model setup
#
#
# These database fields are all set using their corresponding properties,
# named the same as the field but without the db_* prefix.
# Sender is either a player, an object or an external sender, like
# an IRC channel; normally there is only one, but if co-modification of
# a message is allowed, there may be more than one "author"
db_sender_players = models.ManyToManyField("players.PlayerDB", related_name='sender_player_set',
null=True, verbose_name='sender(player)', db_index=True)
db_sender_objects = models.ManyToManyField("objects.ObjectDB", related_name='sender_object_set',
null=True, verbose_name='sender(object)', db_index=True)
db_sender_external = models.CharField('external sender', max_length=255, null=True, db_index=True,
help_text="identifier for external sender, for example a sender over an "
"IRC connection (i.e. someone who doesn't have an exixtence in-game).")
# The destination objects of this message. Stored as a
# comma-separated string of object dbrefs. Can be defined along
# with channels below.
db_receivers_players = models.ManyToManyField('players.PlayerDB', related_name='receiver_player_set',
null=True, help_text="player receivers")
db_receivers_objects = models.ManyToManyField('objects.ObjectDB', related_name='receiver_object_set',
null=True, help_text="object receivers")
db_receivers_channels = models.ManyToManyField("ChannelDB", related_name='channel_set',
null=True, help_text="channel recievers")
# header could be used for meta-info about the message if your system needs
# it, or as a separate store for the mail subject line maybe.
db_header = models.TextField('header', null=True, blank=True)
# the message body itself
db_message = models.TextField('message')
# send date
db_date_sent = models.DateTimeField('date sent', editable=False, auto_now_add=True, db_index=True)
# lock storage
db_lock_storage = models.TextField('locks', blank=True,
help_text='access locks on this message.')
# these can be used to filter/hide a given message from supplied objects/players/channels
db_hide_from_players = models.ManyToManyField("players.PlayerDB", related_name='hide_from_players_set', null=True)
db_hide_from_objects = models.ManyToManyField("objects.ObjectDB", related_name='hide_from_objects_set', null=True)
db_hide_from_channels = models.ManyToManyField("ChannelDB", related_name='hide_from_channels_set', null=True)
db_tags = models.ManyToManyField(Tag, null=True,
help_text='tags on this message. Tags are simple string markers to identify, group and alias messages.')
# Database manager
objects = managers.MsgManager()
_is_deleted = False
def __init__(self, *args, **kwargs):
SharedMemoryModel.__init__(self, *args, **kwargs)
self.extra_senders = []
class Meta(object):
"Define Django meta options"
verbose_name = "Message"
@lazy_property
def locks(self):
return LockHandler(self)
@lazy_property
def tags(self):
return TagHandler(self)
# Wrapper properties to easily set database fields. These are
# @property decorators that allows to access these fields using
# normal python operations (without having to remember to save()
# etc). So e.g. a property 'attr' has a get/set/del decorator
# defined that allows the user to do self.attr = value,
# value = self.attr and del self.attr respectively (where self
# is the object in question).
# sender property (wraps db_sender_*)
#@property
def __senders_get(self):
"Getter. Allows for value = self.sender"
return list(self.db_sender_players.all()) + \
list(self.db_sender_objects.all()) + \
self.extra_senders
#@sender.setter
def __senders_set(self, senders):
"Setter. Allows for self.sender = value"
for sender in make_iter(senders):
if not sender:
continue
if isinstance(sender, basestring):
self.db_sender_external = sender
self.extra_senders.append(sender)
self.save(update_fields=["db_sender_external"])
continue
if not hasattr(sender, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = sender.__dbclass__.__name__
if clsname == "ObjectDB":
self.db_sender_objects.add(sender)
elif clsname == "PlayerDB":
self.db_sender_players.add(sender)
#@sender.deleter
def __senders_del(self):
"Deleter. Clears all senders"
self.db_sender_players.clear()
self.db_sender_objects.clear()
self.db_sender_external = ""
self.extra_senders = []
self.save()
senders = property(__senders_get, __senders_set, __senders_del)
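# Usage sketch (hedged): the property wrappers above make sender handling
# look like plain attribute access:
#
#   msg.senders = player      # appends a sender
#   msg.senders = "ircbot"    # strings become external senders
#   del msg.senders           # clears all senders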
def remove_sender(self, senders):
"""
Remove a single sender or a list of senders.
Args:
senders (Player, Object, str or list): Senders to remove.
"""
for sender in make_iter(senders):
if not sender:
continue
if isinstance(sender, basestring):
self.db_sender_external = ""
self.save(update_fields=["db_sender_external"])
if not hasattr(sender, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = sender.__dbclass__.__name__
if clsname == "ObjectDB":
self.db_sender_objects.remove(sender)
elif clsname == "PlayerDB":
self.db_sender_players.remove(sender)
# receivers property
#@property
def __receivers_get(self):
"""
Getter. Allows for value = self.receivers.
Returns a combined list of player and object receivers.
"""
return list(self.db_receivers_players.all()) + list(self.db_receivers_objects.all())
#@receivers.setter
def __receivers_set(self, receivers):
"""
Setter. Allows for self.receivers = value.
This appends a new receiver to the message.
"""
for receiver in make_iter(receivers):
if not receiver:
continue
if not hasattr(receiver, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = receiver.__dbclass__.__name__
if clsname == "ObjectDB":
self.db_receivers_objects.add(receiver)
elif clsname == "PlayerDB":
self.db_receivers_players.add(receiver)
#@receivers.deleter
def __receivers_del(self):
"Deleter. Clears all receivers"
self.db_receivers_players.clear()
self.db_receivers_objects.clear()
self.save()
receivers = property(__receivers_get, __receivers_set, __receivers_del)
def remove_receiver(self, receivers):
"""
Remove a single receiver or a list of receivers.
Args:
receivers (Player, Object, Channel or list): Receiver to remove.
"""
for receiver in make_iter(receivers):
if not receiver:
continue
if not hasattr(receiver, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = receiver.__dbclass__.__name__
if clsname == "ObjectDB":
self.db_receivers_objects.remove(receiver)
elif clsname == "PlayerDB":
self.db_receivers_players.remove(receiver)
# channels property
#@property
def __channels_get(self):
"Getter. Allows for value = self.channels. Returns a list of channels."
return self.db_receivers_channels.all()
#@channels.setter
def __channels_set(self, value):
"""
Setter. Allows for self.channels = value.
Appends the given channel(s) to the message.
"""
for val in (v for v in make_iter(value) if v):
self.db_receivers_channels.add(val)
#@channels.deleter
def __channels_del(self):
"Deleter. Allows for del self.channels"
self.db_receivers_channels.clear()
self.save()
channels = property(__channels_get, __channels_set, __channels_del)
def __hide_from_get(self):
"""
Getter. Allows for value = self.hide_from.
Returns 3 lists of players, objects and channels
"""
return self.db_hide_from_players.all(), self.db_hide_from_objects.all(), self.db_hide_from_channels.all()
#@hide_from_sender.setter
def __hide_from_set(self, hiders):
"Setter. Allows for self.hide_from = value. Will append to hiders"
for hider in make_iter(hiders):
if not hider:
continue
if not hasattr(hider, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = hider.__dbclass__.__name__
if clsname == "PlayerDB":
self.db_hide_from_players.add(hider.__dbclass__)
elif clsname == "ObjectDB":
self.db_hide_from_objects.add(hider.__dbclass__)
elif clsname == "ChannelDB":
self.db_hide_from_channels.add(hider.__dbclass__)
#@hide_from_sender.deleter
def __hide_from_del(self):
"Deleter. Allows for del self.hide_from_senders"
self.db_hide_from_players.clear()
self.db_hide_from_objects.clear()
self.db_hide_from_channels.clear()
self.save()
hide_from = property(__hide_from_get, __hide_from_set, __hide_from_del)
#
# Msg class methods
#
def __str__(self):
"This handles what is shown when e.g. printing the message"
senders = ",".join(obj.key for obj in self.senders)
receivers = ",".join(["[%s]" % obj.key for obj in self.channels] + [obj.key for obj in self.receivers])
return "%s->%s: %s" % (senders, receivers, crop(self.message, width=40))
#------------------------------------------------------------
#
# TempMsg
#
#------------------------------------------------------------
class TempMsg(object):
"""
This is a non-persistent object for sending temporary messages
that will not be stored. It mimics the "real" Msg object, but
doesn't require sender to be given.
"""
def __init__(self, senders=None, receivers=None, channels=None, message="", header="", type="", lockstring="", hide_from=None):
"""
Creates the temp message.
Args:
senders (any or list, optional): Senders of the message.
receivers (Player, Object, Channel or list, optional): Receivers of this message.
channels (Channel or list, optional): Channels to send to.
message (str, optional): Message to send.
header (str, optional): Header of message.
type (str, optional): Message class, if any.
lockstring (str, optional): Lock for the message.
hide_from (Player, Object, Channel or list, optional): Entities to hide this message from.
"""
self.senders = senders and make_iter(senders) or []
self.receivers = receivers and make_iter(receivers) or []
self.channels = channels and make_iter(channels) or []
self.type = type
self.header = header
self.message = message
self.lock_storage = lockstring
self.hide_from = hide_from and make_iter(hide_from) or []
self.date_sent = timezone.now()
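# Usage sketch (hedged): a throw-away message that mimics Msg but is
# never written to the database:
#
#   tmp = TempMsg(senders=player, receivers=[target], message="Hi!")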
@lazy_property
def locks(self):
return LockHandler(self)
def __str__(self):
"""
This handles what is shown when e.g. printing the message.
"""
senders = ",".join(obj.key for obj in self.senders)
receivers = ",".join(["[%s]" % obj.key for obj in self.channels] + [obj.key for obj in self.receivers])
return "%s->%s: %s" % (senders, receivers, crop(self.message, width=40))
def remove_sender(self, sender):
"""
Remove a sender or a list of senders.
Args:
sender (Object, Player, str or list): Senders to remove.
"""
for o in make_iter(sender):
try:
self.senders.remove(o)
except ValueError:
pass # nothing to remove
def remove_receiver(self, receiver):
"""
Remove a receiver or a list of receivers
Args:
receiver (Object, Player, Channel, str or list): Receivers to remove.
"""
for o in make_iter(receiver):
try:
# remove from the receiver list, not the sender list
self.receivers.remove(o)
except ValueError:
pass # nothing to remove
def access(self, accessing_obj, access_type='read', default=False):
"""
Checks lock access.
Args:
accessing_obj (Object or Player): The object trying to gain access.
access_type (str, optional): The type of lock access to check.
default (bool): Fallback to use if `access_type` lock is not defined.
Returns:
result (bool): If access was granted or not.
"""
return self.locks.check(accessing_obj,
access_type=access_type, default=default)
#------------------------------------------------------------
#
# Channel
#
#------------------------------------------------------------
class SubscriptionHandler(object):
"""
This handler manages subscriptions to the
channel and hides away which type of entity is
subscribing (Player or Object)
"""
def __init__(self, obj):
"""
Initialize the handler
Attr:
obj (ChannelDB): The channel the handler sits on.
"""
self.obj = obj
def has(self, entity):
"""
Check if the given entity subscribe to this channel
Args:
entity (str, Player or Object): The entity to return. If
a string, it assumed to be the key or the #dbref
of the entity.
Returns:
subscriber (Player, Object or None): The given
subscriber.
"""
clsname = entity.__dbclass__.__name__
if clsname == "PlayerDB":
return entity in self.obj.db_subscriptions.all()
elif clsname == "ObjectDB":
return entity in self.obj.db_object_subscriptions.all()
def add(self, entity):
"""
Subscribe an entity to this channel.
Args:
entity (Player, Object or list): The entity or
list of entities to subscribe to this channel.
Note:
No access-checking is done here, this must have
been done before calling this method. Also
no hooks will be called.
"""
for subscriber in make_iter(entity):
if subscriber:
clsname = subscriber.__dbclass__.__name__
# chooses the right type
if clsname == "ObjectDB":
self.obj.db_object_subscriptions.add(subscriber)
elif clsname == "PlayerDB":
self.obj.db_subscriptions.add(subscriber)
def remove(self, entity):
"""
Remove a subscriber from the channel.
Args:
entity (Player, Object or list): The entity or
entities to un-subscribe from the channel.
"""
for subscriber in make_iter(entity):
if subscriber:
clsname = subscriber.__dbclass__.__name__
# remove from the matching subscription set
if clsname == "PlayerDB":
self.obj.db_subscriptions.remove(subscriber)
elif clsname == "ObjectDB":
self.obj.db_object_subscriptions.remove(subscriber)
def all(self):
"""
Get all subscriptions to this channel.
Returns:
subscribers (list): The subscribers. This
may be a mix of Players and Objects!
"""
return list(self.obj.db_subscriptions.all()) + \
list(self.obj.db_object_subscriptions.all())
def clear(self):
"""
Remove all subscribers from channel.
"""
self.obj.db_subscriptions.clear()
self.obj.db_object_subscriptions.clear()
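# Usage sketch (hedged): the handler hides whether a subscriber is a
# Player or an Object:
#
#   channel.subscriptions.add(player)
#   channel.subscriptions.has(player)   # -> True
#   channel.subscriptions.all()         # mixed list of Players/Objects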
class ChannelDB(TypedObject):
"""
This is the basis of a comm channel, only implementing
the very basics of distributing messages.
The Channel class defines the following database fields
beyond the ones inherited from TypedObject:
- db_subscriptions: The Player subscriptions (this is the most
usual case; named this way for legacy reasons).
- db_object_subscriptions: The Object subscriptions.
"""
db_subscriptions = models.ManyToManyField("players.PlayerDB",
related_name="subscription_set", null=True, verbose_name='subscriptions', db_index=True)
db_object_subscriptions = models.ManyToManyField("objects.ObjectDB",
related_name="object_subscription_set", null=True, verbose_name='subscriptions', db_index=True)
# Database manager
objects = managers.ChannelDBManager()
__settingclasspath__ = settings.BASE_CHANNEL_TYPECLASS
__defaultclasspath__ = "evennia.comms.comms.DefaultChannel"
__applabel__ = "comms"
class Meta(object):
"Define Django meta options"
verbose_name = "Channel"
verbose_name_plural = "Channels"
def __str__(self):
"Echoes the text representation of the channel."
return "Channel '%s' (%s)" % (self.key, self.db.desc)
@lazy_property
def subscriptions(self):
return SubscriptionHandler(self)
|
SebDieBln/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/grass7/ext/v_net_arcs.py
|
3
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
v_net_arcs.py
---------------------
Date : December 2015
Copyright : (C) 2015 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'December 2015'
__copyright__ = '(C) 2015, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from v_net import variableOutput
def processOutputs(alg):
outputParameter = {u"output": [u"line", 1]}
variableOutput(alg, outputParameter)
|
snailhu/myself-Repository
|
refs/heads/master
|
SmartDataApp/migrations/0048_auto__add_notification__del_field_community_notification_time__del_fie.py
|
1
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Notification'
db.create_table(u'SmartDataApp_notification', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('notification_content', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('notification_time', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('notification_theme', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('notification_community', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['SmartDataApp.Community'], null=True)),
))
db.send_create_signal(u'SmartDataApp', ['Notification'])
# Deleting field 'Community.notification_time'
db.delete_column(u'SmartDataApp_community', 'notification_time')
# Deleting field 'Community.notification_theme'
db.delete_column(u'SmartDataApp_community', 'notification_theme')
# Deleting field 'Community.notification_content'
db.delete_column(u'SmartDataApp_community', 'notification_content')
def backwards(self, orm):
# Deleting model 'Notification'
db.delete_table(u'SmartDataApp_notification')
# Adding field 'Community.notification_time'
db.add_column(u'SmartDataApp_community', 'notification_time',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
# Adding field 'Community.notification_theme'
db.add_column(u'SmartDataApp_community', 'notification_theme',
self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True),
keep_default=False)
# Adding field 'Community.notification_content'
db.add_column(u'SmartDataApp_community', 'notification_content',
self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True),
keep_default=False)
models = {
u'SmartDataApp.community': {
'Meta': {'object_name': 'Community'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'SmartDataApp.complaints': {
'Meta': {'object_name': 'Complaints'},
'author': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'author_detail': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.ProfileDetail']", 'null': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'content': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'handler': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_worker_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pleased': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pleased_reason': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'src': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'SmartDataApp.express': {
'Meta': {'object_name': 'Express'},
'allowable_get_express_time': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'arrive_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.ProfileDetail']", 'null': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'get_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'handler': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_worker_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pleased': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pleased_reason': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'status': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'submit_express_status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'SmartDataApp.housekeeping': {
'Meta': {'object_name': 'Housekeeping'},
'allow_deal_time': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.ProfileDetail']", 'null': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'handler': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'housekeeping_item': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Housekeeping_items']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_worker_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pleased': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pleased_reason': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'SmartDataApp.housekeeping_items': {
'Meta': {'object_name': 'Housekeeping_items'},
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'price_description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'remarks': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'})
},
u'SmartDataApp.notification': {
'Meta': {'object_name': 'Notification'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'notification_content': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'notification_theme': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'notification_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'SmartDataApp.picture': {
'Meta': {'object_name': 'Picture'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keep': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'like': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'src': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'}),
'timestamp_add': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'SmartDataApp.profiledetail': {
'Meta': {'object_name': 'ProfileDetail'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'car_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'device_chanel_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True'}),
'device_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True'}),
'device_user_id': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '250', 'null': 'True'}),
'floor': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'gate_card': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '11', 'null': 'True'}),
'profile': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'SmartDataApp.repair': {
'Meta': {'object_name': 'Repair'},
'author': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'author_detail': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.ProfileDetail']", 'null': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
'content': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'blank': 'True'}),
'handler': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_worker_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pleased': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pleased_reason': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'repair_item': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'src': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'SmartDataApp.repair_item': {
'Meta': {'object_name': 'Repair_item'},
'community': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Community']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'SmartDataApp.transaction': {
'Meta': {'object_name': 'Transaction'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'grade_num': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'money_num': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '19', 'decimal_places': '6'}),
'remark': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'wallet_profile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.Wallet']", 'null': 'True'})
},
u'SmartDataApp.wallet': {
'Meta': {'object_name': 'Wallet'},
'grade_sum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'money_sum': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '19', 'decimal_places': '6'}),
'user_profile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['SmartDataApp.ProfileDetail']", 'null': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['SmartDataApp']
|
Korkki/django
|
refs/heads/master
|
tests/migrations/migrations_test_apps/unspecified_app_with_conflict/migrations/0002_second.py
|
425
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("unspecified_app_with_conflict", "0001_initial")]
operations = [
migrations.DeleteModel("Tribble"),
migrations.RemoveField("Author", "silly_field"),
migrations.AddField("Author", "rating", models.IntegerField(default=0)),
migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
],
)
]
|
hasadna/django
|
refs/heads/master
|
django/contrib/gis/gdal/tests/test_srs.py
|
351
|
from django.contrib.gis.gdal import SpatialReference, CoordTransform, OGRException, SRSException
from django.utils import unittest
class TestSRS:
def __init__(self, wkt, **kwargs):
self.wkt = wkt
for key, value in kwargs.items():
setattr(self, key, value)
# Some Spatial Reference examples
srlist = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ',
epsg=4326, projected=False, geographic=True, local=False,
lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'GEOGCS' : ('EPSG', '4326'), 'spheroid' : ('EPSG', '7030')},
attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'),('primem|authority', 'EPSG'),),
),
TestSRS('PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
proj=None, epsg=32140, projected=True, geographic=False, local=False,
lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'PROJCS' : ('EPSG', '32140'), 'spheroid' : ('EPSG', '7019'), 'unit' : ('EPSG', '9001'),},
attr=(('DATUM', 'North_American_Datum_1983'),(('SPHEROID', 2), '298.257222101'),('PROJECTION','Lambert_Conformal_Conic_2SP'),),
),
TestSRS('PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.3048006096012192]]',
proj=None, epsg=None, projected=True, geographic=False, local=False,
lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199,
auth={'PROJCS' : (None, None),},
attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'),(('projcs', 9), 'UNIT'), (('projcs', 11), None),),
),
# This is really ESRI format, not WKT -- but the import should work the same
TestSRS('LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
esri=True, proj=None, epsg=None, projected=False, geographic=False, local=True,
lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')),
),
)
# Well-Known Names
well_known = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', wk='WGS84', name='WGS 84', attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84'))),
TestSRS('GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4322"]]', wk='WGS72', name='WGS 72', attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72'))),
TestSRS('GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]]', wk='NAD27', name='NAD27', attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))),
TestSRS('GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]]', wk='NAD83', name='NAD83', attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980'))),
TestSRS('PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",DATUM["New_Zealand_Geodetic_Datum_1949",SPHEROID["International 1924",6378388,297,AUTHORITY["EPSG","7022"]],TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.28991152777778],PARAMETER["central_meridian",172.1090281944444],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]', wk='EPSG:27216', name='NZGD49 / Karamea Circuit', attrs=(('PROJECTION','Transverse_Mercator'), ('SPHEROID', 'International 1924'))),
)
bad_srlist = ('Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',)
class SpatialRefTest(unittest.TestCase):
def test01_wkt(self):
"Testing initialization on valid OGC WKT."
for s in srlist:
srs = SpatialReference(s.wkt)
def test02_bad_wkt(self):
"Testing initialization on invalid WKT."
for bad in bad_srlist:
try:
srs = SpatialReference(bad)
srs.validate()
except (SRSException, OGRException):
pass
else:
self.fail('Should not have initialized on bad WKT "%s"!' % bad)
def test03_get_wkt(self):
"Testing getting the WKT."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.wkt, srs.wkt)
def test04_proj(self):
"Test PROJ.4 import and export."
for s in srlist:
if s.proj:
srs1 = SpatialReference(s.wkt)
srs2 = SpatialReference(s.proj)
self.assertEqual(srs1.proj, srs2.proj)
def test05_epsg(self):
"Test EPSG import."
for s in srlist:
if s.epsg:
srs1 = SpatialReference(s.wkt)
srs2 = SpatialReference(s.epsg)
srs3 = SpatialReference(str(s.epsg))
srs4 = SpatialReference('EPSG:%d' % s.epsg)
for srs in (srs1, srs2, srs3, srs4):
for attr, expected in s.attr:
self.assertEqual(expected, srs[attr])
def test07_boolean_props(self):
"Testing the boolean properties."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.projected, srs.projected)
self.assertEqual(s.geographic, srs.geographic)
def test08_angular_linear(self):
"Testing the linear and angular units routines."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.ang_name, srs.angular_name)
self.assertEqual(s.lin_name, srs.linear_name)
self.assertAlmostEqual(s.ang_units, srs.angular_units, 9)
self.assertAlmostEqual(s.lin_units, srs.linear_units, 9)
def test09_authority(self):
"Testing the authority name & code routines."
for s in srlist:
if hasattr(s, 'auth'):
srs = SpatialReference(s.wkt)
for target, tup in s.auth.items():
self.assertEqual(tup[0], srs.auth_name(target))
self.assertEqual(tup[1], srs.auth_code(target))
def test10_attributes(self):
"Testing the attribute retrieval routines."
for s in srlist:
srs = SpatialReference(s.wkt)
for tup in s.attr:
att = tup[0] # Attribute to test
exp = tup[1] # Expected result
self.assertEqual(exp, srs[att])
def test11_wellknown(self):
"Testing Well Known Names of Spatial References."
for s in well_known:
srs = SpatialReference(s.wk)
self.assertEqual(s.name, srs.name)
for tup in s.attrs:
if len(tup) == 2:
key = tup[0]
exp = tup[1]
elif len(tup) == 3:
key = tup[:2]
exp = tup[2]
self.assertEqual(srs[key], exp)
def test12_coordtransform(self):
"Testing initialization of a CoordTransform."
target = SpatialReference('WGS84')
for s in srlist:
if s.proj:
ct = CoordTransform(SpatialReference(s.wkt), target)
def test13_attr_value(self):
"Testing the attr_value() method."
s1 = SpatialReference('WGS84')
self.assertRaises(TypeError, s1.__getitem__, 0)
self.assertRaises(TypeError, s1.__getitem__, ('GEOGCS', 'foo'))
self.assertEqual('WGS 84', s1['GEOGCS'])
self.assertEqual('WGS_1984', s1['DATUM'])
self.assertEqual('EPSG', s1['AUTHORITY'])
self.assertEqual(4326, int(s1['AUTHORITY', 1]))
self.assertEqual(None, s1['FOOBAR'])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
jsteemann/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Tools/scripts/eptags.py
|
102
|
#! /usr/bin/env python
"""Create a TAGS file for Python programs, usable with GNU Emacs.
usage: eptags pyfiles...
The output TAGS file is usable with Emacs versions 18, 19, and 20.
The following are tagged:
 - functions (even inside other defs or classes)
 - classes
eptags warns about files it cannot open.
eptags does not warn about duplicate tags.
BUGS:
Because of tag duplication (methods with the same name in different
classes), TAGS files are not very useful for most object-oriented
python projects.
"""
import sys,re
expr = r'^[ \t]*(def|class)[ \t]+([a-zA-Z_][a-zA-Z0-9_]*)[ \t]*[:\(]'
matcher = re.compile(expr)
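# The pattern tags any line that starts (after optional indentation) with
# 'def' or 'class'; group(2) is the tagged name, e.g. 'spam' in 'def spam(x):'.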
def treat_file(filename, outfp):
"""Append tags found in file named 'filename' to the open file 'outfp'"""
    try:
        fp = open(filename, 'r')
    except IOError:
        sys.stderr.write('Cannot open %s\n' % filename)
        return
charno = 0
lineno = 0
tags = []
size = 0
while 1:
line = fp.readline()
if not line:
break
lineno = lineno + 1
m = matcher.search(line)
if m:
tag = m.group(0) + '\177%d,%d\n' % (lineno, charno)
tags.append(tag)
size = size + len(tag)
charno = charno + len(line)
outfp.write('\f\n%s,%d\n' % (filename,size))
for tag in tags:
outfp.write(tag)
def main():
outfp = open('TAGS', 'w')
for filename in sys.argv[1:]:
treat_file(filename, outfp)
if __name__=="__main__":
main()
|
40223139/203739test
|
refs/heads/master
|
static/Brython3.1.3-20150514-095342/Lib/heapq.py
|
628
|
"""Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
a usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all times, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedules other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
than the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, whose size is usually related to the amount of CPU memory),
followed by merging passes for these runs, and this merging is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way
to achieve that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much longer for fuzzily ordered input.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate in
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
'nlargest', 'nsmallest', 'heappushpop']
from itertools import islice, count, tee, chain
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
else:
returnitem = lastelt
return returnitem
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heappushpop(heap, item):
"""Fast version of a heappush followed by a heappop."""
if heap and heap[0] < item:
item, heap[0] = heap[0], item
_siftup(heap, 0)
return item
def heapify(x):
"""Transform list into a heap, in-place, in O(len(x)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
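    # For example, n == 10: the last parent is i == 4 (its children are 9
    # and the out-of-range 10), and indeed 10//2 - 1 == 4.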
for i in reversed(range(n//2)):
_siftup(x, i)
def _heappushpop_max(heap, item):
"""Maxheap version of a heappush followed by a heappop."""
if heap and item < heap[0]:
item, heap[0] = heap[0], item
_siftup_max(heap, 0)
return item
def _heapify_max(x):
"""Transform list into a maxheap, in-place, in O(len(x)) time."""
n = len(x)
for i in reversed(range(n//2)):
_siftup_max(x, i)
def nlargest(n, iterable):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
if n < 0:
return []
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
heapify(result)
_heappushpop = heappushpop
for elem in it:
_heappushpop(result, elem)
result.sort(reverse=True)
return result
def nsmallest(n, iterable):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable)[:n]
"""
if n < 0:
return []
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
_heapify_max(result)
_heappushpop = _heappushpop_max
for elem in it:
_heappushpop(result, elem)
result.sort()
return result
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if newitem < parent:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom comparison methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and not heap[childpos] < heap[rightpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
def _siftdown_max(heap, startpos, pos):
'Maxheap variant of _siftdown'
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if parent < newitem:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
def _siftup_max(heap, pos):
'Maxheap variant of _siftup'
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the larger child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of larger child.
rightpos = childpos + 1
if rightpos < endpos and not heap[rightpos] < heap[childpos]:
childpos = rightpos
# Move the larger child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown_max(heap, startpos, pos)
# If available, use C implementation
#_heapq does not exist in Brython, so let's just comment it out.
#try:
# from _heapq import *
#except ImportError:
# pass
def merge(*iterables):
'''Merge multiple sorted inputs into a single sorted output.
Similar to sorted(itertools.chain(*iterables)) but returns a generator,
does not pull the data into memory all at once, and assumes that each of
the input streams is already sorted (smallest to largest).
>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
'''
_heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration
_len = len
h = []
h_append = h.append
for itnum, it in enumerate(map(iter, iterables)):
try:
next = it.__next__
h_append([next(), itnum, next])
except _StopIteration:
pass
heapify(h)
while _len(h) > 1:
try:
while True:
v, itnum, next = s = h[0]
yield v
s[0] = next() # raises StopIteration when exhausted
_heapreplace(h, s) # restore heap condition
except _StopIteration:
_heappop(h) # remove empty iterator
if h:
# fast case when only a single iterator remains
v, itnum, next = h[0]
yield v
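        # 'next' is the bound __next__ method of the sole remaining
        # iterator, so next.__self__ is that iterator; drain it directly.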
yield from next.__self__
# Extend the implementations of nsmallest and nlargest to use a key= argument
_nsmallest = nsmallest
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
# Short-cut for n==1 is to use min() when len(iterable)>0
if n == 1:
it = iter(iterable)
head = list(islice(it, 1))
if not head:
return []
if key is None:
return [min(chain(head, it))]
return [min(chain(head, it), key=key)]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key)[:n]
# When key is none, use simpler decoration
if key is None:
it = zip(iterable, count()) # decorate
result = _nsmallest(n, it)
return [r[0] for r in result] # undecorate
# General case, slowest method
in1, in2 = tee(iterable)
it = zip(map(key, in1), count(), in2) # decorate
result = _nsmallest(n, it)
return [r[2] for r in result] # undecorate
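# Note on the decoration above: the count() tie-breaker guarantees that
# items with equal keys are never compared directly (the payloads may not
# be orderable) and that ties keep input order.  Sketch: with key=len,
# ['bb', 'a', 'cc'] decorates to (2, 0, 'bb'), (1, 1, 'a'), (2, 2, 'cc'),
# and nsmallest(2, ...) undecorates to ['a', 'bb'].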
_nlargest = nlargest
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
# Short-cut for n==1 is to use max() when len(iterable)>0
if n == 1:
it = iter(iterable)
head = list(islice(it, 1))
if not head:
return []
if key is None:
return [max(chain(head, it))]
return [max(chain(head, it), key=key)]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key, reverse=True)[:n]
# When key is none, use simpler decoration
if key is None:
it = zip(iterable, count(0,-1)) # decorate
result = _nlargest(n, it)
return [r[0] for r in result] # undecorate
# General case, slowest method
in1, in2 = tee(iterable)
it = zip(map(key, in1), count(0,-1), in2) # decorate
result = _nlargest(n, it)
return [r[2] for r in result] # undecorate
if __name__ == "__main__":
# Simple sanity test
heap = []
data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
for item in data:
heappush(heap, item)
sort = []
while heap:
sort.append(heappop(heap))
print(sort)
import doctest
doctest.testmod()
|
vveerava/Openstack
|
refs/heads/master
|
neutron/tests/unit/ml2/drivers/cisco/apic/test_cisco_apic_topology_agent.py
|
20
|
# Copyright (c) 2014 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import mock
sys.modules["apicapi"] = mock.Mock()
from neutron.plugins.ml2.drivers.cisco.apic import apic_topology
from neutron.tests import base
from neutron.tests.unit.ml2.drivers.cisco.apic import (
test_cisco_apic_common as mocked)
NOTIFIER = ('neutron.plugins.ml2.drivers.cisco.apic.'
'apic_topology.ApicTopologyServiceNotifierApi')
RPC_CONNECTION = 'neutron.common.rpc.Connection'
AGENTS_DB = 'neutron.db.agents_db'
PERIODIC_TASK = 'neutron.openstack.common.periodic_task'
DEV_EXISTS = 'neutron.agent.linux.ip_lib.device_exists'
IP_DEVICE = 'neutron.agent.linux.ip_lib.IPDevice'
EXECUTE = 'neutron.agent.linux.utils.execute'
LLDP_CMD = ['lldpctl', '-f', 'keyvalue']
ETH0 = mocked.SERVICE_HOST_IFACE
LLDPCTL_RES = (
'lldp.' + ETH0 + '.via=LLDP\n'
'lldp.' + ETH0 + '.rid=1\n'
'lldp.' + ETH0 + '.age=0 day, 20:55:54\n'
'lldp.' + ETH0 + '.chassis.mac=' + mocked.SERVICE_HOST_MAC + '\n'
'lldp.' + ETH0 + '.chassis.name=' + mocked.SERVICE_PEER_CHASSIS_NAME + '\n'
'lldp.' + ETH0 + '.chassis.descr=' + mocked.SERVICE_PEER_CHASSIS + '\n'
'lldp.' + ETH0 + '.chassis.Bridge.enabled=on\n'
'lldp.' + ETH0 + '.chassis.Router.enabled=on\n'
'lldp.' + ETH0 + '.port.local=' + mocked.SERVICE_PEER_PORT_LOCAL + '\n'
'lldp.' + ETH0 + '.port.descr=' + mocked.SERVICE_PEER_PORT_DESC)
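# LLDPCTL_RES mimics the 'lldpctl -f keyvalue' output that the topology
# agent parses: one 'lldp.<iface>.<attribute>=<value>' entry per line.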
class TestCiscoApicTopologyService(base.BaseTestCase,
mocked.ControllerMixin,
mocked.ConfigMixin):
def setUp(self):
super(TestCiscoApicTopologyService, self).setUp()
mocked.ControllerMixin.set_up_mocks(self)
mocked.ConfigMixin.set_up_mocks(self)
# Patch notifier
notifier_c = mock.patch(NOTIFIER).start()
self.notifier = mock.Mock()
notifier_c.return_value = self.notifier
# Patch Connection
connection_c = mock.patch(RPC_CONNECTION).start()
self.connection = mock.Mock()
connection_c.return_value = self.connection
# Patch agents db
self.agents_db = mock.patch(AGENTS_DB).start()
self.service = apic_topology.ApicTopologyService()
self.service.apic_manager = mock.Mock()
def test_init_host(self):
self.service.init_host()
        # Mock objects silently accept unknown attributes such as
        # 'ensure_called_once', so assert on the real 'called' flag instead.
        self.assertTrue(self.connection.create_consumer.called)
        self.assertTrue(self.connection.consume_in_threads.called)
def test_update_link_add_nopeers(self):
self.service.peers = {}
args = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
self.service.update_link(None, *args)
self.service.apic_manager.add_hostlink.assert_called_once_with(*args)
self.assertEqual(args,
self.service.peers[(mocked.SERVICE_HOST,
mocked.SERVICE_HOST_IFACE)])
def test_update_link_add_with_peers_diff(self):
args = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
args_prime = args[:2] + tuple(x + '1' for x in args[2:])
self.service.peers = {args_prime[:2]: args_prime}
self.service.update_link(None, *args)
self.service.apic_manager.remove_hostlink.assert_called_once_with(
*args_prime)
self.service.apic_manager.add_hostlink.assert_called_once_with(*args)
self.assertEqual(
args, self.service.peers[
(mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE)])
def test_update_link_add_with_peers_eq(self):
args = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC,
mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
self.service.peers = {args[:2]: args}
self.service.update_link(None, *args)
def test_update_link_rem_with_peers(self):
args = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, 0,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
self.service.peers = {args[:2]: args}
self.service.update_link(None, *args)
self.service.apic_manager.remove_hostlink.assert_called_once_with(
*args)
self.assertFalse(bool(self.service.peers))
def test_update_link_rem_no_peers(self):
args = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, 0,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
self.service.update_link(None, *args)
class TestCiscoApicTopologyAgent(base.BaseTestCase,
mocked.ControllerMixin,
mocked.ConfigMixin):
def setUp(self):
super(TestCiscoApicTopologyAgent, self).setUp()
mocked.ControllerMixin.set_up_mocks(self)
mocked.ConfigMixin.set_up_mocks(self)
# Patch notifier
notifier_c = mock.patch(NOTIFIER).start()
self.notifier = mock.Mock()
notifier_c.return_value = self.notifier
# Patch device_exists
self.dev_exists = mock.patch(DEV_EXISTS).start()
# Patch IPDevice
ipdev_c = mock.patch(IP_DEVICE).start()
self.ipdev = mock.Mock()
ipdev_c.return_value = self.ipdev
self.ipdev.link.address = mocked.SERVICE_HOST_MAC
# Patch execute
self.execute = mock.patch(EXECUTE).start()
self.execute.return_value = LLDPCTL_RES
# Patch tasks
self.periodic_task = mock.patch(PERIODIC_TASK).start()
self.agent = apic_topology.ApicTopologyAgent()
self.agent.host = mocked.SERVICE_HOST
self.agent.service_agent = mock.Mock()
self.agent.lldpcmd = LLDP_CMD
def test_init_host_device_exists(self):
self.agent.lldpcmd = None
self.dev_exists.return_value = True
self.agent.init_host()
self.assertEqual(LLDP_CMD + mocked.APIC_UPLINK_PORTS,
self.agent.lldpcmd)
def test_init_host_device_not_exist(self):
self.agent.lldpcmd = None
self.dev_exists.return_value = False
self.agent.init_host()
self.assertEqual(LLDP_CMD, self.agent.lldpcmd)
def test_get_peers(self):
self.agent.peers = {}
peers = self.agent._get_peers()
expected = [(mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)]
self.assertEqual(expected,
peers[mocked.SERVICE_HOST_IFACE])
def test_check_for_new_peers_no_peers(self):
self.agent.peers = {}
expected = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
peers = {mocked.SERVICE_HOST_IFACE: [expected]}
context = mock.Mock()
with mock.patch.object(self.agent, '_get_peers',
return_value=peers):
self.agent._check_for_new_peers(context)
self.assertEqual(expected,
self.agent.peers[mocked.SERVICE_HOST_IFACE])
self.agent.service_agent.update_link.assert_called_once_with(
context, *expected)
def test_check_for_new_peers_with_peers(self):
expected = (mocked.SERVICE_HOST, mocked.SERVICE_HOST_IFACE,
mocked.SERVICE_HOST_MAC, mocked.APIC_EXT_SWITCH,
mocked.APIC_EXT_MODULE, mocked.APIC_EXT_PORT)
peers = {mocked.SERVICE_HOST_IFACE: [expected]}
self.agent.peers = {mocked.SERVICE_HOST_IFACE:
[tuple(x + '1' for x in expected)]}
context = mock.Mock()
with mock.patch.object(self.agent, '_get_peers',
return_value=peers):
self.agent._check_for_new_peers(context)
self.agent.service_agent.update_link.assert_called_with(
context, *expected)
|
PLyczkowski/Sticky-Keymap
|
refs/heads/master
|
2.74/scripts/addons_contrib/io_directx_bel/bel/group.py
|
3
|
import bpy
'''
given : a name shorter than 21 characters
if the group name already exists :
    naming_method = 0   blender default (increment the name)
    naming_method = 1   do nothing, abort creation and use the existing group
    naming_method = 2   create new, rename the existing one
    naming_method = 3   create new, replace the existing one
'''
def new(name,naming_method):
if name in bpy.data.groups and naming_method :
grp = bpy.data.groups[name]
# if naming_method == 1 return existing
if naming_method == 2 :
grp = bpy.data.groups.new(name)
grp.name = name
elif naming_method == 3 :
bpy.data.groups.remove(grp)
grp = bpy.data.groups.new(name)
else :
grp = bpy.data.groups.new(name)
return grp
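# Usage sketch (assumes a running Blender session; the group name 'props'
# is hypothetical):
#   grp = new('props', 1)   # reuse the existing group 'props', if any
#   grp = new('props', 3)   # replace any existing group 'props'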
## TODO
# @param ob 'all', 'active', 'selected', <object>, 'objectname'
# @return a list of objects or an empty list
def get(ob) :
    if type(ob) == str :
        if ob == 'all' : return bpy.context.scene.objects
        elif ob == 'active' : return [bpy.context.active_object] if bpy.context.active_object != None else []
        elif ob == 'selected' : return bpy.context.selected_objects
        else :
            try : return [bpy.data.objects[ob]]
            except KeyError : return []
    return [ob]
## TODO remove an object from blender internal
def remove(ob,with_data=True) :
objs = get(ob)
#if objs :
# if type(objs) == bpy.types.Object : objs = [objs]
for ob in objs :
data = ob.data
#and_data=False
# never wipe data before unlink the ex-user object of the scene else crash (2.58 3 770 2)
# if there's more than one user for this data, never wipeOutData. will be done with the last user
# if in the list
and_data = with_data
        try :
            if data.users > 1 :
                and_data=False
        except AttributeError :
            and_data=False # empties have no data
# odd (pre 2.60) :
# ob=bpy.data.objects[ob.name]
# if the ob (board) argument comes from bpy.data.groups['aGroup'].objects,
# bpy.data.groups['board'].objects['board'].users_scene
ob.name = '_dead'
for sc in ob.users_scene :
sc.objects.unlink(ob)
#try :
#print(' removing object %s...'%(ob.name)),
bpy.data.objects.remove(ob)
#print(' done.')
#except :
# print('removing failed, but renamed %s and unlinked'%ob.name)
# never wipe data before unlink the ex-user object of the scene else crash (2.58 3 770 2)
if and_data :
wipeOutData(data)
|
dmlc/mxnet
|
refs/heads/master
|
tools/caffe_converter/caffe_parser.py
|
22
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Parse caffe's protobuf
"""
import re
try:
import caffe
from caffe.proto import caffe_pb2
use_caffe = True
except ImportError:
try:
import caffe_pb2
except ImportError:
        raise ImportError('caffe_pb2 not found; generate it first with: protoc --python_out=./ ./caffe.proto')
use_caffe = False
from google.protobuf import text_format # pylint: disable=relative-import
def read_prototxt(fname):
"""Return a caffe_pb2.NetParameter object that defined in a prototxt file
"""
proto = caffe_pb2.NetParameter()
with open(fname, 'r') as f:
text_format.Merge(str(f.read()), proto)
return proto
def get_layers(proto):
"""Returns layers in a caffe_pb2.NetParameter object
"""
if len(proto.layer):
return proto.layer
elif len(proto.layers):
return proto.layers
else:
raise ValueError('Invalid proto file.')
def read_caffemodel(prototxt_fname, caffemodel_fname):
"""Return a caffe_pb2.NetParameter object that defined in a binary
caffemodel file
"""
if use_caffe:
caffe.set_mode_cpu()
net = caffe.Net(prototxt_fname, caffemodel_fname, caffe.TEST)
layer_names = net._layer_names
layers = net.layers
return (layers, layer_names)
else:
proto = caffe_pb2.NetParameter()
with open(caffemodel_fname, 'rb') as f:
proto.ParseFromString(f.read())
return (get_layers(proto), None)
def layer_iter(layers, layer_names):
"""Iterate over all layers"""
if use_caffe:
for layer_idx, layer in enumerate(layers):
layer_name = re.sub('[-/]', '_', layer_names[layer_idx])
layer_type = layer.type
layer_blobs = layer.blobs
yield (layer_name, layer_type, layer_blobs)
else:
for layer in layers:
layer_name = re.sub('[-/]', '_', layer.name)
layer_type = layer.type
layer_blobs = layer.blobs
yield (layer_name, layer_type, layer_blobs)
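# Minimal usage sketch (the file names below are hypothetical):
#
#   net = read_prototxt('deploy.prototxt')
#   layers, layer_names = read_caffemodel('deploy.prototxt', 'net.caffemodel')
#   for name, layer_type, blobs in layer_iter(layers, layer_names):
#       print(name, layer_type, len(blobs))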
|
GitAngel/django
|
refs/heads/master
|
django/contrib/gis/utils/wkt.py
|
589
|
"""
Utilities for manipulating Geometry WKT.
"""
from django.utils import six
def precision_wkt(geom, prec):
"""
Returns WKT text of the geometry according to the given precision (an
integer or a string). If the precision is an integer, then the decimal
places of coordinates WKT will be truncated to that number:
>>> from django.contrib.gis.geos import Point
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
>>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, six.string_types):
num_fmt = prec
else:
        raise TypeError('The precision argument must be an int or a format string.')
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join(coord_fmt % c[:2] for c in coords)
def formatted_poly(poly):
return ','.join('(%s)' % formatted_coords(r) for r in poly)
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join('(%s)' % formatted_poly(p) for p in g)
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join(''.join(wkt for wkt in formatted_geom(child)) for child in g)
else:
            raise TypeError('Unsupported geometry type: %s' % gtype)
yield ')'
return ''.join(wkt for wkt in formatted_geom(geom))
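# A format-string precision works the same way (sketch): passing '%.3f'
# formats every coordinate with three decimal places instead of building
# the format from an integer precision.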
|
rafamoreira/dotfiles
|
refs/heads/master
|
terminator/.config/terminator/plugins/terminator-themes.py
|
3
|
import requests
import terminatorlib.plugin as plugin
from gi.repository import Gtk
from terminatorlib.config import ConfigBase
from terminatorlib.translation import _
from terminatorlib.util import get_config_dir, err, dbg, gerr
AVAILABLE = ['TerminatorThemes']
class TerminatorThemes(plugin.Plugin):
capabilities = ['terminal_menu']
config_base = ConfigBase()
base_url = 'https://api.github.com/repos/EliverLara/terminator-themes/contents/themes.json'
inherits_config_from = "default"
def callback(self, menuitems, menu, terminal):
"""Add our item to the menu"""
self.terminal = terminal
item = Gtk.ImageMenuItem(Gtk.STOCK_FIND)
item.connect('activate',self.configure)
item.set_label("Themes")
item.set_sensitive(True)
menuitems.append(item)
def configure(self, widget, data = None):
ui = {}
dbox = Gtk.Dialog( _("Terminator themes"), None, Gtk.DialogFlags.MODAL)
headers = { "Accept": "application/vnd.github.v3.raw" }
response = requests.get(self.base_url, headers=headers)
if response.status_code != 200:
gerr(_("Failed to get list of available themes"))
return
self.themes_from_repo = response.json()["themes"]
self.profiles = self.terminal.config.list_profiles()
main_container = Gtk.HBox(spacing=7)
main_container.pack_start(self._create_themes_list(ui), True, True, 0)
main_container.pack_start(self._create_settings_grid(ui), True, True, 0)
dbox.vbox.pack_start(main_container, True, True, 0)
self.dbox = dbox
dbox.show_all()
res = dbox.run()
if res == Gtk.ResponseType.ACCEPT:
self.terminal.config.save()
        del self.dbox
dbox.destroy()
return
def _create_themes_list(self, ui):
profiles_list_model = Gtk.ListStore(str, bool, object)
# Set add/remove buttons availability
for theme in self.themes_from_repo:
if theme["name"] in self.profiles:
profiles_list_model.append([theme["name"], False, theme])
else:
profiles_list_model.append([theme["name"], True, theme])
treeview = Gtk.TreeView(profiles_list_model)
selection = treeview.get_selection()
selection.set_mode(Gtk.SelectionMode.SINGLE)
selection.connect("changed", self.on_selection_changed, ui)
ui['treeview'] = treeview
renderer_text = Gtk.CellRendererText()
column_text = Gtk.TreeViewColumn("Theme", renderer_text, text=0)
treeview.append_column(column_text)
scroll_window = Gtk.ScrolledWindow()
scroll_window.set_size_request(300, 250)
scroll_window.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
scroll_window.add_with_viewport(treeview)
return scroll_window
def _create_settings_grid(self, ui):
grid = Gtk.Grid()
grid.set_column_spacing(5)
grid.set_row_spacing(7)
grid.attach(self._create_default_inherits_check(ui), 0, 0, 2, 1)
grid.attach(Gtk.Label("Available profiles: "), 0, 1, 1, 1)
grid.attach(self._create_inherits_from_combo(ui), 1, 1, 1, 1)
grid.attach(self._create_main_action_button(ui, "install", self.on_install), 0, 4, 1, 1)
grid.attach(self._create_main_action_button(ui, "remove", self.on_uninstall), 1, 4, 1, 1)
return grid
def _create_default_inherits_check(self, ui):
check = Gtk.CheckButton("Inherit preferences from default profile")
check.set_active(True)
check.connect("toggled", self.on_inheritsfromdefaultcheck_toggled, ui)
ui['check_inherits_from_default'] = check
return check
def _create_inherits_from_combo(self, ui):
combo = Gtk.ComboBoxText()
combo.set_entry_text_column(0)
combo.set_sensitive(False)
combo.connect("changed", self.on_inheritsfromcombo_changed, ui)
ui['inherits_from_combo'] = combo
for profile in self.profiles:
combo.append_text(profile)
combo.set_active(self.profiles.index(self.terminal.config.get_profile()))
return combo
def _create_main_action_button(self, ui, label, action):
btn = Gtk.Button(_(label.capitalize()))
btn.connect("clicked", action, ui)
btn.set_sensitive(False)
ui['button_' + label] = btn
return btn
def on_inheritsfromdefaultcheck_toggled(self, check, data=None):
if check.get_active() is not True:
data["inherits_from_combo"].set_sensitive(True)
self.inherits_config_from = self.profiles[data['inherits_from_combo'].get_active()]
else:
data["inherits_from_combo"].set_sensitive(False)
self.inherits_config_from = 'default'
def on_inheritsfromcombo_changed(self, combo, data):
if combo.get_sensitive():
self.inherits_config_from = self.profiles[combo.get_active()]
else:
self.inherits_config_from = 'default'
def on_selection_changed(self, selection, data=None):
(model, iter) = selection.get_selected()
data['button_install'].set_sensitive(model[iter][1])
data['button_remove'].set_sensitive(model[iter][1] is not True)
def on_uninstall(self, button, data):
treeview = data['treeview']
selection = treeview.get_selection()
(store, iter) = selection.get_selected()
target = store[iter][0]
        # If the selected theme is active, set the terminal profile to default before uninstalling
if self.terminal.get_profile() == target:
widget = self.terminal.get_vte()
self.terminal.force_set_profile(widget, 'default')
self.terminal.config.del_profile(target)
self.terminal.config.save()
self.update_comboInheritsFrom(data)
        # 'Install' button available again
data['treeview'].get_model().set_value(iter, 1, True)
self.on_selection_changed(selection, data)
def on_install(self, button, data):
treeview = data['treeview']
selection = treeview.get_selection()
        (store, iter) = selection.get_selected()
        if not iter:
            return
        target = store[iter][2]
        widget = self.terminal.get_vte()
        treeview.set_enable_tree_lines(False)
self.terminal.config.add_profile(target["name"])
template_data = self.config_base.profiles[self.inherits_config_from].copy()
for k, v in target.items():
if k != 'background_image' and k != 'name' and k != 'type':
if k == 'background_darkness':
template_data[k] = float(v)
else:
template_data[k] = v
for k, v in template_data.items():
self.config_base.set_item(k, v, target["name"])
self.terminal.force_set_profile(widget, target["name"])
self.terminal.config.save()
self.update_comboInheritsFrom(data)
# "Remove" button available again
data['treeview'].get_model().set_value(iter, 1, False)
self.on_selection_changed(selection, data)
treeview.set_enable_tree_lines(True)
def update_comboInheritsFrom(self, data):
data['inherits_from_combo'].remove_all()
profiles = self.terminal.config.list_profiles()
self.profiles = profiles
for profile in profiles:
data['inherits_from_combo'].append_text(profile)
data['inherits_from_combo'].set_active(profiles.index(self.terminal.config.get_profile()))
|
jjmleiro/hue
|
refs/heads/master
|
desktop/core/ext-py/boto-2.38.0/boto/logs/exceptions.py
|
150
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.exception import BotoServerError
class LimitExceededException(BotoServerError):
pass
class DataAlreadyAcceptedException(BotoServerError):
pass
class ResourceInUseException(BotoServerError):
pass
class ServiceUnavailableException(BotoServerError):
pass
class InvalidParameterException(BotoServerError):
pass
class ResourceNotFoundException(BotoServerError):
pass
class ResourceAlreadyExistsException(BotoServerError):
pass
class OperationAbortedException(BotoServerError):
pass
class InvalidSequenceTokenException(BotoServerError):
pass
|
lmregus/Portfolio
|
refs/heads/master
|
python/design_patterns/env/lib/python3.7/site-packages/pygments/styles/sas.py
|
31
|
# -*- coding: utf-8 -*-
"""
pygments.styles.sas
~~~~~~~~~~~~~~~~~~~
    Style inspired by SAS' enhanced program editor. Note: This is not
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Other, Whitespace, Generic
class SasStyle(Style):
"""
    Style inspired by SAS' enhanced program editor. Note: This is not
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
"""
default_style = ''
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #008800',
String: '#800080',
Number: 'bold #2e8b57',
Other: 'bg:#ffffe0',
Keyword: '#2c2cff',
Keyword.Reserved: 'bold #353580',
Keyword.Constant: 'bold',
Name.Builtin: '#2c2cff',
Name.Function: 'bold italic',
Name.Variable: 'bold #2c2cff',
Generic: '#2c2cff',
Generic.Emph: '#008800',
Generic.Error: '#d30202',
Error: 'bg:#e3d2d2 #a61717'
}
|
wdmchaft/taskcoach
|
refs/heads/master
|
taskcoachlib/i18n/br.py
|
1
|
# -*- coding: UTF-8 -*-
#This is generated code - do not edit
encoding = 'UTF-8'
dict = {'Use application-wide setting': 'Implij kefluniadur an arload a-bezh', 'Monthly': 'Miziek', 'Minimize priority': 'Minimiza\xc3\xb1 ar briorelezh', 'Never': 'Morse', 'HTML files (*.html)|*.html|All files (*.*)|*': 'Restro\xc3\xb9 HTML (*.html)|*.html|An holl restro\xc3\xb9 (*.*)|*', 'Mark parent task completed when all children are completed': 'Merka\xc3\xb1 ar gefridi gar evel peurechu pa vez peurechu an holl vugale', 'Make the selected task(s) the lowest priority task(s)': "Grit ar g(c'h)efridi(o\xc3\xb9) a briorelezh izela\xc3\xb1 diouzh ar g(c'h)efridi(o\xc3\xb9) diuzet", '%d items added.\n%d items updated.\n%d items deleted.': '%d elfenn ouzhpennet.\n%d elfenn hizivaet.\n%d elfenn dilamet.', 'Edit note...': 'Aoza\xc3\xb1 an notenn...', 'Show/hide days left column': 'Diskwel/Kuzhat bann an devezhio\xc3\xb9 a chom', '&New task...': '&Kefridi nevez...', 'Show/hide total time spent column': 'Diskwel/Kuzhat ar bann hollad an amzer implijet', '&Sort': '&Rumma\xc3\xb1', 'Redo the last command that was undone': 'Adober an urzhiad diwezha\xc3\xb1 bet disgraet', 'Browse for files to add as attachment to the selected categories': "Furchal da gavout restro\xc3\xb9 da veza\xc3\xb1 ouzhpennet d'ar rummad(o\xc3\xb9) diuzet", 'Open %s': 'Digeri\xc3\xb1 %s', 'Open all attachments of the selected note(s)': 'Digeri\xc3\xb1 holl restro\xc3\xb9 stag an notenn(o\xc3\xb9) diuzet', '&Subject': '&Danvez', 'Decrease priority': 'Digreski\xc3\xb1 ar briorelezh', 'Edit preferences': 'Aoza\xc3\xb1 ar penndibabo\xc3\xb9', 'Merged %(filename)s': '%(filename)s kendeuzet', 'Attachments': 'Restro\xc3\xb9 stag', '<h4><a name="abouteffort">About effort</a></h4>\n\n<p>Whenever you spent time on tasks, you can record the amount of time\nspent by tracking effort. Select a task and invoke \'Start tracking effort\' in\nthe Effort menu or the context menu or via the \'Start tracking effort\' toolbar \nbutton.</p>\n': '<h4><a name="abouteffort">Diwar-benn striv</a></h4>\n\n<p>Pa emaoc\'h oc\'h implijout amzer war kefridio\xc3\xb9 e c\'hallit enrolla\xc3\xb1 ar sammad amzer implijet dre an heulia\xc3\xb1 striv. 
Diuzit ur gefridi ha dibabit \'Loc\'ha\xc3\xb1 an heulia\xc3\xb1 striv\' er\nmeuziad Striv pe er meuziad kemperzhel pe dre an afell barrenn ostilho\xc3\xb9 \'Loc\'ha\xc3\xb1 an heulia\xc3\xb1 striv\'.</p>\n', 'Total revenue': 'Hollad ar gounido\xc3\xb9', 'Task behavior': "Emzalc'h ar gefridi", 'Sort tasks by total time spent': 'Rumma\xc3\xb1 ar gefridio\xc3\xb9 dre hollad an amzer implijet', 'Save the current file under a new name': 'Enrolla\xc3\xb1 ar restr red gant un anv nevez', 'Insert a new task with the selected categories checked': 'Enlakaat ur gefridi nevez gant ar rummado\xc3\xb9 diuzet merket', 'Browse': 'Furchal', 'Decrease priority\tCtrl+D': 'Digreski\xc3\xb1 ar briorelezh\tCtrl+D', 'Categories: %d selected, %d total': 'Rummado\xc3\xb9: %d diuzet, %d hollad', 'Version and contact information about %s': 'Aozadur ha titourou darempred diwar-benn %s', 'Only show tasks due today and tomorrow': "Diskwel ar c'hefridio\xc3\xb9 dleet hiziv ha warc'hoazh hepken", 'Export the current view as HTML file': 'Ezporzhia\xc3\xb1 ar gwel red en ur restr HTML', 'New subtask': 'Iskefridi nevez', 'Add a note to the selected attachment(s)': "Ouzhpenna\xc3\xb1 un notenn d'ar restr(o\xc3\xb9) stag diuzet", 'Allow for taking notes': 'Aotren da gemer notenno\xc3\xb9', 'Categories': 'Rummado\xc3\xb9', 'Dates': 'Deiziado\xc3\xb9', 'Total fi&xed fee': 'Hollad an &treziado\xc3\xb9', 'Budget': 'Budjed', 'Print preview': 'Rakgwel ar moulla\xc3\xb1', 'Delete the selected notes': 'Dilemel an notenno\xc3\xb9 diuzet', '&Rename viewer...': '&Adenvel ar gweler...', 'Tasks: %d selected, %d visible, %d total': 'Kefridio\xc3\xb9: %d diuzet, %d gwelus, %d en holl', 'Match case': "Teuler evezh ouzh ar c'hef", 'Sort case sensitive': "Rumma\xc3\xb1 en ur deuler evezh ouzh ar c'hef", 'Hide the selected column': 'Kuzhat ar bann diuzet', 'Tuesday': 'Meurzh', '&Category': '&Rummad', '&New viewer': 'Gweler &Nevez', 'Sort attachments by subject': 'Rumma\xc3\xb1 ar restro\xc3\xb9 stag dre zanvez', 'Show/hide recurrence column': 'Diskwel/Kuzhat bann ar rekura\xc3\xb1so\xc3\xb9', '&Edit effort...': '&Aoza\xc3\xb1 ar striv...', 'St&op tracking effort': '&Paouez an heulia\xc3\xb1 striv', 'New note with selected categories...': 'Notenn nevez gant ar rummado\xc3\xb9 diuzet...', 'Sort tasks by subject': 'Rumma\xc3\xb1 ar gefridio\xc3\xb9 dre zanvez', '&Mark task uncompleted\tCtrl+RETURN': '&Merka\xc3\xb1 ar gefridi evel diechu\tCtrl+RETURN', 'Delete category': 'Dilemel ar rummad', 'Open all attachments of the selected categories': 'Digeri\xc3\xb1 holl restro\xc3\xb9 stag ar rummado\xc3\xb9 diuzet', 'Sort tasks by completion date': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre zeiziad peurechui\xc3\xb1", '&Tips': '&Tunio\xc3\xb9', '&Time spent': '&Amzer implijet', '<h4><a name="abouttasks">About tasks</a></h4>\n \n<p>Tasks are the basic objects that you manipulate. Tasks can\nrepresent anything from a simple little thing you have to do, like buying a gift\nfor your loved one, to a complete project, consisting of different phases, and\nnumerous activities.</p>\n': '<h4><a name="abouttasks">Diwar-benn ar c\'hefridio\xc3\xb9</a></h4>\n \n<p>Ar c\'hefridio\xc3\xb9 eo an draezenno\xc3\xb9 diazez a zo dornataet ganeoc\'h. 
Ar c\'hefridio\xc3\xb9 a c\'hell\nober evit n\'eus forzh petra eus un dra vihan eeun ho peus da ober, evel prena\xc3\xb1 ur prof\nd\'ho karedig, betek ur raktres klok, c\'hoarvezet e prantado\xc3\xb9 dishe\xc3\xb1vel, hag\nobererezhio\xc3\xb9 lies.</p>\n', 'Sort by description': 'Rumma\xc3\xb1 dre zeskrivadur', 'Edit the selected attachments': 'Aoza\xc3\xb1 ar restro\xc3\xb9 stag diuzet', 'Increase priority\tCtrl+I': 'Kreski\xc3\xb1 ar briorelezh\tCtrl+I', 'If your language is not available, or the translation needs \nimproving, please consider helping. See:': "Ma ne c'haller ket kavout ho yezh, pe ma vefe ezhomm da \nwellaat an droidigezh, klaskit rei\xc3\xb1 skoazell mar plij. Gwelit :", 'Small images (16x16) on the toolbar': 'Skeudenno\xc3\xb9 bihan (16x16) war ar varrenn ostilho\xc3\xb9', 'Total fixed fee': 'Hollad an treziado\xc3\xb9', 'Sort notes by category': 'Rumma\xc3\xb1 an notenno\xc3\xb9 dre rummad', 'Show/hide categories column': 'Diskwel/Kuzhat bann ar rummado\xc3\xb9', '&Save\tCtrl+S': '&Enrolla\xc3\xb1\tCtrl+S', '&Attachments': '&Restro\xc3\xb9 stag', 'New attachment': 'Restr stag nevez', 'Edit the selected notes': 'Aoza\xc3\xb1 an notenno\xc3\xb9 diuzet', 'P&aste into task\tShift+Ctrl+V': 'Peg&a\xc3\xb1 er gefridi\tShift+Ctrl+V', 'SyncML preferences...': 'Penndibabo\xc3\xb9 SyncML...', 'All date columns': 'Holl bannou an deiziado\xc3\xb9', '&Start tracking effort': "&Loc'ha\xc3\xb1 an heulia\xc3\xb1 striv", '%s license': 'aotre %s', '%(name)s has a mailinglist where you can discuss usage of %(name)s with fellow users, discuss and request features and complain about bugs. Go to %(url)s and join today!': "%(name)s en deus ur roll skigna\xc3\xb1 e-lec'h ma c'hellit eskemm gant implijerien boas diwar-benn implij %(name)s, kaozeal ha goulenn keweriustero\xc3\xb9 ha klemm diwar-benn bogo\xc3\xb9. 
Kit da %(url)s hag emellit hiziv!", 'Wednesday': "Merc'her", 'Merge': 'Kendeuzi\xc3\xb1', 'Only show tasks due this month': "Diskwel ar c'hefridio\xc3\xb9 dleet er miz-ma\xc3\xb1 hepken", 'Show/hide notes column': 'Diskwel/Kuzhat bann an notenno\xc3\xb9', 'Start tracking effort for %s': "Loc'ha\xc3\xb1 an heulia\xc3\xb1 striv evit %s", 'Hide main window when iconized': 'Kuzhat ar prenestr penna\xc3\xb1 pa vez arlunet', '&Mark task completed\tCtrl+RETURN': '&Merka\xc3\xb1 ar gefridi evel peurechu\tCtrl+RETURN', 'Show/hide hourly fee column': 'Diskwel/Kuzhat bann an treziado\xc3\xb9 dre eurvezh', '&Unlimited': '&Didermen', 'month(s),': 'miz(io\xc3\xb9),', '&Edit task...': '&Aoza\xc3\xb1 ar gefridi...', 'Period': 'Mare', 'T&otal revenue': '&Hollad ar gounido\xc3\xb9', 'Export as HTML...': 'Ezporzhia\xc3\xb1 evel HTML...', '&Expand all items\tShift+Ctrl+E': '&Digeri\xc3\xb1 gwezenn an holl elfenno\xc3\xb9\tShift+Ctrl+E', 'Auto save after every change': 'Enrolla\xc3\xb1 emgefreek goude pep kemm', 'Error while reading %s:\n': 'Fazi en ur lenn %s:\n', 'Mark the selected task(s) completed': "Merka\xc3\xb1 ar g(c'h)efridi(o\xc3\xb9) evel peurechu", 'Insert a new subcategory': 'Enlakaat un isrummad nevez', 'Sort tasks by total budget': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre hollad ar budjed", 'Sort tasks by total budget left': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre hollad ar budjedo\xc3\xb9 hegerz", 'Copy the selected item(s) to the clipboard': "Eila\xc3\xb1 an elfenn(o\xc3\xb9) diuzet d'ar golver", 'New note...': 'Notenn nevez...', 'New note': 'Notenn nevez', 'Allow for tracking effort': "Aotren d'an heulia\xc3\xb1 striv", 'New version of %(name)s available': "Un doare nevez eus %(name)s a c'haller e gaout", 'Wo&rkweek': '&Sizhunvezhiad labour', 'Total re&venue': 'Hollad ar &gounido\xc3\xb9', 'Open all attachments of the selected task(s)': '&Digeri\xc3\xb1 holl restro\xc3\xb9 stag ar gefridi(o\xc3\xb9) diuzet', '&Export': '&Ezporzhia\xc3\xb1', 'C&ollapse': '&Serri\xc3\xb1 ar wezenn', 'Notify me of new versions.': 'Kemenn din an doareo\xc3\xb9 nevez.', 'Help: %s license': 'Skoazell: aotre %s', 'Delete the selected task(s)': 'Dilemel ar gefridi(o\xc3\xb9) diuzet', '&Effort': '&Striv', '&Merge...': '&Kendeuzi\xc3\xb1...', '&Filter': '&Sil', 'Save the selected tasks to a separate file': "Enrolla\xc3\xb1 ar c'hefridio\xc3\xb9 diuzet d'ur restr distag", '&Priority': '&Renk', 'Budget &left': 'Budjed &hegerz', 'All files (*.*)|*': 'An holl restro\xc3\xb9 (*.*)|*', 'Save the current file': 'Enrolla\xc3\xb1 ar restr red', "%(name)s is available in a number of different languages. Select 'Edit' -> 'Preferences' to see whether your language is one of them. If your language is not available or the translation needs improvement, please consider helping with the translation of %(name)s. Visit %(url)s for more information about how you can help.": "Gallout a reer kaout %(name)s e meur a yezh. Diuzit 'Aoza\xc3\xb1' -> 'Penndibabo\xc3\xb9' da welout hag-e\xc3\xb1 eo ho yezh unan anezho. Ma ne c'hallfed ket kaout ho yezh pe ma vefe ezhomm da wellaat an droidigezh, klaskit rei\xc3\xb1 skoazell evit troidigezh %(name)s mar plij. 
Gweladennit %(url)s evit kaout muioc'h a ditouro\xc3\xb9 diwar-benn an doare ma c'hallfec'h sikour.", 'Total budget left': 'Hollad ar budjedo\xc3\xb9 hegerz', '&Hide': '&Kuzhat', 'Cu&t\tCtrl+X': "&Troc'ha\xc3\xb1\tCtrl+X", 'week(s),': 'sizhun(io\xc3\xb9),', '&Paste\tCtrl+V': '&Pega\xc3\xb1\tCtrl+V', 'New category...': 'Rummad nevez...', 'Status: %d filtered': 'Stad: %d silet', 'Show/hide attachment column': 'Diskwel/Kuzhat bann ar restro\xc3\xb9 stag', 'Exit %s': 'Kuitaat %s', 'Hide the toolbar': 'Kuzhat ar varenn ostilho\xc3\xb9', '&License': '&Aotre', 'Sort notes by categories': 'Rumma\xc3\xb1 an notenno\xc3\xb9 dre rummad', 'New task due tomorrow': "Kefridi nevez dleet warc'hoazh", 'Delete attachment': 'Dilemel ar restr stag', 'Sort tasks by priority': 'Rumma\xc3\xb1 ar gefridio\xc3\xb9 dre renk', 'Edit the selected categories': 'Aoza\xc3\xb1 ar rummado\xc3\xb9 diuzet', 'Insert a new note with the selected categories checked': 'Enlakaat un notenn nevez gant ar rummado\xc3\xb9 diuzet merket', 'Filter on any checked category': 'Sil war pep rummad merket', 'Task': 'Kefridi', '&Small images': 'Skeudenno\xc3\xb9 &bihan', 'New title for the viewer:': "Titl nevez d'ar gweler :", 'Add attachment': 'Ouzhpenna\xc3\xb1 ur restr stag', 'now': 'brema\xc3\xb1', 'Increase the priority of the selected task(s)': "Kreski\xc3\xb1 priorelezh ar g(c'h)efridi(o\xc3\xb9) diuzet", 'Add a note to the selected category(ies)': "Ouzhpenna\xc3\xb1 un notenn d'ar rummad(o\xc3\xb9) diuzet", '%s file error': 'fazi restr %s', '<h3><a name="categories">Categories</a></h3>\n': '<h3><a name="categories">Rummado\xc3\xb9</a></h3>\n', '&Hourly fee': 'Treziad dre &eurvezh', 'Insert a new subtask into the selected task': 'Enlakaat un iskefridi nevez er gefridi diuzet', 'Subject': 'Danvez', '&Dates': '&Deiziado\xc3\xb9', 'Notes: %d selected, %d total': 'Notenno\xc3\xb9: %d diuzet, %d hollad', 'Show all tasks': 'Diskwel an holl gefridio\xc3\xb9', '&Restore': '&Adsevel', 'Start date': "Deiziad loc'ha\xc3\xb1", 'Reminder': "Dalc'her-so\xc3\xb1jo\xc3\xb9", 'Are you sure it is a %s-file?': "Sur oc'h ez eo ur restr %s?", '&Hide this column': '&Kuzhat ar bann-ma\xc3\xb1', 'Sort tasks by number of days left': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre niver a zevezhio\xc3\xb9 a chom", 'Unselect all items': 'Ezdiuza\xc3\xb1 an holl elfenno\xc3\xb9', '&Clear selection': '&Goullonderi\xc3\xb1 an diuzadenn', '&Select': '&Diuza\xc3\xb1', 'None': 'Tra ebet', 'S&ave as...\tShift+Ctrl+S': 'E&nrolla\xc3\xb1 dindan...\tShift+Ctrl+S', 'Show/hide inactive tasks (tasks with a start date in the future)': "Diskwel/Kuzhat ar c'hefridio\xc3\xb9 marv (ar c'hefridio\xc3\xb9 a zo un deiziad loc'ha\xc3\xb1 en dazont ganto)", 'Sort notes by subject': 'Rumma\xc3\xb1 an notenno\xc3\xb9 dre zanvez', 'Add an effort period to the selected task(s)': "Ouzhpenna\xc3\xb1 ur mare striv d'ar g(c'h)efridi(o\xc3\xb9) diuzet", 'Delete': 'Dilemel', 'Maximize priority': 'Maksimiza\xc3\xb1 ar briorelezh', '&Add attachment': '&Ouzhpenna\xc3\xb1 ur restr stag', "What is actually printed when you select 'File' -> 'Print' depends on the current view. If the current view shows the task list, a list of tasks will be printed, if the current view shows effort grouped by month, that will be printed. The same goes for visible columns, sort order, filtered tasks, etc.": "Ar pezh a vo moulet e gwirionez pa diuzit 'Restr' -> 'Moula\xc3\xb1' a vez diouzh ar gwel red. 
Ma eo kinniget roll ar c'hefridio\xc3\xb9 er gwel red e vo moulet ur roll kefridio\xc3\xb9, ma eo kinniget ar strivo\xc3\xb9 strollet dre viz er gwel red e vo moulet evel-se. Memes tra evit ar vanno\xc3\xb9 welus, ar renk rumma\xc3\xb1, ar c'hefridio\xc3\xb9 silet, h.a.", 'Export as CSV...': 'Ezporzhia\xc3\xb1 evel CSV...', 'Sort tasks by recurrence': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre rekura\xc3\xb1s", '&Medium-sized images': 'Skeudenno\xc3\xb9 &krenn', 'Sort tasks by due date': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre zeiziad dleet", 'Start': "Loc'ha\xc3\xb1", 'Print the current file': 'Moulla\xc3\xb1 ar restr red', 'Delete note': 'Dilemel an notenn', 'For this note including all subnotes': 'Evit an notenn-ma\xc3\xb1 hag an holl isnotenno\xc3\xb9', '&Quit\tCtrl+Q': '&Kuitaat\tCtrl+Q', '&Reminder': '&So\xc3\xb1jdegaser', 'Monday': 'Lun', 'Open the selected attachments': 'Digeri\xc3\xb1 ar restro\xc3\xb9 stag diuzet', 'Stop tracking effort for the active task(s)': "Paouez an heulia\xc3\xb1 striv ar g(c'h)efridi(o\xc3\xb9) v(b)ev", 'Export the current view in Comma Separated Values (CSV) format': 'Ezporzhia\xc3\xb1 ar gwel red e stumm Comma Separated Values (CVS)', '&Fixed fee': '&Treziad', 'Merge tasks from another file with the current file': 'Kendeuzi\xc3\xb1 kefridio\xc3\xb9 eus ur restr all gant ar restr red', '%s template files (*.tsktmpl)|*.tsktmpl': '%s restr patrom (*.tsktmpl)|*.tsktmpl', 'Mark task completed when all children are completed?': 'Merka\xc3\xb1 ar gefridi evel peurechu pa vez peurechu ar vugale?', 'Language': 'Yezh', 'Sort tasks by fixed fee': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre dreziad", 'Sort tasks by time spent': 'Rumma\xc3\xb1 ar gefridio\xc3\xb9 dre amzer implijet', '&Help': '&Skoazell', 'Rename viewer': 'Adenvel ar gweler', 'Include sub items': 'Lakaat an iselfenno\xc3\xb9 e-barzh', 'Insert a new task': 'Enlakaat ur gefridi nevez', 'Only show tasks due this year': "Diskwel ar c'hefridio\xc3\xb9 dleet er bloaz-ma\xc3\xb1 hepken", 'Sort tasks by revenue': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre c'hounid", '&About %s': '&Diwar-benn %s', 'Cancel': 'Nulla\xc3\xb1', 'Show/hide all budget-related columns': "Diskwel/Kuzhat an holl vanno\xc3\xb9 a ra dave d'ur budjed bennak", 'Add a new template from a template file\n': 'Ouzhpenna\xc3\xb1 ur patrom nevez diwar ur restr patrom\n', 'New subcategory': 'Isrummad nevez', '%s (subtasks)': '%s (iskefridio\xc3\xb9)', 'Show/hide description column': 'Diskwel/Kuzhat bann an deskrivadurio\xc3\xb9', 'Sort tasks by overall priority': 'Rumma\xc3\xb1 ar gefridio\xc3\xb9 dre bennrenk', 'Edit the selected task': 'Aoza\xc3\xb1 ar gefridi diuzet', 'Show/hide time spent column': 'Diskwel/Kuzhat bann an amzerio\xc3\xb9 implijet', 'Recent searches': 'klasko\xc3\xb9 fresk', 'Medium-sized images (22x22) on the toolbar': 'Skeudenno\xc3\xb9 krenn (22x22) war ar varrenn ostilho\xc3\xb9', 'Add ¬e': 'Ouzhpenna\xc3\xb1 un ¬enn', 'Delete the selected categories': 'Dilemel ar rummado\xc3\xb9 diuzet', 'Collapse all items with subitems': '&Serri\xc3\xb1 gwezenn an holl elfenno\xc3\xb9 gant an iselfenno\xc3\xb9', 'Delete the selected effort period(s)': 'Dilemel ar mare(o\xc3\xb9) striv diuzet', 'New subnote...': 'Isnotenn nevez...', 'Reminder date/time': 'So\xc3\xb1jdegaser deiziad/amzer', 'Show all items (reset all filters)': 'Diskouez an holl elfenno\xc3\xb9', 'Expand the selected item(s)': 'Digeri\xc3\xb1 gwezenn an elfenn(o\xc3\xb9) diuzet', 'Restore the window to its previous state': "Adsevel ar prenestr d'e stad kent", 'Paste into task': 
'Pega\xc3\xb1 er gefridi', 'O&verall priority': '&Pennrenk', 'Budget left': 'Budjed hegerz', 'Print...\tCtrl+P': 'Moulla\xc3\xb1...\tCtrl+P', 'Mail task': 'Postela\xc3\xb1 ar gefridi', 'Sort tasks by budget left': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre vudjed hegerz", 'Saturday': 'Sadorn', '<h3><a name="tasks">Tasks</a></h3>\n': '<h3><a name="tasks">Kefridio\xc3\xb9</a></h3>\n', 'Insert a new attachment': 'Enlakaat ur restr stag nevez', '&Large images': 'Skeudenno\xc3\xb9 &bras', 'Tree of tasks': "Gwezenn ar c'hefridio\xc3\xb9", 'Effort per month': 'Striv dre viz', 'Show/hide total time spent column (total time includes time spent on subtasks)': 'Diskwel/Kuzhat bann hollado\xc3\xb9 an amzerio\xc3\xb9 implijet (hollad an amzer a zo an amzer implijet gant an iskefridio\xc3\xb9 enna\xc3\xb1)', 'Total budget': 'Hollad ar budjed', 'For this task': 'Evit ar gefridi-ma\xc3\xb1', '&Month': '&Mizvezh', 'Eff&ort': '&Striv', 'New subnote': 'Isnotenn nevez', 'Show/hide revenue column': 'Diskwel/Kuzhat bann ar gounido\xc3\xb9', 'Edit category...': 'Aoza\xc3\xb1 ar rummad...', 'Save as template': 'Enrolla\xc3\xb1 evel ur patrom', '&Ascending': 'Dre &sevel', 'Language not found?': 'Ne gaver ket ar yezh-se ?', 'Check spelling in editors': 'Gwiriekaat ar reizhskrivadur e-barzh aozerio\xc3\xb9', 'Show/hide total budget column (total budget includes budget for subtasks)': 'Diskwel/Kuzhat bann hollado\xc3\xb9 ar budjedo\xc3\xb9 (hollad ar budjed a zo budjed an iskefridio\xc3\xb9 enna\xc3\xb1)', 'Time spent': 'Amzer implijet', 'Notes': 'Notenno\xc3\xb9', 'D&ays left': '&Devezhio\xc3\xb9 a chom', '&Budget': '&Budjed', 'T&oolbar': 'Barren &Ostilho\xc3\xb9', 'Help': 'Skoazell', 'Add a note to the selected task(s)': "Ouzhpenna\xc3\xb1 un notenn d'ar (c'h)gefridi(o\xc3\xb9) diuzet", 'Sort tasks by start date': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre zeiziad loc'ha\xc3\xb1", 'Only show tasks due this work week (i.e. 
before Friday)': "Diskwel ar c'hefridio\xc3\xb9 dleet er sizhunvezhiad labour-ma\xc3\xb1 hepken (da l\xc3\xa2ret eo a-raok ar gwener)", 'All budget columns': 'An holl vanno\xc3\xb9 budjed', 'New task': 'Kefridi nevez', 'Error while saving %s.ini:\n%s\n': "Fazi oc'h enrolla\xc3\xb1 %s.ini:\n%s\n", 'Status: %d over due, %d inactive, %d completed': 'Stad: %d dreist an termen, %d marv, %d peurechu', 'Insert a new note': 'Enlakaat un notenn nevez', 'Sort tasks by reminder date and time': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre so\xc3\xb1jdegaser deiziad hag amzer", 'Priority': 'Renk', 'Click this button to change the color of completed tasks': "Klika\xc3\xb1 war an afell-ma\xc3\xb1 da gemm liv ar c'hefridio\xc3\xb9 beurechu", 'Show tips window on startup': "Diskwel prenestr an tunio\xc3\xb9 d'al loc'ha\xc3\xb1", 'Delete the selected attachments': 'Dilemel ar restro\xc3\xb9 stag diuzet', 'Co&mpletion date': 'Deiziad &peurechu', 'Maximize priority\tShift+Ctrl+I': 'Maksimiza\xc3\xb1 ar briorelezh\tShift+Ctrl+I', 'Browse for files to add as attachment to the selected task(s)': "Furchal da gavout restro\xc3\xb9 da veza\xc3\xb1 ouzhpennet d'ar g(c'h)efridi(o\xc3\xb9) diuzet", 'Insert a new category': 'Enlakaat ur rummad nevez', 'For this task including all subtasks': 'Evit ar gefridi-ma\xc3\xb1 hag an holl iskefridio\xc3\xb9 enni', 'Total time spent': 'Hollad an amzer implijet', 'Show/hide total fixed fee column': 'Diskwel/Kuzhat bann hollado\xc3\xb9 an treziado\xc3\xb9', 'Viewer not searchable': "Ne c'haller ket klask gant ar gweler-ma\xc3\xb1", '&Description': '&Deskrivadur', 'Effort per week': 'Striv dre sizhun', 'Untitled e-mail': 'Postel hep titl', 'Edit attachment...': 'Aoza\xc3\xb1 ar restr stag...', '&Collapse all items\tShift+Ctrl+C': '&Serri\xc3\xb1 gwezenn an holl elfenno\xc3\xb9\tShift+Ctrl+C', 'Start tracking effort for the selected task(s)': "Loc'ha\xc3\xb1 an heulia\xc3\xb1 striv evit ar gefridi(o\xc3\xb9) diuzet", 'Sub&ject': '&Danvez', 'Due date': 'Deiziad dleet', 'Edit the selected effort period(s)': 'Aoza\xc3\xb1 ar mare(o\xc3\xb9) striv diuzet', 'Open a %s file': 'Digeri\xc3\xb1 ur restr %s', 'Open template...': 'Digeri\xc3\xb1 ur patrom...', '&File': '&Restr', 'New category': 'Rummad nevez', 'Friday': 'Gwener', 'Tips about the program': 'Tunio\xc3\xb9 diwar-benn ar programm', 'Show/hide budget column': 'Diskwel/Kuzhat bann ar vudjedo\xc3\xb9', 'Help about the program': 'Skoazell diwar-benn ar programm', '&All\tCtrl+A': 'An &holl\tCtrl+A', 'Status: n/a': 'Stad: n/a', 'Check for new version of %(name)s on startup': "Gwiriekaat beza\xc3\xb1s un doare nevez eus %(name)s el loc'ha\xc3\xb1", 'Welcome to %(name)s version %(version)s': 'Degemer mat e %(name)s aozadur %(version)s', 'Sort tasks by total revenue': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre hollad ar gounido\xc3\xb9", 'Show/hide completion date column': 'Diskwel/Kuzhat bann an deiziado\xc3\xb9 peurechu', 'When comparing text, sorting is case sensitive (checked) or insensitive (unchecked)': "Pa vez kenveriet skrid, rumma\xc3\xb1 en ur deuler evezh ouzh ar c'hef (merket) pe hep en ober (diverket)", 'Save as...': 'Enrolla\xc3\xb1 e...', 'Start tracking': "Loc'a\xc3\xb1 an heulia\xc3\xb1", 'Filter on all checked categories': 'Sil war an holl rummado\xc3\xb9 merket', 'Minimize priority\tShift+Ctrl+D': 'Minimiza\xc3\xb1 ar briorelezh\tShift+Ctrl+D', 'Show/hide all finance-related columns': "Diskwel/Kuzhat an holl vanno\xc3\xb9 a ra dave d'an arc'hanterezh", 'Error opening attachment': 'Fazi o tigeri\xc3\xb1 ar restr stag', '&Note': 
'&Notenn', 'Show/hide priority column': 'Diskwel/Kuzhat bann ar renko\xc3\xb9', 'Export as &HTML...': 'Ezporzhia\xc3\xb1 evel &HTML...', 'Show splash screen on startup': "Diskwel ar skramm degemer d'al loc'ha\xc3\xb1", 'Show/hide completed tasks': "Diskwel/Kuzhat ar c'hefridio\xc3\xb9 peurechu", '&Task': '&Kefridi', '<h4><a name="effortproperties">Effort\nproperties</a></h4>\n\n<p>Effort records have the following properties you can change:\n<ul>\n<li>Task: the task the effort belongs to.</li>\n<li>Start date/time: start date and time of the effort.</li>\n<li>Stop date/time: stop date and time of the effort. This can be \'None\' as \nlong as you are still working on the task.</li>\n<li>Description: a multi-line description of the effort.</li>\n</ul></p>\n\n<p>The following properties are calculated from the properties above:\n<ul>\n<li>Time spent: how much time you have spent working on the task.</li>\n<li>Total time spent: sum of time spent on the task and all subtasks, \nrecursively.</li>\n<li>Revenue: money earned with the time spent.</li>\n<li>Total revenue: money earned with the total time spent.</li>\n</ul></p>\n': '<h4><a name="effortproperties">Perzhio\xc3\xb9 ar striv</a></h4>\n\n<p>An enrolladurio\xc3\xb9 striv a zo ar perzhio\xc3\xb9 da heul a c\'hellit kemm dezho:\n<ul>\n<li>Kefridi: ar gefridi a zo ar striv dezhi.</li>\n<li>Loc\'ha\xc3\xb1 deiziad/amzer: loc\'ha\xc3\xb1 deiziad hag amzer ar striv.</li>\n<li>Paouez deiziad/amzer: paouez deiziad hag amzer ar striv. \'hini ebet\' a c\'hell beza\xc3\xb1 se ma \nemaoc\'h o labourat war ar gefridi c\'hoazh.</li>\n<li>Deskrivadur: un deskrivadur lieslinenn eus ar striv.</li>\n</ul></p>\n\n<p>Ar perzhio\xc3\xb9 da heul a vez jedet diouzh ar perzhio\xc3\xb9 a-us:\n<ul>\n<li>Amzer implijet: an amzer hoc\'h eus implijet o labourat war ar gefridi.</li>\n<li>Hollad an amzerio\xc3\xb9 implijet: sammad an amzerio\xc3\xb9 implijet evit ar gefridi hag he holl iskefridio\xc3\xb9.</li>\n<li>Gounid: arc\'hant gounezet gant an amzer implijet.</li>\n<li>Hollad ar gounido\xc3\xb9: arc\'hant gounezet gant hollad an amzerio\xc3\xb9 implijet.</li>\n</ul></p>\n', 'Colors': 'Livio\xc3\xb9', 'one task overdue': 'ur gefridi daleet', 'Save the selected task as a task template': 'Enrolla\xc3\xb1 ar gefridi diuzet evel ur patrom kefridi', 'Open': 'Digeri\xc3\xb1', 'Start with the main window iconized': "Loc'ha\xc3\xb1 gant ar prenestr penna\xc3\xb1 arlunet", 'Slow': 'Gorrek', 'Open a new tab with a viewer that displays notes': 'Digeri\xc3\xb1 ur steudenn nevez gant ur gweler hag a ziskouez an notenno\xc3\xb9', '&Start date': "&Deiziad loc'ha\xc3\xb1", '&Revenue': '&Gounid', 'Increase priority': 'Kreski\xc3\xb1 ar briorelezh', '&Today': '&Hiziv', 'Large images (32x32) on the toolbar': 'Skeudenno\xc3\xb9 bras (32x32) war ar varrenn ostilho\xc3\xb9', 'Fixed fee': 'Treziad', 'Show/hide due date column': 'Diskwel/Kuzhat bann an deiziado\xc3\xb9', 'Match case when filtering': "Teuler evezh ouzh ar c'hef pa vezer o sila\xc3\xb1", 'Make the selected task(s) the highest priority task(s)': "Grit ar g(c'h)efridi(o\xc3\xb9) a briorelezh uhela\xc3\xb1 diouzh ar g(c'h)efridi(o\xc3\xb9) diuzet", 'Stop': 'Paouez', '&Undo': '&Dizober', 'Expand all items with subitems': 'Digeri\xc3\xb1 gwezenn an holl elfenno\xc3\xb9 gant an iselfenno\xc3\xb9', 'Copy': 'Eila\xc3\xb1', 'Paste': 'Pega\xc3\xb1', 'Create a new task from a template': 'Kroui\xc3\xb1 ur gefridi nevez diwar ur patrom', ', every': ', bep', 'Sort tasks by total fixed fee': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre hollad 
an treziado\xc3\xb9", 'Select all items in the current view': 'Diuza\xc3\xb1 an holl elfenno\xc3\xb9 er gwel red', 'Version %(version)s of %(name)s is available from': "An doare %(version)s eus %(name)s a c'haller e gaout digant", 'Select a task via the menu and start tracking effort for it': "Diuza\xc3\xb1 ur gefridi dre ar meuziad ha loc'ha\xc3\xb1 an heulia\xc3\xb1 striv eviti", 'Show a preview of what the print will look like': 'Diskwel ur rakgwel eus an neuz a vo gant ar mouladur', '&Clear all filters': '&Goullonderi\xc3\xb1 an holl silo\xc3\xb9', 'Yes': 'Ya', 'Minimize main window when closed': 'Izelaat ar prenestr penna\xc3\xb1 pa vez serret', 'E&xpand': '&Digeri\xc3\xb1 ar wezenn', 'Paste item(s) from the clipboard into the selected task': "Pega\xc3\xb1 (un) elfenn(o\xc3\xb9) eus ar golver d'ar gefridi diuzet", 'Sort tasks by budget': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre vudjed", '&Redo': '&Adober', 'Closed %s': '%s serret', 'Click this button to change the color of active tasks': "Klika\xc3\xb1 war an afell-ma\xc3\xb1 da gemm liv ar c'hefridio\xc3\xb9 vev", '&View': '&Diskwel', '&Edit': '&Aoza\xc3\xb1', 'New task with selected categories...': 'Kefridi nevez gant ar rummado\xc3\xb9 diuzet...', 'Sunday': 'Sul', '&Recurrence': '&Rekura\xc3\xb1s', 'Files': 'Restro\xc3\xb9', 'If you enter a URL (e.g. %(url)s) in a task or effort description, it becomes a link. Clicking on the link will open the URL in your default web browser.': 'Ma skrivit un URL (%(url)s da skouer) e deskrivadur ur gefridi pe deskrivadur ur striv e teu anezhi da veza\xc3\xb1 ul liamm. Klika\xc3\xb1 war al liamm a zigero an URL en ho furcher kenrouedad dre ziouer.', '&Notes': '&Notenno\xc3\xb9', 'T&omorrow': "&Warc'hoazh", 'Overall priority': 'Pennrenk', 'Help: About %s': 'Skoazell: Diwar-benn %s', 'T&otal time spent': 'H&ollad an amzer implijet', '&Delete effort': '&Dilemel ar striv', 'Please enter password for user %(user)s on %(server)s:%(port)d': 'Bizskrivit ger-tremen %(user)s e %(server)s:%(port)d mar plij', 'Setup the characteristics of the printer page': 'Keflunia\xc3\xb1 perzhio\xc3\xb9 ar bajenn voulerez', 'Mail the task, using your default mailer': 'Postela\xc3\xb1 ar gefridi, dre ho posteler dre ziouer', 'Total budget l&eft': 'Hollad ar budjed hegerz', 'Open a new tab with a viewer that displays categories': 'Digeri\xc3\xb1 ur steudenn nevez gant ur gweler hag a ziskouez ar rummado\xc3\xb9', 'Start tracking from last stop time': "Loc'ha\xc3\xb1 an heulia\xc3\xb1 adalek ar c'houlz paouez diwezha\xc3\xb1", 'Click this button to change the color of over due tasks': "Klika\xc3\xb1 war an afell-ma\xc3\xb1 da gemm liv ar c'hefridio\xc3\xb9 dreist an termen", 'OK': 'Mat eo', 'Collapse the selected items with subitems': '&Serri\xc3\xb1 gwezenn an elfenno\xc3\xb9 diuzet gant an iselfenno\xc3\xb9', '&Close\tCtrl+W': '&Serri\xc3\xb1\tCtrl+W', '&Due date': 'Deiziad &dleet', 'Status&bar': '&Barrenn stad', 'Click this button to change the color of inactive tasks': "Klika\xc3\xb1 war an afell-ma\xc3\xb1 da gemm liv ar c'hefridio\xc3\xb9 varv", '&Year': '&Bloavezh', 'Make clock in the task bar tick when tracking effort': "Lakaat an horolaj er varrenn gefridi d'ober titirin pa vez heuliet ur striv", 'Editor': 'Aozer', 'Decrease the priority of the selected task(s)': "Digreski\xc3\xb1 priorelezh ar g(c'h)efridi(o\xc3\xb9) diuzet", 'Undo the last command': 'Dizober an urzhiad diwezha\xc3\xb1', 'All financial columns': "An holl vanno\xc3\xb9 arc'hanterezh", '&Categories': '&Rummado\xc3\xb9', 'Sort tasks by hourly fee': 
"Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre dreziad dre eurvezh", 'Show/hide start date column': "Diskwel/Kuzhat bann an deiziado\xc3\xb9 loc'ha\xc3\xb1", 'tracking "%s"': 'oc\'h heulia\xc3\xb1 "%s"', 'For this note': 'Evit an notenn-ma\xc3\xb1', 'Effort': 'Striv', '<h3><a name="templates">Task templates</a></h3>\n': '<h3><a name="templates">Patromo\xc3\xb9 kefridio\xc3\xb9</a></h3>\n', 'Preferences...': 'Penndibabo\xc3\xb9...', "Cannot open %s because it doesn't exist": "Ne c'haller ket digeri\xc3\xb1 %s rak n'eus ket anezha\xc3\xb1", 'Daily': 'Deizek', '&Help contents\tCtrl+?': "&Endalc'h ar skoazell\tCtrl+?", 'Sa&ve selection...': '&Enrolla\xc3\xb1 an diuzadenn...', 'Behavior': "Emzalc'h", '&Columns': '&Banno\xc3\xb9', 'New task &from template': 'Kefridi nevez &diwar ur patrom', 'CSV files (*.csv)|*.csv|Text files (*.txt)|*.txt|All files (*.*)|*': 'Restro\xc3\xb9 CSV (*.csv)|*.csv|Restro\xc3\xb9 Testenn (*.txt)|*.txt|An holl restro\xc3\xb9 (*.*)|*', 'New subcategory...': 'Isrummad nevez...', 'Always': 'Bepred', 'Tasks': 'Kefridio\xc3\xb9', 'Completion date': 'Deiziad ar peurechui\xc3\xb1', 'Total b&udget': 'Hollad ar b&udjed', 'Revenue': 'Gounid', 'tracking effort for %d tasks': "heulia\xc3\xb1 striv evit %d (g)(c'h)kefridi", 'Days left': 'Devezhio\xc3\xb9 a chom', 'Effort: %d selected, %d visible, %d total': 'Striv: %d diuzet, %d gwelus, %d en holl', 'Close the current file': 'Serri\xc3\xb1 ar restr red', 'day(s),': 'deiz(io\xc3\xb9),', 'Weekly': 'Sizhuniek', 'Cut': "Troc'ha\xc3\xb1", 'No': 'Ket', 'Loaded %(nrtasks)d tasks from %(filename)s': '%(nrtasks)d (g)kefridio\xc3\xb9 karget eus %(filename)s', 'Open attachment': 'Digeri\xc3\xb1 ar restr stag', 'You have unsaved changes.\nSave before closing?': 'Kemmo\xc3\xb9 dienroll ho peus.\nEnrolla\xc3\xb1 a-raok serri\xc3\xb1?', 'Show/hide reminder column': 'Diskwel/Kuzhat bann an so\xc3\xb1jdegaserio\xc3\xb9', '&Week': '&Sizhunvezh', 'Sort by status &first': 'Rumma\xc3\xb1 dre stad da &genta\xc3\xb1', 'Status: %d tracking': 'Stad: %d heulia\xc3\xb1', '&Delete task\tCtrl+DEL': '&Dilemel ar gefridi\tCtrl+DEL', 'Paste item(s) from the clipboard': 'Pega\xc3\xb1 an elfenn(o\xc3\xb9) eus ar golver', 'Mark the selected task(s) uncompleted': "Merka\xc3\xb1 ar g(c'h)efridi(o\xc3\xb9) diuzet evel diechu", 'Drag and drop': "Rikla\xc3\xb1-dilec'hia\xc3\xb1", 'Sort attachments by category': 'Rumma\xc3\xb1 ar restro\xc3\xb9 stag dre rummad', '&Open all attachments': '&Digeri\xc3\xb1 an holl restro\xc3\xb9 stag', 'Infinite': 'didermen', 'Sort by subject': 'Rumma\xc3\xb1 dre zanvez', '&Open...\tCtrl+O': '&Digeri\xc3\xb1...\tCtrl+O', '&Financial': "&Arc'hanterezh", 'Show/hide fixed fee column': 'Diskwel/Kuzhat bann an treziado\xc3\xb9', '%s: save changes?': "%s: enrolla\xc3\xb1 ar c'hemmo\xc3\xb9?", 'Browse for files to add as attachment to the selected note(s)': "Furchal da gaout restro\xc3\xb9 da veza\xc3\xb1 ouzhpennet d'an notenn(o\xc3\xb9) diuzet", 'Saved %(nrtasks)d tasks to %(filename)s': '%(nrtasks)d enrollet e %(filename)s', 'Edit SyncML preferences': 'Aoza\xc3\xb1 ar penndibabo\xc3\xb9 SyncML', 'Only show tasks due today': "Diskwel ar c'hefridio\xc3\xb9 dleet hiziv hepken", '<h3><a name="effort">Effort</a></h3>\n': '<h3><a name="effort">Striv</a></h3>\n', 'iCalendar files (*.ics)|*.ics|All files (*.*)|*': 'Restro\xc3\xb9 iCalendar (*.ics)|*.ics|An holl restro\xc3\xb9 (*.*)|*', 'Window behavior': "Emzalc'h ar prenestr", 'Only show tasks due this week (i.e. 
before Sunday)': 'Diskwel ar gefridi dleet er sizhun-ma\xc3\xb1 (da l\xc3\xa2ret eo a-raok disadorn)', 'Export as iCalendar...': 'Ezporzhia\xc3\xb1 evel iCalendar...', 'Stop tracking': 'Paouez an heulia\xc3\xb1', 'Show/hide status bar': 'Diskwel/Kuzhat ar varren stad', '&Copy\tCtrl+C': '&Eila\xc3\xb1\tCtrl+C', 'Hourly fee': 'Treziad dre eurvezh', 'Export as &CSV...': 'Ezporzhia\xc3\xb1 evel &CSV...', 'Show only tasks &due before end of': "Diskouez hepken ar c'hefridio\xc3\xb9 &dleet a-raok dibenn", 'Thursday': 'Yaou', 'Recurrence': 'Rekura\xc3\xb1s', 'Sort ascending (checked) or descending (unchecked)': 'Rumma\xc3\xb1 dre sevel (merket) pe dre ziskenn (diverket)', 'Insert a new subnote': 'Enlakaat un isnotenn nevez', 'Description': 'Deskrivadur', 'Save error': 'Fazi enrolla\xc3\xb1', 'Show/hide all date-related columns': "Diskwel/Kuzhat an holl vanno\xc3\xb9 a ra dave d'un deiziad bennak", 'Rename the selected viewer': 'Adenvel ar gwelerio\xc3\xb9 diuzet', '&Total fixed fee': '&Hollad an treziado\xc3\xb9', 'Cut the selected item(s) to the clipboard': "Troc'ha\xc3\xb1 ar g(c'h)efridi(o\xc3\xb9) diuzet d'ar golver", 'Show/hide total revenue column': 'Diskwel/Kuzhat bann hollado\xc3\xb9 ar gounido\xc3\xb9', 'New &subtask...': '&Iskefridi nevez...', 'Effort details': 'Munudo\xc3\xb9 ar striv', 'New attachment...': 'Restr stag nevez...', 'Effort per day': 'Striv dre zeiz', 'Page': 'Pajenn', '&Completion date': 'Deidiaz &peurechui\xc3\xb1', 'Color': 'Liv', 'Show/hide total budget left column (total budget left includes budget left for subtasks)': 'Diskwel/Kuzhat bann hollad ar budjedo\xc3\xb9 hegerz (hollad ar budjedo\xc3\xb9 hegerz a zo budjed hegerz an iskefridio\xc3\xb9 enna\xc3\xb1)', 'Sort tasks by status (active/inactive/completed) first': "Rumma\xc3\xb1 ar c'hefridio\xc3\xb9 dre stad (bev/marv/peurechu) da genta\xc3\xb1", 'Mail note': 'Kas an notenn der bostel', 'Show/hide budget left column': 'Diskwel/Kuzhad bann ar budjed hegerz', "%(name)s has unlimited undo and redo. Any change that you make, be it editing a task description, or deleting an effort record, is undoable. Select 'Edit' -> 'Undo' and 'Edit' -> 'Redo' to go backwards and forwards through your edit history.": 'Gant %(name)s e vez diharz an disober hag an adober. Posubl e vez disober pep kemm a rafec\'h, ma vefe aoza\xc3\xb1 deskrivadur ur gefridi pe dilemel un enrolladur striv. Diuzit \'Aoza\xc3\xb1\' -> \'Disober\' hag "Aoza\xc3\xb1\' -> \'Adober\' evit mont war-gil ha war-raok en hoc\'h istoradur aoza\xc3\xb1.', '%d tasks overdue': "%d (c'h)(g)efridi daleet", 'New task due today': 'Kefridi nevez dleet hiziv'}
|
MobinRanjbar/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/commands_sql/tests.py
|
58
|
from __future__ import unicode_literals
from django.core.management.color import no_style
from django.core.management.sql import (sql_create, sql_delete, sql_indexes,
sql_destroy_indexes, sql_all)
from django.db import connections, DEFAULT_DB_ALIAS, models
from django.test import TestCase
from django.utils import six
# See also initial_sql_regress for 'custom_sql_for_model' tests
class SQLCommandsTestCase(TestCase):
"""Tests for several functions in django/core/management/sql.py"""
def count_ddl(self, output, cmd):
return len([o for o in output if o.startswith(cmd)])
def test_sql_create(self):
app = models.get_app('commands_sql')
output = sql_create(app, no_style(), connections[DEFAULT_DB_ALIAS])
create_tables = [o for o in output if o.startswith('CREATE TABLE')]
self.assertEqual(len(create_tables), 3)
# Lowercase so that Oracle's upper-case table names won't break the regex match
sql = create_tables[-1].lower()
six.assertRegex(self, sql, r'^create table .commands_sql_book.*')
def test_sql_delete(self):
app = models.get_app('commands_sql')
output = sql_delete(app, no_style(), connections[DEFAULT_DB_ALIAS])
drop_tables = [o for o in output if o.startswith('DROP TABLE')]
self.assertEqual(len(drop_tables), 3)
# Lowercase so that Oracle's upper-case table names won't break the regex match
sql = drop_tables[-1].lower()
six.assertRegex(self, sql, r'^drop table .commands_sql_comment.*')
def test_sql_indexes(self):
app = models.get_app('commands_sql')
output = sql_indexes(app, no_style(), connections[DEFAULT_DB_ALIAS])
# PostgreSQL creates one additional index for CharField
self.assertIn(self.count_ddl(output, 'CREATE INDEX'), [3, 4])
def test_sql_destroy_indexes(self):
app = models.get_app('commands_sql')
output = sql_destroy_indexes(app, no_style(), connections[DEFAULT_DB_ALIAS])
# PostgreSQL creates one additional index for CharField
self.assertIn(self.count_ddl(output, 'DROP INDEX'), [3, 4])
def test_sql_all(self):
app = models.get_app('commands_sql')
output = sql_all(app, no_style(), connections[DEFAULT_DB_ALIAS])
self.assertEqual(self.count_ddl(output, 'CREATE TABLE'), 3)
# PostgreSQL creates one additional index for CharField
self.assertIn(self.count_ddl(output, 'CREATE INDEX'), [3, 4])
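# Usage note (sketch, not part of this test module): these helpers back the
# Django 1.6 SQL management commands, so the equivalent CLI invocations are
# roughly:
#
#     python manage.py sql commands_sql        # output of sql_create()
#     python manage.py sqlclear commands_sql   # output of sql_delete()
#     python manage.py sqlindexes commands_sql # output of sql_indexes()
#     python manage.py sqlall commands_sql     # output of sql_all()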
|
westernx/sgmock
|
refs/heads/master
|
sgmock/fixture/__init__.py
|
1
|
from .base import Fixture
# Silence pyflakes.
assert Fixture
|
chanceraine/nupic
|
refs/heads/master
|
tests/integration/nupic/opf/opf_checkpoint_test/experiments/non_temporal_multi_step/a_plus_b/description.py
|
114
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2011-2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
###############################################################################
# IMPORTANT!!!
# This params file is dynamically generated by the RunExperimentPermutations
# script. Any changes made manually will be over-written the next time
# RunExperimentPermutations is run!!!
###############################################################################
from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription
# the sub-experiment configuration
config ={
'modelParams' : {'sensorParams': {'encoders': {u'c0_timeOfDay': None, u'c0_dayOfWeek': None, u'c1': {'name': 'c1', 'clipInput': True, 'n': 275, 'fieldname': 'c1', 'w': 21, 'type': 'AdaptiveScalarEncoder'}, u'c0_weekend': None}}, 'spParams': {'synPermInactiveDec': 0.052500000000000005}, 'tpParams': {'minThreshold': 11, 'activationThreshold': 14, 'pamLength': 3}, 'clParams': {'alpha': 0.050050000000000004}},
'firstRecord': 0,
'lastRecord': 500,
}
mod = importBaseDescription('../base.py', config)
locals().update(mod.__dict__)
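# Note (assumption based on the helper's name and usage): importBaseDescription
# loads the shared ../base.py experiment description with the keys in `config`
# overriding the base values, and the locals().update() call re-exports the
# merged description from this generated module.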
|
ehogan/iris
|
refs/heads/master
|
docs/iris/example_tests/test_deriving_phenomena.py
|
11
|
# (C) British Crown Copyright 2010 - 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import Iris tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from .extest_util import (add_examples_to_path,
show_replaced_by_check_graphic,
fail_any_deprecation_warnings)
class TestDerivingPhenomena(tests.GraphicsTest):
"""Test the deriving_phenomena example code."""
def test_deriving_phenomena(self):
with fail_any_deprecation_warnings():
with add_examples_to_path():
import deriving_phenomena
with show_replaced_by_check_graphic(self):
deriving_phenomena.main()
if __name__ == '__main__':
tests.main()
|
Lujeni/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/pure.py
|
56
|
# -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Simon Dodsley <simon@purestorage.com>,2017
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
HAS_PURESTORAGE = True
try:
from purestorage import purestorage
except ImportError:
HAS_PURESTORAGE = False
HAS_PURITY_FB = True
try:
from purity_fb import PurityFb, FileSystem, FileSystemSnapshot, SnapshotSuffix, rest
except ImportError:
HAS_PURITY_FB = False
from functools import wraps
from os import environ
from os import path
import platform
VERSION = 1.2
USER_AGENT_BASE = 'Ansible'
API_AGENT_VERSION = 1.5
def get_system(module):
"""Return System Object or Fail"""
user_agent = '%(base)s %(class)s/%(version)s (%(platform)s)' % {
'base': USER_AGENT_BASE,
'class': __name__,
'version': VERSION,
'platform': platform.platform()
}
array_name = module.params['fa_url']
api = module.params['api_token']
if array_name and api:
system = purestorage.FlashArray(array_name, api_token=api, user_agent=user_agent)
elif environ.get('PUREFA_URL') and environ.get('PUREFA_API'):
system = purestorage.FlashArray(environ.get('PUREFA_URL'), api_token=(environ.get('PUREFA_API')), user_agent=user_agent)
else:
module.fail_json(msg="You must set PUREFA_URL and PUREFA_API environment variables or the fa_url and api_token module arguments")
try:
system.get()
except Exception:
module.fail_json(msg="Pure Storage FlashArray authentication failed. Check your credentials")
return system
def get_blade(module):
"""Return System Object or Fail"""
user_agent = '%(base)s %(class)s/%(version)s (%(platform)s)' % {
'base': USER_AGENT_BASE,
'class': __name__,
'version': VERSION,
'platform': platform.platform()
}
blade_name = module.params['fb_url']
api = module.params['api_token']
if blade_name and api:
blade = PurityFb(blade_name)
blade.disable_verify_ssl()
try:
blade.login(api)
if API_AGENT_VERSION in blade.api_version.list_versions().versions:
blade._api_client.user_agent = user_agent
except rest.ApiException:
module.fail_json(msg="Pure Storage FlashBlade authentication failed. Check your credentials")
elif environ.get('PUREFB_URL') and environ.get('PUREFB_API'):
blade = PurityFb(environ.get('PUREFB_URL'))
blade.disable_verify_ssl()
try:
blade.login(environ.get('PUREFB_API'))
if API_AGENT_VERSION in blade.api_version.list_versions().versions:
blade._api_client.user_agent = user_agent
except rest.ApiException:
module.fail_json(msg="Pure Storage FlashBlade authentication failed. Check your credentials")
else:
module.fail_json(msg="You must set PUREFB_URL and PUREFB_API environment variables or the fb_url and api_token module arguments")
return blade
def purefa_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
fa_url=dict(),
api_token=dict(no_log=True),
)
def purefb_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
fb_url=dict(),
api_token=dict(no_log=True),
)
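# Usage sketch (hypothetical module, not part of this file): a FlashArray
# module typically combines these helpers as below; the `name` option and the
# list_volumes() call are illustrative:
#
#     from ansible.module_utils.basic import AnsibleModule
#     from ansible.module_utils.pure import get_system, purefa_argument_spec
#
#     argument_spec = purefa_argument_spec()
#     argument_spec.update(dict(name=dict(required=True)))
#     module = AnsibleModule(argument_spec, supports_check_mode=True)
#     array = get_system(module)   # calls fail_json() on bad/missing credentials
#     volumes = array.list_volumes()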
|
jerbob92/CouchPotatoServer
|
refs/heads/master
|
libs/tornado/web.py
|
5
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
import uuid
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
class RequestHandler(object):
"""Subclass this class and define `get()` or `post()` to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable ``SUPPORTED_METHODS`` in your
`RequestHandler` subclass.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
def __init__(self, application, request, **kwargs):
super(RequestHandler, self).__init__()
self.application = application
self.request = request
self._headers_written = False
self._finished = False
self._auto_finish = True
self._transforms = None # will be set in _execute
self.path_args = None
self.path_kwargs = None
self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
application.ui_methods.items())
# UIModules are available as both `modules` and `_tt_modules` in the
# template namespace. Historically only `modules` was available
# but could be clobbered by user additions to the namespace.
# The template {% module %} directive looks in `_tt_modules` to avoid
# possible conflicts.
self.ui["_tt_modules"] = _UIModuleNamespace(self,
application.ui_modules)
self.ui["modules"] = self.ui["_tt_modules"]
self.clear()
# Check since connection is not available in WSGI
if getattr(self.request, "connection", None):
self.request.connection.set_close_callback(
self.on_connection_close)
self.initialize(**kwargs)
def initialize(self):
"""Hook for subclass initialization.
A dictionary passed as the third argument of a url spec will be
supplied as keyword arguments to initialize().
Example::
class ProfileHandler(RequestHandler):
def initialize(self, database):
self.database = database
def get(self, username):
...
app = Application([
(r'/user/(.*)', ProfileHandler, dict(database=database)),
])
"""
pass
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
def head(self, *args, **kwargs):
raise HTTPError(405)
def get(self, *args, **kwargs):
raise HTTPError(405)
def post(self, *args, **kwargs):
raise HTTPError(405)
def delete(self, *args, **kwargs):
raise HTTPError(405)
def patch(self, *args, **kwargs):
raise HTTPError(405)
def put(self, *args, **kwargs):
raise HTTPError(405)
def options(self, *args, **kwargs):
raise HTTPError(405)
def prepare(self):
"""Called at the beginning of a request before `get`/`post`/etc.
Override this method to perform common initialization regardless
of the request method.
Asynchronous support: Decorate this method with `.gen.coroutine`
or `.return_future` to make it asynchronous (the
`asynchronous` decorator cannot be used on `prepare`).
If this method returns a `.Future` execution will not proceed
until the `.Future` is done.
.. versionadded:: 3.1
Asynchronous support.
"""
pass
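# Example (illustrative subclass, not part of tornado; the force_https
# setting is hypothetical): prepare() is the usual place for checks shared
# by every HTTP verb:
#
#     class BaseHandler(RequestHandler):
#         def prepare(self):
#             if self.settings.get("force_https") and \
#                     self.request.protocol == "http":
#                 self.redirect("https://" + self.request.host +
#                               self.request.uri, permanent=True)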
def on_finish(self):
"""Called after the end of a request.
Override this method to perform cleanup, logging, etc.
This method is a counterpart to `prepare`. ``on_finish`` may
not produce any output, as it is called after the response
has been sent to the client.
"""
pass
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
Override this to clean up resources associated with
long-lived connections. Note that this method is called only if
the connection was closed during asynchronous processing; if you
need to do cleanup after every request override `on_finish`
instead.
Proxies may keep a connection open for a time (perhaps
indefinitely) after the client has gone away, so this method
may not be called promptly after the end user closes their
connection.
"""
pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = httputil.HTTPHeaders({
"Server": "TornadoServer/%s" % tornado.version,
"Content-Type": "text/html; charset=UTF-8",
"Date": httputil.format_timestamp(time.time()),
})
self.set_default_headers()
if (not self.request.supports_http_1_1() and
getattr(self.request, 'connection', None) and
not self.request.connection.no_keep_alive):
conn_header = self.request.headers.get("Connection")
if conn_header and (conn_header.lower() == "keep-alive"):
self.set_header("Connection", "Keep-Alive")
self._write_buffer = []
self._status_code = 200
self._reason = httputil.responses[200]
def set_default_headers(self):
"""Override this to set HTTP headers at the beginning of the request.
For example, this is the place to set a custom ``Server`` header.
Note that setting such headers in the normal flow of request
processing may not do what you want, since headers may be reset
during error handling.
"""
pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
def get_status(self):
"""Returns the status code for our response."""
return self._status_code
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we raise a `MissingArgumentError` if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise MissingArgumentError(name)
return default
return args[-1]
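# Example (sketch): for a request to /search?q=foo&q=bar,
# self.get_argument("q") returns u"bar" (the last value); passing
# default="" makes the argument optional instead of raising
# MissingArgumentError.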
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.arguments.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both `get_argument()` and for
values extracted from the url and passed to `get()`/`post()`/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
@property
def cookies(self):
"""An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
value = escape.native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
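# Example (sketch): extra keyword arguments are set on the Morsel, with
# max_age mapped to the max-age cookie attribute:
#
#     self.set_cookie("session", "abc123", max_age=3600, httponly=True)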
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name.
Due to limitations of the cookie protocol, you must pass the same
path and domain to clear a cookie as were used when that cookie
was set (but there is no way to find out on the server side
which values were used for a given cookie).
"""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self, path="/", domain=None):
"""Deletes all the cookies the user sent with this request.
See `clear_cookie` for more information on the path and domain
parameters.
"""
for name in self.request.cookies:
self.clear_cookie(name, path=path, domain=domain)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
"""Signs and timestamps a cookie so it cannot be forged.
You must specify the ``cookie_secret`` setting in your Application
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use `get_secure_cookie()`.
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
Secure cookies may contain arbitrary byte values, not just unicode
strings (unlike regular cookies)
"""
self.set_cookie(name, self.create_signed_value(name, value),
expires_days=expires_days, **kwargs)
def create_signed_value(self, name, value):
"""Signs and timestamps a string so it cannot be forged.
Normally used via set_secure_cookie, but provided as a separate
method for non-cookie uses. To decode a value not stored
as a cookie use the optional value argument to get_secure_cookie.
"""
self.require_setting("cookie_secret", "secure cookies")
return create_signed_value(self.application.settings["cookie_secret"],
name, value)
def get_secure_cookie(self, name, value=None, max_age_days=31):
"""Returns the given signed cookie if it validates, or None.
The decoded cookie value is returned as a byte string (unlike
`get_cookie`).
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
value = self.get_cookie(name)
return decode_signed_value(self.application.settings["cookie_secret"],
name, value, max_age_days=max_age_days)
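# Round-trip sketch (requires the cookie_secret application setting):
#
#     self.set_secure_cookie("user", "ben")  # signed and timestamped
#     user = self.get_secure_cookie("user")  # b"ben", or None if the
#                                            # signature check fails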
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
utf8(url)))
self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
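# Example (sketch): writing a dict switches the response to JSON:
#
#     self.write({"status": "ok"})  # sets Content-Type: application/json
#     self.write("<b>hi</b>")       # buffered; sent on flush()/finish()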
def render(self, template_name, **kwargs):
"""Renders the template with the given arguments as the response."""
html = self.render_string(template_name, **kwargs)
# Insert the additional JS and CSS added by the modules on the page
js_embed = []
js_files = []
css_embed = []
css_files = []
html_heads = []
html_bodies = []
for module in getattr(self, "_active_modules", {}).values():
embed_part = module.embedded_javascript()
if embed_part:
js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
js_files.append(file_part)
else:
js_files.extend(file_part)
embed_part = module.embedded_css()
if embed_part:
css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
css_files.append(file_part)
else:
css_files.extend(file_part)
head_part = module.html_head()
if head_part:
html_heads.append(utf8(head_part))
body_part = module.html_body()
if body_part:
html_bodies.append(utf8(body_part))
def is_absolute(path):
return any(path.startswith(x) for x in ["/", "http:", "https:"])
if js_files:
# Maintain order of JavaScript files given by modules
paths = []
unique_paths = set()
for path in js_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
js = ''.join('<script src="' + escape.xhtml_escape(p) +
'" type="text/javascript"></script>'
for p in paths)
sloc = html.rindex(b'</body>')
html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
if js_embed:
js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
b'\n'.join(js_embed) + b'\n//]]>\n</script>'
sloc = html.rindex(b'</body>')
html = html[:sloc] + js + b'\n' + html[sloc:]
if css_files:
paths = []
unique_paths = set()
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths)
hloc = html.index(b'</head>')
html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
if css_embed:
css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
b'\n</style>'
hloc = html.index(b'</head>')
html = html[:hloc] + css + b'\n' + html[hloc:]
if html_heads:
hloc = html.index(b'</head>')
html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
if html_bodies:
hloc = html.index(b'</body>')
html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
self.finish(html)
def render_string(self, template_name, **kwargs):
"""Generate the given template with the given arguments.
We return the generated byte string (in utf8). To generate and
write a template as a response, use render() above.
"""
# If no template_path is specified, use the path of the calling file
template_path = self.get_template_path()
if not template_path:
frame = sys._getframe(0)
web_file = frame.f_code.co_filename
while frame.f_code.co_filename == web_file:
frame = frame.f_back
template_path = os.path.dirname(frame.f_code.co_filename)
with RequestHandler._template_loader_lock:
if template_path not in RequestHandler._template_loaders:
loader = self.create_template_loader(template_path)
RequestHandler._template_loaders[template_path] = loader
else:
loader = RequestHandler._template_loaders[template_path]
t = loader.load(template_name)
namespace = self.get_template_namespace()
namespace.update(kwargs)
return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
def flush(self, include_footers=False, callback=None):
"""Flushes the current output buffer to the network.
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded.
"""
if self.application._wsgi:
# WSGI applications cannot usefully support flush, so just make
# it a no-op (and run the callback immediately).
if callback is not None:
callback()
return
chunk = b"".join(self._write_buffer)
self._write_buffer = []
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
self._status_code, self._headers, chunk = \
transform.transform_first_chunk(
self._status_code, self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
chunk = transform.transform_chunk(chunk, include_footers)
headers = b""
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
if headers:
self.request.write(headers, callback=callback)
return
self.request.write(headers + chunk, callback=callback)
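# Example (sketch): using the flush callback for flow control when streaming
# large responses:
#
#     def write_next_chunk():
#         ...  # queue more data only after the previous chunk hit the socket
#     self.write(chunk)
#     self.flush(callback=write_next_chunk)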
def finish(self, chunk=None):
"""Finishes this response, ending the HTTP request."""
if self._finished:
raise RuntimeError("finish() called twice. May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if chunk is not None:
self.write(chunk)
# Automatically support ETags and add the Content-Length header if
# we have not flushed any content yet.
if not self._headers_written:
if (self._status_code == 200 and
self.request.method in ("GET", "HEAD") and
"Etag" not in self._headers):
self.set_etag_header()
if self.check_etag_header():
self._write_buffer = []
self.set_status(304)
if self._status_code == 304:
assert not self._write_buffer, "Cannot send body with 304"
self._clear_headers_for_304()
elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
if hasattr(self.request, "connection"):
# Now that the request is finished, clear the callback we
# set on the HTTPConnection (which would otherwise prevent the
# garbage collection of the RequestHandler when there
# are keepalive connections)
self.request.connection.set_close_callback(None)
if not self.application._wsgi:
self.flush(include_footers=True)
self.request.finish()
self._log()
self._finished = True
self.on_finish()
# Break up a reference cycle between this handler and the
# _ui_module closures to allow for faster GC on CPython.
self.ui = None
def send_error(self, status_code=500, **kwargs):
"""Sends the given HTTP error code to the browser.
If `flush()` has already been called, it is not possible to send
an error, so this method will simply terminate the response.
If output has been written but not yet flushed, it will be discarded
and replaced with the error page.
Override `write_error()` to customize the error page that is returned.
Additional keyword arguments are passed through to `write_error`.
"""
if self._headers_written:
gen_log.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
self.clear()
reason = None
if 'exc_info' in kwargs:
exception = kwargs['exc_info'][1]
if isinstance(exception, HTTPError) and exception.reason:
reason = exception.reason
self.set_status(status_code, reason=reason)
try:
self.write_error(status_code, **kwargs)
except Exception:
app_log.error("Uncaught exception in write_error", exc_info=True)
if not self._finished:
self.finish()
def write_error(self, status_code, **kwargs):
"""Override to implement custom error pages.
``write_error`` may call `write`, `render`, `set_header`, etc
to produce output as usual.
If this error was caused by an uncaught exception (including
HTTPError), an ``exc_info`` triple will be available as
``kwargs["exc_info"]``. Note that this exception may not be
the "current" exception for purposes of methods like
``sys.exc_info()`` or ``traceback.format_exc``.
For historical reasons, if a method ``get_error_html`` exists,
it will be used instead of the default ``write_error`` implementation.
``get_error_html`` returned a string instead of producing output
normally, and had different semantics for exception handling.
Users of ``get_error_html`` are encouraged to convert their code
to override ``write_error`` instead.
"""
if hasattr(self, 'get_error_html'):
if 'exc_info' in kwargs:
exc_info = kwargs.pop('exc_info')
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
self.finish(self.get_error_html(status_code, **kwargs))
return
if self.settings.get("debug") and "exc_info" in kwargs:
# in debug mode, try to send a traceback
self.set_header('Content-Type', 'text/plain')
for line in traceback.format_exception(*kwargs["exc_info"]):
self.write(line)
self.finish()
else:
self.finish("<html><title>%(code)d: %(message)s</title>"
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": self._reason,
})
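# Example (illustrative subclass, not part of tornado): an API handler can
# override write_error() to emit JSON instead of the default HTML page:
#
#     class APIHandler(RequestHandler):
#         def write_error(self, status_code, **kwargs):
#             self.finish({"code": status_code, "message": self._reason})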
@property
def locale(self):
"""The local for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
if not self._locale:
self._locale = self.get_browser_locale()
assert self._locale
return self._locale
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we fall back to `get_browser_locale()`.
This method should return a `tornado.locale.Locale` object,
most likely obtained via a call like ``tornado.locale.get("en")``
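For example, a sketch assuming ``self.current_user`` is a dict with
an optional ``"locale"`` key (illustrative)::
def get_user_locale(self):
user = self.current_user
if user and user.get("locale"):
return locale.get(user["locale"])
return None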
"""
return None
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
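# e.g. "da, en-gb;q=0.8, en;q=0.7" is scored as
# [("da", 1.0), ("en-gb", 0.8), ("en", 0.7)]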
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
@current_user.setter
def current_user(self, value):
self._current_user = value
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
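# Example override (an illustrative sketch; assumes the application's
# ``cookie_secret`` setting is configured so signed cookies work):
#
#     def get_current_user(self):
#         return self.get_secure_cookie("user")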
def get_login_url(self):
"""Override to customize the login URL based on the request.
By default, we use the ``login_url`` application setting.
"""
self.require_setting("login_url", "@tornado.web.authenticated")
return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the ``template_path`` application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(uuid.uuid4().bytes)
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
To prevent cross-site request forgery, we set an ``_xsrf``
cookie and include the same value as a non-cookie
field with all ``POST`` requests. If the two do not match, we
reject the form submission as a potential forgery.
The ``_xsrf`` value may be set as either a form field named ``_xsrf``
or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
(the latter is accepted for compatibility with Django).
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
Prior to release 1.1.1, this check was ignored if the HTTP header
``X-Requested-With: XMLHTTPRequest`` was present. This exception
has been shown to be insecure and has been removed. For more
information please see
http://www.djangoproject.com/weblog/2011/feb/08/security/
http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
"""
token = (self.get_argument("_xsrf", None) or
self.request.headers.get("X-Xsrftoken") or
self.request.headers.get("X-Csrftoken"))
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
if self.xsrf_token != token:
raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
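Example application setup enabling the check (the handler list is
illustrative)::
application = web.Application(handlers, xsrf_cookies=True)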
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
def static_url(self, path, include_host=None, **kwargs):
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
application (which specifies the root directory of your static
files).
This method returns a versioned url (by default appending
``?v=<signature>``), which allows the static files to be
cached indefinitely. This can be disabled by passing
``include_version=False`` (in the default implementation;
other static file implementations are not required to support
this, but they may support other options).
By default this method returns URLs relative to the current
host, but if ``include_host`` is true the URL returned will be
absolute. If this handler has an ``include_host`` attribute,
that value will be used as the default for all `static_url`
calls that do not pass ``include_host`` as a keyword argument.
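For example, in a template (the image path is illustrative)::
<img src="{{ static_url('images/logo.png') }}">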
"""
self.require_setting("static_path", "static_url")
get_url = self.settings.get("static_handler_class",
StaticFileHandler).make_static_url
if include_host is None:
include_host = getattr(self, "include_host", False)
if include_host:
base = self.request.protocol + "://" + self.request.host
else:
base = ""
return base + get_url(self.settings, path, **kwargs)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
def reverse_url(self, name, *args):
"""Alias for `Application.reverse_url`."""
return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
By default uses a hash of the content written so far.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
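For example, to disable etags entirely (a minimal sketch)::
def compute_etag(self):
return None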
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
def set_etag_header(self):
"""Sets the response's Etag header using ``self.compute_etag()``.
Note: no header will be set if ``compute_etag()`` returns ``None``.
This method is called automatically when the request is finished.
"""
etag = self.compute_etag()
if etag is not None:
self.set_header("Etag", etag)
def check_etag_header(self):
"""Checks the ``Etag`` header against requests's ``If-None-Match``.
Returns ``True`` if the request's Etag matches and a 304 should be
returned. For example::
self.set_etag_header()
if self.check_etag_header():
self.set_status(304)
return
This method is called automatically when the request is finished,
but may be called earlier for applications that override
`compute_etag` and want to do an early check for ``If-None-Match``
before completing the request. The ``Etag`` header should be set
(perhaps with `set_etag_header`) before calling this method.
"""
etag = self._headers.get("Etag")
inm = utf8(self.request.headers.get("If-None-Match", ""))
return bool(etag and inm and inm.find(etag) >= 0)
def _stack_context_handle_exception(self, type, value, traceback):
try:
# For historical reasons _handle_request_exception only takes
# the exception value instead of the full triple,
# so re-raise the exception to ensure that it's in
# sys.exc_info()
raise_exc_info((type, value, traceback))
except Exception:
self._handle_request_exception(value)
return True
def _execute(self, transforms, *args, **kwargs):
"""Executes this request with the given output transforms."""
self._transforms = transforms
try:
if self.request.method not in self.SUPPORTED_METHODS:
raise HTTPError(405)
self.path_args = [self.decode_argument(arg) for arg in args]
self.path_kwargs = dict((k, self.decode_argument(v, name=k))
for (k, v) in kwargs.items())
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
self.application.settings.get("xsrf_cookies"):
self.check_xsrf_cookie()
self._when_complete(self.prepare(), self._execute_method)
except Exception as e:
self._handle_request_exception(e)
def _when_complete(self, result, callback):
try:
if result is None:
callback()
elif isinstance(result, Future):
if result.done():
if result.result() is not None:
raise ValueError('Expected None, got %r' % result.result())
callback()
else:
# Delayed import of IOLoop because it's not available
# on app engine
from tornado.ioloop import IOLoop
IOLoop.current().add_future(
result, functools.partial(self._when_complete,
callback=callback))
else:
raise ValueError("Expected Future or None, got %r" % result)
except Exception as e:
self._handle_request_exception(e)
def _execute_method(self):
if not self._finished:
method = getattr(self, self.request.method.lower())
self._when_complete(method(*self.path_args, **self.path_kwargs),
self._execute_finish)
def _execute_finish(self):
if self._auto_finish and not self._finished:
self.finish()
def _generate_headers(self):
reason = self._reason
lines = [utf8(self.request.version + " " +
str(self._status_code) +
" " + reason)]
lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
if hasattr(self, "_new_cookie"):
for cookie in self._new_cookie.values():
lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
return b"\r\n".join(lines) + b"\r\n\r\n"
def _log(self):
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
self.log_exception(*sys.exc_info())
if self._finished:
# Extra errors after the request has been finished should
# be logged, but there is no reason to continue to try and
# send a response.
return
if isinstance(e, HTTPError):
if e.status_code not in httputil.responses and not e.reason:
gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
def log_exception(self, typ, value, tb):
"""Override to customize logging of uncaught exceptions.
By default logs instances of `HTTPError` as warnings without
stack traces (on the ``tornado.general`` logger), and all
other exceptions as errors with stack traces (on the
``tornado.application`` logger).
.. versionadded:: 3.1
"""
if isinstance(value, HTTPError):
if value.log_message:
format = "%d %s: " + value.log_message
args = ([value.status_code, self._request_summary()] +
list(value.args))
gen_log.warning(format, *args)
else:
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
This decorator is unnecessary if the method is also decorated with
``@gen.coroutine`` (it is legal but unnecessary to use the two
decorators together, in which case ``@asynchronous`` must be
first).
This decorator should only be applied to the :ref:`HTTP verb
methods <verbs>`; its behavior is undefined for any other method.
This decorator does not *make* a method asynchronous; it tells
the framework that the method *is* asynchronous. For this decorator
to be useful the method must (at least sometimes) do something
asynchronous.
If this decorator is given, the response is not finished when the
method returns. It is up to the request handler to call
`self.finish() <RequestHandler.finish>` to finish the HTTP
request. Without this decorator, the request is automatically
finished when the ``get()`` or ``post()`` method returns. Example::
class MyRequestHandler(web.RequestHandler):
@web.asynchronous
def get(self):
http = httpclient.AsyncHTTPClient()
http.fetch("http://friendfeed.com/", self._on_download)
def _on_download(self, response):
self.write("Downloaded!")
self.finish()
.. versionadded:: 3.1
The ability to use ``@gen.coroutine`` without ``@asynchronous``.
"""
# Delay the IOLoop import because it's not available on app engine.
from tornado.ioloop import IOLoop
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.application._wsgi:
raise Exception("@asynchronous is not supported for WSGI apps")
self._auto_finish = False
with stack_context.ExceptionStackContext(
self._stack_context_handle_exception):
result = method(self, *args, **kwargs)
if isinstance(result, Future):
# If @asynchronous is used with @gen.coroutine, (but
# not @gen.engine), we can automatically finish the
# request when the future resolves. Additionally,
# the Future will swallow any exceptions so we need
# to throw them back out to the stack context to finish
# the request.
def future_complete(f):
f.result()
if not self._finished:
self.finish()
IOLoop.current().add_future(result, future_complete)
return result
return wrapper
def removeslash(method):
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/*'`` in conjunction with using the decorator.
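Example (an illustrative sketch)::
class FooHandler(RequestHandler):
@removeslash
def get(self):
self.write("foo")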
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path.rstrip("/")
if uri: # don't try to redirect '/' to ''
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
else:
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
def addslash(method):
"""Use this decorator to add a missing trailing slash to the request path.
For example, a request to ``/foo`` would redirect to ``/foo/`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/?'`` in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path + "/"
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application::
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of `URLSpec` objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
Each tuple can contain an optional third element, which should be
a dictionary if it is present. That dictionary is passed as
keyword arguments to the constructor of the handler. This pattern
is used for the `StaticFileHandler` in this example (note that a
`StaticFileHandler` can be installed automatically with the
static_path setting described below)::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the `add_handlers` method, which takes in
a host regular expression as the first argument::
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the ``static_path`` setting
as a keyword argument. We will serve those files from the
``/static/`` URI (this is configurable with the
``static_url_prefix`` setting), and we will serve ``/favicon.ico``
and ``/robots.txt`` from the same directory. A custom subclass of
`StaticFileHandler` can be specified with the
``static_handler_class`` setting.
"""
def __init__(self, handlers=None, default_host="", transforms=None,
wsgi=False, **settings):
if transforms is None:
self.transforms = []
if settings.get("gzip"):
self.transforms.append(GZipContentEncoding)
self.transforms.append(ChunkedTransferEncoding)
else:
self.transforms = transforms
self.handlers = []
self.named_handlers = {}
self.default_host = default_host
self.settings = settings
self.ui_modules = {'linkify': _linkify,
'xsrf_form_html': _xsrf_form_html,
'Template': TemplateModule,
}
self.ui_methods = {}
self._wsgi = wsgi
self._load_ui_modules(settings.get("ui_modules", {}))
self._load_ui_methods(settings.get("ui_methods", {}))
if self.settings.get("static_path"):
path = self.settings["static_path"]
handlers = list(handlers or [])
static_url_prefix = settings.get("static_url_prefix",
"/static/")
static_handler_class = settings.get("static_handler_class",
StaticFileHandler)
static_handler_args = settings.get("static_handler_args", {})
static_handler_args['path'] = path
for pattern in [re.escape(static_url_prefix) + r"(.*)",
r"/(favicon\.ico)", r"/(robots\.txt)"]:
handlers.insert(0, (pattern, static_handler_class,
static_handler_args))
if handlers:
self.add_handlers(".*$", handlers)
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi:
from tornado import autoreload
autoreload.start()
def listen(self, port, address="", **kwargs):
"""Starts an HTTP server for this application on the given port.
This is a convenience alias for creating an `.HTTPServer`
object and calling its listen method. Keyword arguments not
supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
`.HTTPServer` constructor. For advanced uses
(e.g. multi-process mode), do not use this method; create an
`.HTTPServer` and call its
`.TCPServer.bind`/`.TCPServer.start` methods directly.
Note that after calling this method you still need to call
``IOLoop.instance().start()`` to start the server.
"""
# import is here rather than top level because HTTPServer
# is not importable on appengine
from tornado.httpserver import HTTPServer
server = HTTPServer(self, **kwargs)
server.listen(port, address)
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list.
Host patterns are processed sequentially in the order they were
added. All matching patterns will be considered.
"""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
self.handlers.insert(-1, (re.compile(host_pattern), handlers))
else:
self.handlers.append((re.compile(host_pattern), handlers))
for spec in host_handlers:
if isinstance(spec, (tuple, list)):
assert len(spec) in (2, 3)
pattern = spec[0]
handler = spec[1]
if isinstance(handler, str):
# import the Module and instantiate the class
# Must be a fully qualified name (module.ClassName)
handler = import_object(handler)
if len(spec) == 3:
kwargs = spec[2]
else:
kwargs = {}
spec = URLSpec(pattern, handler, kwargs)
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
app_log.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
def add_transform(self, transform_class):
self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
matches = []
for pattern, handlers in self.handlers:
if pattern.match(host):
matches.extend(handlers)
# Look for default host if not behind load balancer (for debugging)
if not matches and "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
matches.extend(handlers)
return matches or None
def _load_ui_methods(self, methods):
if isinstance(methods, types.ModuleType):
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.items():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if isinstance(modules, types.ModuleType):
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.items():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
def __call__(self, request):
"""Called by HTTPServer to execute the request."""
transforms = [t(request) for t in self.transforms]
handler = None
args = []
kwargs = {}
handlers = self._get_host_handlers(request)
if not handlers:
handler = RedirectHandler(
self, request, url="http://" + self.default_host + "/")
else:
for spec in handlers:
match = spec.regex.match(request.path)
if match:
handler = spec.handler_class(self, request, **spec.kwargs)
if spec.regex.groups:
# None-safe wrapper around url_unescape to handle
# unmatched optional groups correctly
def unquote(s):
if s is None:
return s
return escape.url_unescape(s, encoding=None,
plus=False)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
# we want to use either groups or groupdict but not both.
# Note that args are passed as bytes so the handler can
# decide what encoding to use.
if spec.regex.groupindex:
kwargs = dict(
(str(k), unquote(v))
for (k, v) in match.groupdict().items())
else:
args = [unquote(s) for s in match.groups()]
break
if not handler:
handler = ErrorHandler(self, request, status_code=404)
# In debug mode, re-compile templates and reload static files on every
# request so you don't need to restart to see changes
if self.settings.get("debug"):
with RequestHandler._template_loader_lock:
for loader in RequestHandler._template_loaders.values():
loader.reset()
StaticFileHandler.reset()
handler._execute(transforms, *args, **kwargs)
return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named ``name``
The handler must be added to the application as a named `URLSpec`.
Args will be substituted for capturing groups in the `URLSpec` regex.
They will be converted to strings if necessary, encoded as utf8,
and url-escaped.
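For example, given a named `URLSpec` (the handler name is
illustrative)::
application = web.Application([
url(r"/article/([0-9]+)", ArticleHandler, name="article"),
])
application.reverse_url("article", 42)  # returns "/article/42"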
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
def log_request(self, handler):
"""Writes a completed HTTP request to the logs.
By default writes to the python root logger. To change
this behavior either subclass Application and override this method,
or pass a function in the application settings dictionary as
``log_function``.
"""
if "log_function" in self.settings:
self.settings["log_function"](handler)
return
if handler.get_status() < 400:
log_method = access_log.info
elif handler.get_status() < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response.
Raising an `HTTPError` is a convenient alternative to calling
`RequestHandler.send_error` since it automatically ends the
current function.
:arg int status_code: HTTP status code. Must be listed in
`httplib.responses <http.client.responses>` unless the ``reason``
keyword argument is given.
:arg string log_message: Message to be written to the log for this error
(will not be shown to the user unless the `Application` is in debug
mode). May contain ``%s``-style placeholders, which will be filled
in with remaining positional parameters.
:arg string reason: Keyword-only argument. The HTTP "reason" phrase
to pass in the status line along with ``status_code``. Normally
determined automatically from ``status_code``, but can be set
explicitly when using a non-standard numeric code.
"""
def __init__(self, status_code, log_message=None, *args, **kwargs):
self.status_code = status_code
self.log_message = log_message
self.args = args
self.reason = kwargs.get('reason', None)
def __str__(self):
message = "HTTP %d: %s" % (
self.status_code,
self.reason or httputil.responses.get(self.status_code, 'Unknown'))
if self.log_message:
return message + " (" + (self.log_message % self.args) + ")"
else:
return message
class MissingArgumentError(HTTPError):
"""Exception raised by `RequestHandler.get_argument`.
This is a subclass of `HTTPError`, so if it is uncaught a 400 response
code will be used instead of 500 (and a stack trace will not be logged).
.. versionadded:: 3.1
"""
def __init__(self, arg_name):
super(MissingArgumentError, self).__init__(
400, 'Missing argument %s' % arg_name)
self.arg_name = arg_name
class ErrorHandler(RequestHandler):
"""Generates an error response with ``status_code`` for all requests."""
def initialize(self, status_code):
self.set_status(status_code)
def prepare(self):
raise HTTPError(self._status_code)
def check_xsrf_cookie(self):
# POSTs to an ErrorHandler don't actually have side effects,
# so we don't need to check the xsrf token. This allows POSTs
# to the wrong url to return a 404 instead of 403.
pass
class RedirectHandler(RequestHandler):
"""Redirects the client to the given URL for all GET requests.
You should provide the keyword argument ``url`` to the handler, e.g.::
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
"""
def initialize(self, url, permanent=True):
self._url = url
self._permanent = permanent
def get(self):
self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
A `StaticFileHandler` is configured automatically if you pass the
``static_path`` keyword argument to `Application`. This handler
can be customized with the ``static_url_prefix``, ``static_handler_class``,
and ``static_handler_args`` settings.
To map an additional path to this handler for a static data directory
you would add a line to your application like::
application = web.Application([
(r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The handler constructor requires a ``path`` argument, which specifies the
local root directory of the content to be served.
Note that a capture group in the regex is required to parse the value for
the ``path`` argument to the get() method (different from the constructor
argument above); see `URLSpec` for details.
To maximize the effectiveness of browser caching, this class supports
versioned urls (by default using the argument ``?v=``). If a version
is given, we instruct the browser to cache this file indefinitely.
`make_static_url` (also available as `RequestHandler.static_url`) can
be used to construct a versioned url.
This handler is intended primarily for use in development and light-duty
file serving; for heavy traffic it will be more efficient to use
a dedicated static file server (such as nginx or Apache). We support
the HTTP ``Accept-Ranges`` mechanism to return partial content (because
some browsers require this functionality to be present to seek in
HTML5 audio or video), but this handler should not be used with
files that are too large to fit comfortably in memory.
**Subclassing notes**
This class is designed to be extensible by subclassing, but because
of the way static urls are generated with class methods rather than
instance methods, the inheritance patterns are somewhat unusual.
Be sure to use the ``@classmethod`` decorator when overriding a
class method. Instance methods may use the attributes ``self.path``,
``self.absolute_path``, and ``self.modified``.
To change the way static urls are generated (e.g. to match the behavior
of another server or CDN), override `make_static_url`, `parse_url_path`,
`get_cache_time`, and/or `get_version`.
To replace all interaction with the filesystem (e.g. to serve
static content from a database), override `get_content`,
`get_content_size`, `get_modified_time`, `get_absolute_path`, and
`validate_absolute_path`.
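For example, a minimal sketch that rewrites static urls to point at a
hypothetical CDN host::
class CDNStaticFileHandler(StaticFileHandler):
@classmethod
def make_static_url(cls, settings, path, include_version=True):
return "https://cdn.example.com/" + path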
.. versionchanged:: 3.1
Many of the methods for subclasses were added in Tornado 3.1.
"""
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {}
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path, default_filename=None):
self.root = path
self.default_filename = default_filename
@classmethod
def reset(cls):
with cls._lock:
cls._static_hashes = {}
def head(self, path):
self.get(path, include_body=False)
def get(self, path, include_body=True):
# Set up our path instance variables.
self.path = self.parse_url_path(path)
del path # make sure we don't refer to path instead of self.path again
absolute_path = self.get_absolute_path(self.root, self.path)
self.absolute_path = self.validate_absolute_path(
self.root, absolute_path)
if self.absolute_path is None:
return
self.modified = self.get_modified_time()
self.set_headers()
if self.should_return_304():
self.set_status(304)
return
request_range = None
range_header = self.request.headers.get("Range")
if range_header:
# As per RFC 2616 14.16, if an invalid Range header is specified,
# the request will be treated as if the header didn't exist.
request_range = httputil._parse_request_range(range_header)
if request_range:
start, end = request_range
size = self.get_content_size()
if (start is not None and start >= size) or end == 0:
# As per RFC 2616 14.35.1, a range is unsatisfiable only if
# the first requested byte is equal to or greater than the
# content length, or when a suffix with length 0 is specified
self.set_status(416) # Range Not Satisfiable
self.set_header("Content-Type", "text/plain")
self.set_header("Content-Range", "bytes */%s" %(size, ))
return
if start is not None and start < 0:
start += size
if end is not None and end > size:
# Clients sometimes blindly use a large range to limit their
# download size; cap the endpoint at the actual file size.
end = size
# Note: only return HTTP 206 if less than the entire range has been
# requested. Not only is this semantically correct, but Chrome
# refuses to play audio if it gets an HTTP 206 in response to
# ``Range: bytes=0-``.
if size != (end or size) - (start or 0):
self.set_status(206) # Partial Content
self.set_header("Content-Range",
httputil._get_content_range(start, end, size))
else:
start = end = None
content = self.get_content(self.absolute_path, start, end)
if isinstance(content, bytes_type):
content = [content]
content_length = 0
for chunk in content:
if include_body:
self.write(chunk)
else:
content_length += len(chunk)
if not include_body:
assert self.request.method == "HEAD"
self.set_header("Content-Length", content_length)
def compute_etag(self):
"""Sets the ``Etag`` header based on static url version.
This allows efficient ``If-None-Match`` checks against cached
versions, and sends the correct ``Etag`` for a partial response
(i.e. the same ``Etag`` as the full file).
.. versionadded:: 3.1
"""
version_hash = self._get_cached_version(self.absolute_path)
if not version_hash:
return None
return '"%s"' % (version_hash, )
def set_headers(self):
"""Sets the content and caching headers on the response.
.. versionadded:: 3.1
"""
self.set_header("Accept-Ranges", "bytes")
self.set_etag_header()
if self.modified is not None:
self.set_header("Last-Modified", self.modified)
content_type = self.get_content_type()
if content_type:
self.set_header("Content-Type", content_type)
cache_time = self.get_cache_time(self.path, self.modified, content_type)
if cache_time > 0:
self.set_header("Expires", datetime.datetime.utcnow() +
datetime.timedelta(seconds=cache_time))
self.set_header("Cache-Control", "max-age=" + str(cache_time))
self.set_extra_headers(self.path)
def should_return_304(self):
"""Returns True if the headers indicate that we should return 304.
.. versionadded:: 3.1
"""
if self.check_etag_header():
return True
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if date_tuple is not None:
if_since = datetime.datetime(*date_tuple[:6])
if if_since >= self.modified:
return True
return False
@classmethod
def get_absolute_path(cls, root, path):
"""Returns the absolute location of ``path`` relative to ``root``.
``root`` is the path configured for this `StaticFileHandler`
(in most cases the ``static_path`` `Application` setting).
This class method may be overridden in subclasses. By default
it returns a filesystem path, but other strings may be used
as long as they are unique and understood by the subclass's
overridden `get_content`.
.. versionadded:: 3.1
"""
abspath = os.path.abspath(os.path.join(root, path))
return abspath
def validate_absolute_path(self, root, absolute_path):
"""Validate and return the absolute path.
``root`` is the configured path for the `StaticFileHandler`,
and ``absolute_path`` is the result of `get_absolute_path`.
This is an instance method called during request processing,
so it may raise `HTTPError` or use methods like
`RequestHandler.redirect` (return None after redirecting to
halt further processing). This is where 404 errors for missing files
are generated.
This method may modify the path before returning it, but note that
any such modifications will not be understood by `make_static_url`.
In instance methods, this method's result is available as
``self.absolute_path``.
.. versionadded:: 3.1
"""
root = os.path.abspath(root)
# os.path.abspath strips a trailing /
# it needs to be temporarily added back for requests to root/
if not (absolute_path + os.path.sep).startswith(root):
raise HTTPError(403, "%s is not in root static directory",
self.path)
if (os.path.isdir(absolute_path) and
self.default_filename is not None):
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
self.redirect(self.request.path + "/", permanent=True)
return
absolute_path = os.path.join(absolute_path, self.default_filename)
if not os.path.exists(absolute_path):
raise HTTPError(404)
if not os.path.isfile(absolute_path):
raise HTTPError(403, "%s is not a file", self.path)
return absolute_path
@classmethod
def get_content(cls, abspath, start=None, end=None):
"""Retrieve the content of the requested resource which is located
at the given absolute path.
This class method may be overridden by subclasses. Note that its
signature is different from other overridable class methods
(no ``settings`` argument); this is deliberate to ensure that
``abspath`` is able to stand on its own as a cache key.
This method should either return a byte string or an iterator
of byte strings. The latter is preferred for large files
as it helps reduce memory fragmentation.
.. versionadded:: 3.1
"""
with open(abspath, "rb") as file:
if start is not None:
file.seek(start)
if end is not None:
remaining = end - (start or 0)
else:
remaining = None
while True:
chunk_size = 64 * 1024
if remaining is not None and remaining < chunk_size:
chunk_size = remaining
chunk = file.read(chunk_size)
if chunk:
if remaining is not None:
remaining -= len(chunk)
yield chunk
else:
if remaining is not None:
assert remaining == 0
return
@classmethod
def get_content_version(cls, abspath):
"""Returns a version string for the resource at the given path.
This class method may be overridden by subclasses. The
default implementation is a hash of the file's contents.
.. versionadded:: 3.1
"""
data = cls.get_content(abspath)
hasher = hashlib.md5()
if isinstance(data, bytes_type):
hasher.update(data)
else:
for chunk in data:
hasher.update(chunk)
return hasher.hexdigest()
def _stat(self):
if not hasattr(self, '_stat_result'):
self._stat_result = os.stat(self.absolute_path)
return self._stat_result
def get_content_size(self):
"""Retrieve the total size of the resource at the given path.
This method may be overridden by subclasses. It will only
be called if a partial result is requested from `get_content`
.. versionadded:: 3.1
"""
stat_result = self._stat()
return stat_result[stat.ST_SIZE]
def get_modified_time(self):
"""Returns the time that ``self.absolute_path`` was last modified.
May be overridden in subclasses. Should return a `~datetime.datetime`
object or None.
.. versionadded:: 3.1
"""
stat_result = self._stat()
modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
return modified
def get_content_type(self):
"""Returns the ``Content-Type`` header to be used for this request.
.. versionadded:: 3.1
"""
mime_type, encoding = mimetypes.guess_type(self.absolute_path)
return mime_type
def set_extra_headers(self, path):
"""For subclass to add extra headers to the response"""
pass
def get_cache_time(self, path, modified, mime_type):
"""Override to customize cache control behavior.
Return a positive number of seconds to make the result
cacheable for that amount of time or 0 to mark resource as
cacheable for an unspecified amount of time (subject to
browser heuristics).
By default returns cache expiry of 10 years for resources requested
with ``v`` argument.
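For example, a sketch that additionally caches unversioned resources
for one hour::
def get_cache_time(self, path, modified, mime_type):
if "v" in self.request.arguments:
return self.CACHE_MAX_AGE
return 3600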
"""
return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
@classmethod
def make_static_url(cls, settings, path, include_version=True):
"""Constructs a versioned url for the given path.
This method may be overridden in subclasses (but note that it
is a class method rather than an instance method). Subclasses
are only required to implement the signature
``make_static_url(cls, settings, path)``; other keyword
arguments may be passed through `~RequestHandler.static_url`
but are not standard.
``settings`` is the `Application.settings` dictionary. ``path``
is the static path being requested. The url returned should be
relative to the current host.
``include_version`` determines whether the generated URL should
include the query string containing the version hash of the
file corresponding to the given ``path``.
"""
url = settings.get('static_url_prefix', '/static/') + path
if not include_version:
return url
version_hash = cls.get_version(settings, path)
if not version_hash:
return url
return '%s?v=%s' % (url, version_hash)
def parse_url_path(self, url_path):
"""Converts a static URL path into a filesystem path.
``url_path`` is the path component of the URL with
``static_url_prefix`` removed. The return value should be a
filesystem path relative to ``static_path``.
This is the inverse of `make_static_url`.
"""
if os.path.sep != "/":
url_path = url_path.replace("/", os.path.sep)
return url_path
@classmethod
def get_version(cls, settings, path):
"""Generate the version string to be used in static URLs.
``settings`` is the `Application.settings` dictionary and ``path``
is the relative location of the requested asset on the filesystem.
The returned value should be a string, or ``None`` if no version
could be determined.
.. versionchanged:: 3.1
This method was previously recommended for subclasses to override;
`get_content_version` is now preferred as it allows the base
class to handle caching of the result.
"""
abs_path = cls.get_absolute_path(settings['static_path'], path)
return cls._get_cached_version(abs_path)
@classmethod
def _get_cached_version(cls, abs_path):
with cls._lock:
hashes = cls._static_hashes
if abs_path not in hashes:
try:
hashes[abs_path] = cls.get_content_version(abs_path)
except Exception:
gen_log.error("Could not open static file %r", abs_path)
hashes[abs_path] = None
hsh = hashes.get(abs_path)
if hsh:
return hsh
return None
class FallbackHandler(RequestHandler):
"""A `RequestHandler` that wraps another HTTP server callback.
The fallback is a callable object that accepts an
`~.httpserver.HTTPRequest`, such as an `Application` or
`tornado.wsgi.WSGIContainer`. This is most useful to use both
Tornado ``RequestHandlers`` and WSGI in the same server. Typical
usage::
wsgi_app = tornado.wsgi.WSGIContainer(
django.core.handlers.wsgi.WSGIHandler())
application = tornado.web.Application([
(r"/foo", FooHandler),
(r".*", FallbackHandler, dict(fallback=wsgi_app),
])
"""
def initialize(self, fallback):
self.fallback = fallback
def prepare(self):
self.fallback(self.request)
self._finished = True
class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
ChunkedTransferEncoding example below if you want to implement a
new Transform.
"""
def __init__(self, request):
pass
def transform_first_chunk(self, status_code, headers, chunk, finishing):
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
class GZipContentEncoding(OutputTransform):
"""Applies the gzip content encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
"text/plain", "text/html", "text/css", "text/xml", "application/javascript",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
def __init__(self, request):
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
def transform_first_chunk(self, status_code, headers, chunk, finishing):
if 'Vary' in headers:
headers['Vary'] += b', Accept-Encoding'
else:
headers['Vary'] = b'Accept-Encoding'
if self._gzipping:
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
(finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
self._gzip_value = BytesIO()
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
chunk = self.transform_chunk(chunk, finishing)
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
if self._gzipping:
self._gzip_file.write(chunk)
if finishing:
self._gzip_file.close()
else:
self._gzip_file.flush()
chunk = self._gzip_value.getvalue()
self._gzip_value.truncate(0)
self._gzip_value.seek(0)
return chunk
class ChunkedTransferEncoding(OutputTransform):
"""Applies the chunked transfer encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
"""
def __init__(self, request):
self._chunking = request.supports_http_1_1()
def transform_first_chunk(self, status_code, headers, chunk, finishing):
# 304 responses have no body (not even a zero-length body), and so
# should not have either Content-Length or Transfer-Encoding headers.
if self._chunking and status_code != 304:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
return status_code, headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
# Don't write out empty chunks because that means END-OF-STREAM
# with chunked encoding
if block:
block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n"
if finishing:
block += b"0\r\n\r\n"
return block
def authenticated(method):
"""Decorate methods with this to require that the user be logged in.
If the user is not logged in, they will be redirected to the configured
`login url <RequestHandler.get_login_url>`.
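Example (an illustrative sketch; the template name is hypothetical)::
class ProfileHandler(RequestHandler):
@authenticated
def get(self):
self.render("profile.html", user=self.current_user)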
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method in ("GET", "HEAD"):
url = self.get_login_url()
if "?" not in url:
if urlparse.urlsplit(url).scheme:
# if login url is absolute, make next absolute too
next_url = self.request.full_url()
else:
next_url = self.request.uri
url += "?" + urlencode(dict(next=next_url))
self.redirect(url)
return
raise HTTPError(403)
return method(self, *args, **kwargs)
return wrapper
class UIModule(object):
"""A re-usable, modular UI unit on a page.
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
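Example (a minimal sketch; the template name is hypothetical)::
class Entry(UIModule):
def render(self, entry):
return self.render_string("module-entry.html", entry=entry)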
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.ui = handler.ui
self.locale = handler.locale
@property
def current_user(self):
return self.handler.current_user
def render(self, *args, **kwargs):
"""Overridden in subclasses to return this module's output."""
raise NotImplementedError()
def embedded_javascript(self):
"""Returns a JavaScript string that will be embedded in the page."""
return None
def javascript_files(self):
"""Returns a list of JavaScript files required by this module."""
return None
def embedded_css(self):
"""Returns a CSS string that will be embedded in the page."""
return None
def css_files(self):
"""Returns a list of CSS files required by this module."""
return None
def html_head(self):
"""Returns a CSS string that will be put in the <head/> element"""
return None
def html_body(self):
"""Returns an HTML string that will be put in the <body/> element"""
return None
def render_string(self, path, **kwargs):
"""Renders a template and returns it as a string."""
return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
def render(self, text, **kwargs):
return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
def render(self):
return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
"""UIModule that simply renders the given template.
{% module Template("foo.html") %} is similar to {% include "foo.html" %},
but the module version gets its own namespace (with kwargs passed to
Template()) instead of inheriting the outer template's namespace.
Templates rendered through this module also get access to UIModule's
automatic javascript/css features. Simply call set_resources
inside the template and give it keyword arguments corresponding to
the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
Note that these resources are output once per template file, not once
per instantiation of the template, so they must not depend on
any arguments to the template.
"""
def __init__(self, handler):
super(TemplateModule, self).__init__(handler)
# keep resources in both a list and a dict to preserve order
self._resource_list = []
self._resource_dict = {}
def render(self, path, **kwargs):
def set_resources(**kwargs):
if path not in self._resource_dict:
self._resource_list.append(kwargs)
self._resource_dict[path] = kwargs
else:
if self._resource_dict[path] != kwargs:
raise ValueError("set_resources called with different "
"resources for the same template")
return ""
return self.render_string(path, set_resources=set_resources,
**kwargs)
def _get_resources(self, key):
return (r[key] for r in self._resource_list if key in r)
def embedded_javascript(self):
return "\n".join(self._get_resources("embedded_javascript"))
def javascript_files(self):
result = []
for f in self._get_resources("javascript_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def embedded_css(self):
return "\n".join(self._get_resources("embedded_css"))
def css_files(self):
result = []
for f in self._get_resources("css_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def html_head(self):
return "".join(self._get_resources("html_head"))
def html_body(self):
return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(self, handler, ui_modules):
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key):
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key):
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
def __init__(self, pattern, handler_class, kwargs=None, name=None):
"""Parameters:
* ``pattern``: Regular expression to be matched. Any groups
in the regex will be passed in to the handler's get/post/etc
methods as arguments.
* ``handler_class``: `RequestHandler` subclass to be invoked.
* ``kwargs`` (optional): A dictionary of additional arguments
to be passed to the handler's constructor.
* ``name`` (optional): A name for this handler. Used by
`Application.reverse_url`.
"""
if not pattern.endswith('$'):
pattern += '$'
self.regex = re.compile(pattern)
assert len(self.regex.groupindex) in (0, self.regex.groups), \
("groups in url regexes must either be all named or all "
"positional: %r" % self.regex.pattern)
self.handler_class = handler_class
self.kwargs = kwargs or {}
self.name = name
self._path, self._group_count = self._find_groups()
def __repr__(self):
return '%s(%r, %s, kwargs=%r, name=%r)' % \
(self.__class__.__name__, self.regex.pattern,
self.handler_class, self.kwargs, self.name)
def _find_groups(self):
"""Returns a tuple (reverse string, group count) for a url.
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
would return ('/%s/%s/', 2).
"""
pattern = self.regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
if pattern.endswith('$'):
pattern = pattern[:-1]
if self.regex.groups != pattern.count('('):
# The pattern is too complicated for our simplistic matching,
# so we can't support reversing it.
return (None, None)
pieces = []
for fragment in pattern.split('('):
if ')' in fragment:
paren_loc = fragment.index(')')
if paren_loc >= 0:
pieces.append('%s' + fragment[paren_loc + 1:])
else:
pieces.append(fragment)
return (''.join(pieces), self.regex.groups)
def reverse(self, *args):
assert self._path is not None, \
"Cannot reverse url regex " + self.regex.pattern
assert len(args) == self._group_count, "required number of arguments "\
"not found"
if not len(args):
return self._path
converted_args = []
for a in args:
if not isinstance(a, (unicode_type, bytes_type)):
a = str(a)
converted_args.append(escape.url_escape(utf8(a), plus=False))
return self._path % tuple(converted_args)
url = URLSpec
if hasattr(hmac, 'compare_digest'): # python 3.3
_time_independent_equals = hmac.compare_digest
else:
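# Fallback constant-time comparison: XOR each byte pair and OR the
# results together, so the running time does not depend on where the
# first mismatch occurs (avoids leaking signature bytes via timing).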
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
if isinstance(a[0], int): # python3 byte strings
for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def create_signed_value(secret, name, value):
timestamp = utf8(str(int(time.time())))
value = base64.b64encode(utf8(value))
signature = _create_signature(secret, name, value, timestamp)
value = b"|".join([value, timestamp, signature])
return value
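# The resulting value has the form
# b"<base64 payload>|<unix timestamp>|<hmac-sha1 hexdigest>",
# e.g. b"dXNlcg==|1300000000|<40 hex digits>" (illustrative).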
def decode_signed_value(secret, name, value, max_age_days=31):
if not value:
return None
parts = utf8(value).split(b"|")
if len(parts) != 3:
return None
signature = _create_signature(secret, name, parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
gen_log.warning("Invalid cookie signature %r", value)
return None
timestamp = int(parts[1])
if timestamp < time.time() - max_age_days * 86400:
gen_log.warning("Expired cookie %r", value)
return None
if timestamp > time.time() + 31 * 86400:
# _create_signature does not hash a delimiter between the
# parts of the cookie, so an attacker could transfer trailing
# digits from the payload to the timestamp without altering the
# signature. For backwards compatibility, sanity-check timestamp
# here instead of modifying _create_signature.
gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
return None
if parts[1].startswith(b"0"):
gen_log.warning("Tampered cookie %r", value)
return None
try:
return base64.b64decode(parts[0])
except Exception:
return None
def _create_signature(secret, *parts):
hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
for part in parts:
hash.update(utf8(part))
return utf8(hash.hexdigest())
|
glenux/contrib-mypaint
|
refs/heads/master
|
gui/colors/paletteview.py
|
2
|
# This file is part of MyPaint.
# Copyright (C) 2012 by Andrew Chadwick <andrewc-git@piffle.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Viewer and editor widgets for palettes."""
# Editor ideas:
# - Interpolate between two colors, into empty slots
# - "Insert lighter/darker copy of row".
# - repack palette (remove duplicates and blanks)
# - sort palette by approx. hue+chroma binning, then luma variations
## Imports
import math
from copy import deepcopy
import os
import re
import logging
logger = logging.getLogger(__name__)
import gi
from gi.repository import Gdk
from gi.repository import Gtk
import cairo
from gettext import gettext as _
from lib.observable import event
from util import clamp
from palette import Palette
from uicolor import RGBColor
from uicolor import HCYColor
## Imports still requiring gtk2compat
if __name__ == '__main__':
import gui.gtk2compat
from uimisc import borderless_button
from adjbases import ColorAdjuster
from adjbases import ColorAdjusterWidget
from adjbases import ColorManager
from adjbases import DATAPATH_PALETTES_SUBDIR
from combined import CombinedAdjusterPage
## Class defs
class PalettePage (CombinedAdjusterPage):
"""User-editable palette, as a `CombinedAdjuster` element.
"""
def __init__(self):
view = PaletteView()
view.grid.show_matched_color = True
view.can_select_empty = False
self._adj = view
self._edit_dialog = None
@classmethod
def get_properties_description(class_):
return _("Palette properties")
@classmethod
def get_page_icon_name(class_):
return "mypaint-tool-color-palette"
@classmethod
def get_page_title(class_):
return _("Palette")
@classmethod
def get_page_description(class_):
return _("Set the color from a loadable, editable palette.")
def get_page_widget(self):
"""Page widget: returns the PaletteView adjuster widget itself."""
# FIXME: The PaletteNext and PalettePrev actions of the main
# app require access to the PaletteView itself.
return self._adj
def set_color_manager(self, manager):
CombinedAdjusterPage.set_color_manager(self, manager)
self._adj.set_color_manager(manager)
def show_properties(self):
if self._edit_dialog is None:
toplevel = self._adj.get_toplevel()
dialog = PaletteEditorDialog(toplevel, self.get_color_manager())
self._edit_dialog = dialog
self._edit_dialog.run()
class PaletteEditorDialog (Gtk.Dialog):
"""Dialog for editing, loading and saving the current palette.
"""
def __init__(self, parent, target_color_manager):
flags = Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT
Gtk.Dialog.__init__(self, _("Palette Editor"), parent, flags,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
self.set_position(Gtk.WindowPosition.MOUSE)
assert isinstance(target_color_manager, ColorManager)
#: ColorManager containing the palette to be edited.
self._target_color_manager = target_color_manager
view = PaletteView()
view.set_size_request(400, 300)
view.grid.show_matched_color = True
view.grid.can_select_empty = True
self._view = view
#: The working ColorManager, holding a working copy of the palette
#: to be edited.
self._mgr = ColorManager(prefs={},
datapath=target_color_manager.get_data_path())
self._mgr.set_color(RGBColor(1, 1, 1))
view.set_color_manager(self._mgr)
        # Action buttons, positioned down the right hand side
action_bbox = Gtk.VButtonBox()
load_btn = self._load_button = Gtk.Button(stock=Gtk.STOCK_OPEN)
save_btn = self._save_button = Gtk.Button(stock=Gtk.STOCK_SAVE)
add_btn = self._add_button = Gtk.Button(stock=Gtk.STOCK_ADD)
remove_btn = self._remove_button = Gtk.Button(stock=Gtk.STOCK_REMOVE)
clear_btn = self._clear_button = Gtk.Button(stock=Gtk.STOCK_CLEAR)
action_bbox.pack_start(load_btn)
action_bbox.pack_start(save_btn)
action_bbox.pack_start(add_btn)
action_bbox.pack_start(remove_btn)
action_bbox.pack_start(clear_btn)
action_bbox.set_layout(Gtk.ButtonBoxStyle.START)
load_btn.connect("clicked", self._load_btn_clicked)
save_btn.connect("clicked", self._save_btn_clicked)
remove_btn.connect("clicked", self._remove_btn_clicked)
add_btn.connect("clicked", self._add_btn_clicked)
clear_btn.connect("clicked", self._clear_btn_clicked)
load_btn.set_tooltip_text(_("Load from a GIMP palette file"))
save_btn.set_tooltip_text(_("Save to a GIMP palette file"))
add_btn.set_tooltip_text(_("Add a new empty swatch"))
remove_btn.set_tooltip_text(_("Remove the current swatch"))
clear_btn.set_tooltip_text(_("Remove all swatches"))
# Button initial state and subsequent updates
remove_btn.set_sensitive(False)
self._mgr.palette.match_changed += self._palette_match_changed_cb
self._mgr.palette.info_changed += self._palette_changed_cb
self._mgr.palette.sequence_changed += self._palette_changed_cb
self._mgr.palette.color_changed += self._palette_changed_cb
# Palette name and number of entries
palette_details_hbox = Gtk.HBox()
palette_name_label = Gtk.Label(_("Name:"))
palette_name_label.set_tooltip_text(_("Name or description for"
" this palette"))
palette_name_entry = Gtk.Entry()
palette_name_entry.connect("changed", self._palette_name_changed_cb)
self._palette_name_entry = palette_name_entry
self._columns_adj = Gtk.Adjustment(
value=0, lower=0, upper=99,
step_incr=1, page_incr=1, page_size=0
)
self._columns_adj.connect("value-changed", self._columns_changed_cb)
columns_label = Gtk.Label(_("Columns:"))
        columns_label.set_tooltip_text(_("Number of columns"))
columns_spinbutton = Gtk.SpinButton(
adjustment=self._columns_adj,
climb_rate=1.5,
digits=0
)
palette_details_hbox.set_spacing(0)
palette_details_hbox.set_border_width(0)
palette_details_hbox.pack_start(palette_name_label, False, False, 0)
palette_details_hbox.pack_start(palette_name_entry, True, True, 6)
palette_details_hbox.pack_start(columns_label, False, False, 6)
palette_details_hbox.pack_start(columns_spinbutton, False, False, 0)
color_name_hbox = Gtk.HBox()
color_name_label = Gtk.Label(_("Color name:"))
color_name_label.set_tooltip_text(_("Current color's name"))
color_name_entry = Gtk.Entry()
color_name_entry.connect("changed", self._color_name_changed_cb)
color_name_entry.set_sensitive(False)
self._color_name_entry = color_name_entry
color_name_hbox.set_spacing(6)
color_name_hbox.pack_start(color_name_label, False, False, 0)
color_name_hbox.pack_start(color_name_entry, True, True, 0)
palette_vbox = Gtk.VBox()
palette_vbox.set_spacing(12)
palette_vbox.pack_start(palette_details_hbox, False, False)
palette_vbox.pack_start(view, True, True)
palette_vbox.pack_start(color_name_hbox, False, False)
# Dialog contents
# Main edit area to the left, buttons to the right
hbox = Gtk.HBox()
hbox.set_spacing(12)
hbox.pack_start(palette_vbox, True, True)
hbox.pack_start(action_bbox, False, False)
hbox.set_border_width(12)
self.vbox.pack_start(hbox, True, True)
# Dialog vbox contents must be shown separately
for w in self.vbox:
w.show_all()
self.connect("response", self._response_cb)
self.connect("show", self._show_cb)
def _show_cb(self, widget, *a):
# Each time the dialog is shown, update with the target
# palette, for editing.
self.vbox.show_all()
palette = self._target_color_manager.palette
name = palette.get_name()
if name is None:
name = ""
self._palette_name_entry.set_text(name)
self._columns_adj.set_value(palette.get_columns())
self._mgr.palette.update(palette)
def _palette_name_changed_cb(self, editable):
name = editable.get_chars(0, -1)
if name == "":
name = None
pal = self._mgr.palette
        # Avoid storing the literal string u"None" when the entry is cleared
        pal.name = unicode(name) if name is not None else None
def _columns_changed_cb(self, adj):
ncolumns = int(adj.get_value())
pal = self._mgr.palette
pal.set_columns(ncolumns)
def _color_name_changed_cb(self, editable):
name = editable.get_chars(0, -1)
palette = self._mgr.palette
i = palette.match_position
if i is None:
return
old_name = palette.get_color_name(i)
if name == "":
name = None
if name != old_name:
palette.set_color_name(i, name)
def _response_cb(self, widget, response_id):
if response_id == Gtk.ResponseType.ACCEPT:
palette = self._mgr.palette
target_palette = self._target_color_manager.palette
target_palette.update(palette)
self.hide()
return True
def _palette_match_changed_cb(self, palette):
col_name_entry = self._color_name_entry
i = palette.match_position
if i is not None:
col = palette[i]
if col is not None:
name = palette.get_color_name(i)
if name is None:
name = ""
col_name_entry.set_sensitive(True)
col_name_entry.set_text(name)
else:
col_name_entry.set_sensitive(False)
col_name_entry.set_text(_("Empty palette slot"))
else:
col_name_entry.set_sensitive(False)
col_name_entry.set_text("")
self._update_buttons()
def _update_buttons(self):
palette = self._mgr.palette
emptyish = len(palette) == 0
if len(palette) == 1:
if palette[0] is None:
emptyish = True
can_save = not emptyish
can_clear = not emptyish
can_remove = True
if emptyish or self._mgr.palette.match_position is None:
can_remove = False
self._save_button.set_sensitive(can_save)
self._remove_button.set_sensitive(can_remove)
self._clear_button.set_sensitive(can_clear)
def _palette_changed_cb(self, palette, *args, **kwargs):
new_name = palette.get_name()
if new_name is None:
new_name = ""
old_name = self._palette_name_entry.get_chars(0, -1)
if old_name != new_name:
self._palette_name_entry.set_text(new_name)
self._columns_adj.set_value(palette.get_columns())
self._update_buttons()
def _add_btn_clicked(self, button):
palette = self._mgr.palette
i = palette.match_position
if i is None:
i = len(palette)
palette.append(None)
palette.match_position = i
else:
palette.insert(i, None)
def _remove_btn_clicked(self, button):
palette = self._mgr.palette
i = palette.match_position
        if i is not None and 0 <= i < len(palette):
palette.pop(i)
if len(palette) == 0:
palette.append(None)
def _load_btn_clicked(self, button):
preview = _PalettePreview()
manager = self._target_color_manager
datapath = manager.get_data_path()
palettes_dir = os.path.join(datapath, DATAPATH_PALETTES_SUBDIR)
palette = palette_load_via_dialog(title=_("Load palette"),
parent=self,
preview=preview,
shortcuts=[palettes_dir])
if palette is not None:
self._mgr.palette.update(palette)
def _save_btn_clicked(self, button):
preview = _PalettePreview()
palette_save_via_dialog(self._mgr.palette, title=_("Save palette"),
parent=self, preview=preview)
def _clear_btn_clicked(self, button):
pal = self._mgr.palette
pal.clear()
class PaletteView (ColorAdjuster, Gtk.ScrolledWindow):
"""Scrollable view of a palette.
Palette entries can be clicked to select the color, and all instances of
the current shared color in the palette are highlighted.
"""
    ## Sizing constraint constants
_MIN_HEIGHT = 32
_MIN_WIDTH = 150
_MAX_NATURAL_HEIGHT = 300
_MAX_NATURAL_WIDTH = 300
def __init__(self):
Gtk.ScrolledWindow.__init__(self)
self.grid = _PaletteGridLayout()
self.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
self.add_with_viewport(self.grid)
def set_color_manager(self, mgr):
self.grid.set_color_manager(mgr)
ColorAdjuster.set_color_manager(self, mgr)
## Sizing boilerplate
# Reflect what the embedded grid widget tells us, but limit its natural
# size to something sensible. Huge palettes make huge grids...
def do_get_request_mode(self):
return self.grid.get_request_mode()
def do_get_preferred_width(self):
gminw, gnatw = self.grid.get_preferred_width()
minw = self._MIN_WIDTH
natw = min(gnatw, self._MAX_NATURAL_WIDTH)
return minw, max(minw, natw)
def do_get_preferred_height(self):
gminh, gnath = self.grid.get_preferred_height()
minh = self._MIN_HEIGHT
nath = min(gnath, self._MAX_NATURAL_HEIGHT)
return minh, max(minh, nath)
def do_get_preferred_width_for_height(self, height):
gminw, gnatw = self.grid.get_preferred_width_for_height(height)
minw = self._MIN_WIDTH
natw = min(gnatw, self._MAX_NATURAL_WIDTH)
return minw, max(minw, natw)
def do_get_preferred_height_for_width(self, width):
gminh, gnath = self.grid.get_preferred_height_for_width(width)
minh = self._MIN_HEIGHT
nath = min(gnath, self._MAX_NATURAL_HEIGHT)
return minh, max(minh, nath)
class _PalettePreview (Gtk.DrawingArea):
"""Preview-only palette view."""
_palette = None
def __init__(self):
Gtk.DrawingArea.__init__(self)
self.connect("draw", self._draw_cb)
self.set_size_request(128, 256)
def _draw_cb(self, widget, cr):
if self._palette is None:
return
alloc = widget.get_allocation()
w, h = alloc.width, alloc.height
s_max = 16 # min(w, h)
s_min = 4
ncolumns = self._palette.get_columns()
ncolors = len(self._palette)
if ncolors == 0:
return
        if ncolumns != 0:
s = w / ncolumns
s = clamp(s, s_min, s_max)
s = int(s)
if s*ncolumns > w:
ncolumns = 0
if ncolumns == 0:
s = math.sqrt(float(w*h) / ncolors)
s = clamp(s, s_min, s_max)
s = int(s)
ncolumns = max(1, int(w / s))
nrows = int(ncolors // ncolumns)
if ncolors % ncolumns != 0:
nrows += 1
nrows = max(1, nrows)
dx, dy = 0, 0
if nrows*s < h:
dy = int(h - nrows*s) / 2
if ncolumns*s < w:
dx = int(w - ncolumns*s) / 2
state = self.get_state_flags()
style = self.get_style_context()
bg_rgba = style.get_background_color(state)
bg_color = RGBColor.new_from_gdk_rgba(bg_rgba)
_palette_render(self._palette, cr, rows=nrows, columns=ncolumns,
swatch_size=s, bg_color=bg_color,
offset_x=dx, offset_y=dy,
rtl=False)
def set_palette(self, palette):
self._palette = palette
self.queue_draw()
class _PaletteGridLayout (ColorAdjusterWidget):
"""The palette layout embedded in a scrolling PaletteView.
"""
## Class settings
IS_DRAG_SOURCE = True
HAS_DETAILS_DIALOG = True
STATIC_TOOLTIP_TEXT = _("Color swatch palette.\nDrop colors here,\n"
"drag them to organize.")
## Layout constants
_SWATCH_SIZE_MIN = 8
_SWATCH_SIZE_MAX = 50
_SWATCH_SIZE_NOMINAL = 20
_PREFERRED_COLUMNS = 5 #: Preferred width in cells for free-flow mode.
def __init__(self):
ColorAdjusterWidget.__init__(self)
# Sizing
s = self._SWATCH_SIZE_NOMINAL
self.set_size_request(s, s)
self.connect("size-allocate", self._size_alloc_cb)
#: Highlight the currently matched color
self.show_matched_color = False
#: User can click on empty slots
self.can_select_empty = False
# Current index
self.connect("button-press-event", self._button_press_cb)
self.connect_after("button-release-event", self._button_release_cb)
# Dragging
self._drag_insertion_index = None
self.connect("motion-notify-event", self._motion_notify_cb)
self.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
# Tooltips
self._tooltip_index = None
self.set_has_tooltip(True)
# Cached layout details
self._rows = None
self._columns = None
self._swatch_size = self._SWATCH_SIZE_NOMINAL
def _size_alloc_cb(self, widget, alloc):
"""Caches layout details after size negotiation.
"""
width = alloc.width
height = alloc.height
ncolors, nrows, ncolumns = self._get_palette_dimensions()
if nrows and ncolumns:
# Fitted to the major dimension
size = int(min(width/ncolumns, height/nrows))
size = self._constrain_swatch_size(size)
else:
# Free-flowing
if ncolors > 0:
size = int(math.sqrt(float(width*height) / ncolors))
size = self._constrain_swatch_size(size)
ncolumns = max(1, min(ncolors, width / size))
nrows = max(1, int(ncolors / ncolumns))
if int(ncolors % ncolumns) > 0:
nrows += 1
if nrows * size > height or ncolumns * size > width:
size = max(1, min(int(height / nrows),
int(width / ncolumns)))
size = self._constrain_swatch_size(size)
ncolumns = max(1, min(ncolors, width / size))
nrows = max(1, int(ncolors / ncolumns))
if int(ncolors % ncolumns) > 0:
nrows += 1
else:
nrows = 0
ncolumns = 0
size = self._SWATCH_SIZE_NOMINAL
self._rows = nrows
self._columns = ncolumns
self._swatch_size = size
## Palette monitoring
def set_color_manager(self, mgr):
ColorAdjusterWidget.set_color_manager(self, mgr)
# Could be smarter about these: probably no need to redraw on
# every little change.
mgr.palette.info_changed += self._palette_changed_cb
mgr.palette.match_changed += self._palette_changed_cb
mgr.palette.sequence_changed += self._palette_changed_cb
mgr.palette.color_changed += self._palette_changed_cb
def _palette_changed_cb(self, palette, *args, **kwargs):
"""Called after each change made to the palette."""
# Determine if the layout has changed since the last time the palette
        # was drawn.
layout_changed = False
if None in (self._rows, self._columns):
logger.debug("layout changed: null preexisting layout info")
layout_changed = True
if not layout_changed and palette.columns is not None:
layout_changed = palette.columns != self._columns
if layout_changed:
logger.debug("layout changed: different number of columns")
if not layout_changed:
ncells = self._rows * self._columns
ncolors = len(palette)
if ncolors > ncells or ncolors <= ncells - self._columns:
logger.debug("layout changed: cannot fit palette into "
"currently calculated space")
layout_changed = True
# Queue a resize (and an implicit redraw) if the layout has changed,
# or just a redraw.
if layout_changed:
self._rows = None
self._columns = None
self.queue_resize()
self._drag_insertion_index = None
self._tooltip_index = None
else:
logger.debug("layout unchanged, redraw")
self.queue_draw()
## Pointer event handling
def _motion_notify_cb(self, widget, event):
x, y = event.x, event.y
i = self.get_index_at_pos(x, y)
# Set the tooltip.
# Passing the tooltip through a value of None is necessary for its
# position on the screen to be updated to where the pointer is. Setting
# it to None, and then to the desired value must happen in two separate
# events for the tooltip window position update to be honoured.
if i is None:
# Not over a color, so use the static default
if self._tooltip_index not in (-1, -2):
# First such event: reset the tooltip.
self._tooltip_index = -1
self.set_has_tooltip(False)
self.set_tooltip_text("")
elif self._tooltip_index != -2:
# Second event over a non-color: set the tooltip text.
self._tooltip_index = -2
self.set_has_tooltip(True)
self.set_tooltip_text(self.STATIC_TOOLTIP_TEXT)
elif self._tooltip_index != i:
# Mouse pointer has moved to a different color, or away
# from the two states above.
if self._tooltip_index is not None:
# First event for this i: reset the tooltip.
self._tooltip_index = None
self.set_has_tooltip(False)
self.set_tooltip_text("")
else:
# Second event for this i: set the desired tooltip text.
self._tooltip_index = i
mgr = self.get_color_manager()
tip = mgr.palette.get_color_name(i)
color = mgr.palette.get_color(i)
if color is None:
tip = _("Empty palette slot (drag a color here)")
elif tip is None or tip.strip() == "":
tip = "" # Anonymous colors don't get tooltips
self.set_has_tooltip(True)
self.set_tooltip_text(tip)
def _button_press_cb(self, widget, event):
"""Select color on a single click."""
if event.type == Gdk.EventType.BUTTON_PRESS:
if event.button == 1:
x, y = event.x, event.y
i = self.get_index_at_pos(x, y)
mgr = self.get_color_manager()
if not self.can_select_empty:
if mgr.palette.get_color(i) is None:
return False
mgr.palette.set_match_position(i)
mgr.palette.set_match_is_approx(False)
def _button_release_cb(self, widget, event):
pass
## Dimensions and sizing
@classmethod
def _constrain_swatch_size(cls, size):
size = min(cls._SWATCH_SIZE_MAX, max(cls._SWATCH_SIZE_MIN, size))
# Restrict to multiples of 2 for patterns, plus one for the border
if size % 2 == 0:
size -= 1
return size
def _get_palette_dimensions(self):
"""Normalized palette dimensions: (ncolors, nrows, ncolumns).
        Row and column figures are None if the layout is to be free-flowing.
"""
mgr = self.get_color_manager()
ncolumns = mgr.palette.get_columns()
ncolors = len(mgr.palette)
if ncolumns is None or ncolumns < 1:
nrows = None
ncolumns = None
else:
ncolumns = int(ncolumns)
if ncolors > 0:
ncolumns = min(ncolumns, ncolors)
nrows = max(1, int(ncolors / ncolumns))
if int(ncolors % ncolumns) > 0:
nrows += 1
else:
ncolumns = 1
nrows = 1
return (ncolors, nrows, ncolumns)
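    # Worked example (hypothetical numbers): a 10-color palette whose file
    # declares 4 columns normalizes to (ncolors=10, nrows=3, ncolumns=4):
    # two full rows of four plus a final partial row.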
def do_get_request_mode(self):
"""GtkWidget size negotiation implementation
"""
ncolors, nrows, ncolumns = self._get_palette_dimensions()
mode = Gtk.SizeRequestMode.HEIGHT_FOR_WIDTH
if nrows and ncolumns:
if nrows > ncolumns:
mode = Gtk.SizeRequestMode.WIDTH_FOR_HEIGHT
return mode
def do_get_preferred_width(self):
"""GtkWidget size negotiation implementation.
"""
ncolors, nrows, ncolumns = self._get_palette_dimensions()
        if nrows and ncolumns:
# Horizontal fit, assume rows <= columns
min_w = self._SWATCH_SIZE_MIN * ncolumns
nat_w = self._SWATCH_SIZE_NOMINAL * ncolumns
else:
# Free-flowing, across and then down
ncolumns = max(1, min(self._PREFERRED_COLUMNS, ncolors))
min_w = self._SWATCH_SIZE_MIN
nat_w = self._SWATCH_SIZE_NOMINAL * ncolumns
return min_w, max(min_w, nat_w)
def do_get_preferred_height_for_width(self, width):
"""GtkWidget size negotiation implementation.
"""
ncolors, nrows, ncolumns = self._get_palette_dimensions()
if nrows and ncolumns:
# Horizontal fit
swatch_size = self._constrain_swatch_size(int(width / ncolumns))
min_h = self._SWATCH_SIZE_MIN * nrows
nat_h = swatch_size * nrows
else:
# Free-flowing, across and then down
            # Since s = sqrt((w*h)/n), the height needed for n swatches of
            # side s at width w is h = s*s*n/w:
min_h = int((((self._SWATCH_SIZE_MIN)**2)*ncolors) / width)
nat_h = int((((self._SWATCH_SIZE_NOMINAL)**2)*ncolors) / width)
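            # E.g. (hypothetical numbers) 30 colors at width 150:
            # min_h = 8*8*30/150 = 12, nat_h = 20*20*30/150 = 80.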
return min_h, max(min_h, nat_h)
def do_get_preferred_height(self):
"""GtkWidget size negotiation implementation.
"""
ncolors, nrows, ncolumns = self._get_palette_dimensions()
if nrows and ncolumns:
# Vertical fit, assume rows > columns
min_h = self._SWATCH_SIZE_MIN * nrows
nat_h = self._SWATCH_SIZE_NOMINAL * nrows
else:
# Height required for our own minimum width (note do_())
min_w, nat_w = self.do_get_preferred_width()
min_h, nat_h = self.do_get_preferred_height_for_width(min_w)
return min_h, max(min_h, nat_h)
def do_get_preferred_width_for_height(self, height):
"""GtkWidget size negotiation implementation.
"""
ncolors, nrows, ncolumns = self._get_palette_dimensions()
if nrows and ncolumns:
# Vertical fit
swatch_size = self._constrain_swatch_size(int(height / nrows))
min_w = self._SWATCH_SIZE_MIN * ncolumns
nat_w = swatch_size * ncolumns
else:
# Just the minimum and natural width (note do_())
min_w, nat_w = self.do_get_preferred_width()
return min_w, max(min_w, nat_w)
def _get_background_size(self):
# HACK. it's quicker for this widget to render in the foreground
return 1, 1
def get_background_validity(self):
return 1
def render_background_cb(self, cr, wd, ht):
return
def _paint_palette_layout(self, cr):
mgr = self.get_color_manager()
if mgr.palette is None:
return
state = self.get_state_flags()
style = self.get_style_context()
bg_rgba = style.get_background_color(state)
bg_col = RGBColor.new_from_gdk_rgba(bg_rgba)
dx, dy = self.get_painting_offset()
_palette_render(mgr.palette, cr,
rows=self._rows, columns=self._columns,
swatch_size=self._swatch_size,
bg_color=bg_col,
offset_x=dx, offset_y=dy,
rtl=False)
def _paint_marker(self, cr, x, y, insert=False,
bg_rgb=(0, 0, 0), fg_rgb=(1, 1, 1),
bg_dash=[1, 2], fg_dash=[1, 2],
bg_width=2, fg_width=1):
cr.save()
cr.set_line_join(cairo.LINE_JOIN_ROUND)
cr.set_line_cap(cairo.LINE_CAP_ROUND)
size = self._swatch_size
w = h = size
# Background (shadow)
cr.set_source_rgb(*bg_rgb)
cr.set_line_width(bg_width)
if insert:
cr.move_to(x, y-1)
cr.line_to(x, y+h)
sw = int(w/4)
cr.move_to(x-sw, y-1)
cr.line_to(x+sw, y-1)
cr.move_to(x-sw, y+h)
cr.line_to(x+sw, y+h)
else:
cr.rectangle(x, y, w-1, h-1)
cr.set_dash(bg_dash)
cr.stroke_preserve()
# Foreground
cr.set_line_width(fg_width)
cr.set_dash(fg_dash)
cr.set_source_rgb(*fg_rgb)
cr.stroke()
cr.restore()
def paint_foreground_cb(self, cr, wd, ht):
mgr = self.get_color_manager()
if len(mgr.palette) < 1:
return
# Palette cells
self._paint_palette_layout(cr)
# Highlights
cr.set_line_cap(cairo.LINE_CAP_SQUARE)
# Current drag/drop target
if self._drag_insertion_index is not None:
i = self._drag_insertion_index
x, y = self.get_position_for_index(i)
insert = mgr.palette.get_color(i) is not None
self._paint_marker(cr, x, y, insert=insert)
# Position of the previous click
if self.show_matched_color:
i = mgr.palette.match_position
if i is not None:
x, y = self.get_position_for_index(i)
marker_args = [cr, x, y]
marker_kw = dict(bg_width=3, fg_width=1,
bg_dash=[2, 3], fg_dash=[2, 3])
if not mgr.palette.match_is_approx:
marker_kw.update(dict(bg_width=4, fg_width=1))
self._paint_marker(*marker_args, **marker_kw)
def get_position_for_index(self, i):
"""Gets the X and Y positions for a color cell at the given index"""
if None in (self._rows, self._columns):
return 0, 0
dx, dy = self.get_painting_offset()
s_w = s_h = self._swatch_size
c = i % self._columns
r = int(i / self._columns)
x = 0.5 + c*s_w
y = 0.5 + r*s_h
return x+dx, y+dy
def get_painting_offset(self):
if None in (self._rows, self._columns):
return 0, 0
sw = sh = self._swatch_size
l_wd = sw * self._columns
l_ht = sh * self._rows
alloc = self.get_allocation()
wd, ht = alloc.width, alloc.height
dx, dy = 0, 0
if l_wd < wd:
dx = (wd - l_wd)/2.0
if l_ht < ht:
dy = (ht - l_ht)/2.0
return 1+int(dx), 1+int(dy)
def get_color_at_position(self, x, y):
i = self.get_index_at_pos(x, y)
if i is not None:
mgr = self.get_color_manager()
col = mgr.palette.get_color(i)
if col is None:
return None
return col
def set_color_at_position(self, x, y, color):
i = self.get_index_at_pos(x, y)
mgr = self.get_color_manager()
if i is None:
mgr.palette.append(color)
else:
mgr.palette[i] = color
ColorAdjusterWidget.set_color_at_position(self, x, y, color)
def get_index_at_pos(self, x, y):
mgr = self.get_color_manager()
if mgr.palette is None:
return None
if None in (self._rows, self._columns):
return None
dx, dy = self.get_painting_offset()
x -= dx
y -= dy
s_wd = s_ht = self._swatch_size
r = int(y // s_ht)
c = int(x // s_wd)
if r < 0 or r >= self._rows:
return None
if c < 0 or c >= self._columns:
return None
i = r*self._columns + c
if i >= len(mgr.palette):
return None
return i
## Drag handling overrides
def drag_motion_cb(self, widget, context, x, y, t):
if "application/x-color" not in map(str, context.list_targets()):
return False
# Default action: copy means insert or overwrite
action = Gdk.DragAction.COPY
# Update the insertion marker
i = self.get_index_at_pos(x, y)
if i != self._drag_insertion_index:
self.queue_draw()
self._drag_insertion_index = i
# Dragging around inside the widget implies moving, by default
source_widget = Gtk.drag_get_source_widget(context)
if source_widget is self:
action = Gdk.DragAction.MOVE
if i is None:
action = Gdk.DragAction.DEFAULT # it'll be ignored
else:
mgr = self.get_color_manager()
if mgr.palette.get_color(i) is None:
# Empty swatch, convert moves to copies
action = Gdk.DragAction.COPY
# Cursor and status update
Gdk.drag_status(context, action, t)
def drag_data_received_cb(self, widget, context, x, y,
selection, info, t):
if "application/x-color" not in map(str, context.list_targets()):
return False
data = selection.get_data()
data_type = selection.get_data_type()
fmt = selection.get_format()
logger.debug("drag-data-received: got type=%r", data_type)
logger.debug("drag-data-received: got fmt=%r", fmt)
logger.debug("drag-data-received: got data=%r len=%r", data, len(data))
color = RGBColor.new_from_drag_data(data)
target_index = self.get_index_at_pos(x, y)
mgr = self.get_color_manager()
if Gtk.drag_get_source_widget(context) is self:
# Move/copy
current_index = mgr.palette.match_position
logger.debug("Move/copy %r -> %r", current_index, target_index)
assert current_index is not None
mgr.palette.reposition(current_index, target_index)
else:
if target_index is None:
# Append if the drop wasn't over a swatch
target_index = len(mgr.palette)
else:
# Insert before populated swatches, or overwrite empties
if mgr.palette.get_color(target_index) is None:
mgr.palette.pop(target_index)
mgr.palette.insert(target_index, color)
self.queue_draw()
self._drag_insertion_index = None
context.finish(True, True, t)
self.set_managed_color(color)
mgr.palette.set_match_position(target_index)
def drag_end_cb(self, widget, context):
self._drag_insertion_index = None
self.queue_draw()
def drag_leave_cb(self, widget, context, time):
self._drag_insertion_index = None
self.queue_draw()
## Loading and saving of palettes via a dialog
def palette_load_via_dialog(title, parent=None, preview=None,
shortcuts=None):
"""Runs a file chooser dialog, returning a palette or `None`.
The dialog is both modal and blocking. A new `Palette` object is returned
if the load was successful. The value `None` is returned otherwise.
:param parent: specifies the parent window
:param title: dialog title
:param preview: any preview widget with a ``set_palette()`` method
:param shortcuts: optional list of shortcut folders
"""
dialog = Gtk.FileChooserDialog(
title=title,
parent=parent,
action=Gtk.FileChooserAction.OPEN,
buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_OPEN, Gtk.ResponseType.ACCEPT),
)
if preview is not None:
dialog.set_preview_widget(preview)
dialog.connect("update-preview",
_palette_loadsave_dialog_update_preview_cb,
preview)
if shortcuts is not None:
for shortcut in shortcuts:
dialog.add_shortcut_folder(shortcut)
dialog.set_do_overwrite_confirmation(True)
filter = Gtk.FileFilter()
filter.add_pattern("*.gpl")
filter.set_name(_("GIMP palette file (*.gpl)"))
dialog.add_filter(filter)
filter = Gtk.FileFilter()
filter.add_pattern("*")
filter.set_name(_("All files (*)"))
dialog.add_filter(filter)
response_id = dialog.run()
palette = None
if response_id == Gtk.ResponseType.ACCEPT:
filename = dialog.get_filename()
logger.info("Loading palette from %r", filename)
palette = Palette(filename=filename)
dialog.destroy()
return palette
def palette_save_via_dialog(palette, title, parent=None, preview=None):
"""Runs a file chooser dialog for saving.
The dialog is both modal and blocking. Returns True if the file was saved
successfully.
    :param palette: the palette to save
:param parent: specifies the parent window
:param title: dialog title
:param preview: any preview widget with a ``set_palette()`` method
"""
dialog = Gtk.FileChooserDialog(
title=title,
parent=parent,
action=Gtk.FileChooserAction.SAVE,
buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_SAVE, Gtk.ResponseType.ACCEPT),
)
if preview is not None:
dialog.set_preview_widget(preview)
dialog.connect("update-preview",
_palette_loadsave_dialog_update_preview_cb,
preview)
dialog.set_do_overwrite_confirmation(True)
filter = Gtk.FileFilter()
filter.add_pattern("*.gpl")
filter.set_name(_("GIMP palette file (*.gpl)"))
dialog.add_filter(filter)
filter = Gtk.FileFilter()
filter.add_pattern("*")
filter.set_name(_("All files (*)"))
dialog.add_filter(filter)
response_id = dialog.run()
result = False
if response_id == Gtk.ResponseType.ACCEPT:
filename = dialog.get_filename()
filename = re.sub(r'[.]?(?:[Gg][Pp][Ll])?$', "", filename)
filename += ".gpl"
logger.info("Saving palette to %r", filename)
# FIXME: this can overwrite files without prompting the user, if
# the name hacking above changed the filename. Should do the name
# tweak within the dialog somehow and get that to confirm.
fp = open(filename, 'w')
palette.save(fp)
fp.flush()
fp.close()
result = True
dialog.destroy()
return result
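# Minimal usage sketch for the two dialog helpers above (needs a running
# Gtk session; which file the user picks in each dialog is up to them):
#   palette = palette_load_via_dialog(title=_("Load palette"))
#   if palette is not None:
#       palette_save_via_dialog(palette, title=_("Save palette"))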
def _palette_loadsave_dialog_update_preview_cb(dialog, preview):
"""Updates the preview widget when loading/saving palettes via dialog"""
filename = dialog.get_preview_filename()
palette = None
if filename is not None and os.path.isfile(filename):
try:
palette = Palette(filename=filename)
except Exception, ex:
logger.warning("Couldn't update preview widget: %s", str(ex))
return
if palette is not None and len(palette) > 0:
dialog.set_preview_widget_active(True)
preview.set_palette(palette)
preview.queue_draw()
else:
dialog.set_preview_widget_active(False)
## Palette rendering using Cairo
def _palette_render(palette, cr, rows, columns, swatch_size,
bg_color, offset_x=0, offset_y=0,
rtl=False):
"""Renders a Palette according to a precalculated grid.
    :param palette: the Palette to render
    :param cr: a Cairo context
    :param rows: number of rows in the layout
    :param columns: number of columns in the layout
    :param swatch_size: size of each swatch, in pixels
    :param bg_color: a `uicolor.UIColor` used when rendering the patterned
      placeholder for an empty palette slot.
    :param offset_x: horizontal rendering offset, in pixels
    :param offset_y: vertical rendering offset, in pixels
    :param rtl: layout direction: set to True to render right to left,
      instead of left to right. Currently ignored.
"""
HIGHLIGHT_DLUMA = 0.05
if len(palette) == 0:
return
if rows is None or columns is None:
return
cr.save()
cr.translate(offset_x, offset_y)
# Sizes and colors
swatch_w = swatch_h = swatch_size
light_col = HCYColor(color=bg_color)
dark_col = HCYColor(color=bg_color)
light_col.y = clamp(light_col.y + HIGHLIGHT_DLUMA, 0, 1)
dark_col.y = clamp(dark_col.y - HIGHLIGHT_DLUMA, 0, 1)
# Upper left outline (bottom right is covered below by the
# individual chips' shadows)
ul_col = HCYColor(color=bg_color)
ul_col.y *= 0.75
ul_col.c *= 0.5
cr.set_line_join(cairo.LINE_JOIN_ROUND)
cr.set_line_cap(cairo.LINE_CAP_ROUND)
cr.set_source_rgb(*ul_col.get_rgb())
cr.move_to(0.5, rows*swatch_h - 1)
cr.line_to(0.5, 0.5)
row1cells = min(columns, len(palette)) # needed?
cr.line_to(row1cells*swatch_w - 1, 0.5)
cr.set_line_width(2)
cr.stroke()
# Draw into the predefined grid
r = c = 0
cr.set_line_width(1.0)
cr.set_line_cap(cairo.LINE_CAP_SQUARE)
for col in palette.iter_colors():
s_x = c*swatch_w
s_y = r*swatch_h
s_w = swatch_w
s_h = swatch_h
# Select fill bg and pattern fg colors, Tango-style edge highlight
# and lower-right shadow.
if col is None:
# Empty slot, fill with a pattern
hi_rgb = light_col.get_rgb()
fill_bg_rgb = dark_col.get_rgb()
fill_fg_rgb = light_col.get_rgb()
sh_col = HCYColor(color=bg_color)
sh_col.y *= 0.75
sh_col.c *= 0.5
sh_rgb = sh_col.get_rgb()
else:
# Color swatch
hi_col = HCYColor(color=col)
hi_col.y = min(hi_col.y * 1.1, 1)
hi_col.c = min(hi_col.c * 1.1, 1)
sh_col = HCYColor(color=col)
sh_col.y *= 0.666
sh_col.c *= 0.5
hi_rgb = hi_col.get_rgb()
fill_bg_rgb = col.get_rgb()
fill_fg_rgb = None
sh_rgb = sh_col.get_rgb()
# Draw the swatch / color chip
cr.set_source_rgb(*sh_rgb)
cr.rectangle(s_x, s_y, s_w, s_h)
cr.fill()
cr.set_source_rgb(*fill_bg_rgb)
cr.rectangle(s_x, s_y, s_w-1, s_h-1)
cr.fill()
if fill_fg_rgb is not None:
s_w2 = int((s_w-1) / 2)
s_h2 = int((s_h-1) / 2)
cr.set_source_rgb(*fill_fg_rgb)
cr.rectangle(s_x, s_y, s_w2, s_h2)
cr.fill()
cr.rectangle(s_x+s_w2, s_y+s_h2, s_w2, s_h2)
cr.fill()
cr.set_source_rgb(*hi_rgb)
cr.rectangle(s_x+0.5, s_y+0.5, s_w-2, s_h-2)
cr.stroke()
c += 1
if c >= columns:
c = 0
r += 1
cr.restore()
## Module testing
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
import sys
win = Gtk.Window()
win.set_title("palette view")
win.connect("destroy", lambda *a: Gtk.main_quit())
mgr = ColorManager(prefs={}, datapath=".")
spv = PaletteView()
spv.grid.show_matched_color = True
spv.grid.can_select_empty = True
spv.set_color_manager(mgr)
spv.set_size_request(150, 150)
if len(sys.argv[1:]) > 0:
palette_file = sys.argv[1] # GIMP palette file (*.gpl)
palette = Palette(filename=palette_file)
mgr.palette.update(palette)
win.add(spv)
win.show_all()
Gtk.main()
|
muntasirsyed/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/databrowse/plugins/calendars.py
|
247
|
from django import http
from django.db import models
from django.contrib.databrowse.datastructures import EasyModel
from django.contrib.databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.utils.text import capfirst
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.views.generic import date_based
from django.utils import datetime_safe
class CalendarPlugin(DatabrowsePlugin):
def __init__(self, field_names=None):
self.field_names = field_names
def field_dict(self, model):
"""
Helper function that returns a dictionary of all DateFields or
        DateTimeFields in the given model. If self.field_names is set, it
        takes that into account when building the dictionary.
"""
if self.field_names is None:
return dict([(f.name, f) for f in model._meta.fields if isinstance(f, models.DateField)])
else:
return dict([(f.name, f) for f in model._meta.fields if isinstance(f, models.DateField) and f.name in self.field_names])
def model_index_html(self, request, model, site):
fields = self.field_dict(model)
if not fields:
return u''
return mark_safe(u'<p class="filter"><strong>View calendar by:</strong> %s</p>' % \
u', '.join(['<a href="calendars/%s/">%s</a>' % (f.name, force_unicode(capfirst(f.verbose_name))) for f in fields.values()]))
def urls(self, plugin_name, easy_instance_field):
if isinstance(easy_instance_field.field, models.DateField):
d = easy_instance_field.raw_value
return [mark_safe(u'%s%s/%s/%s/%s/%s/' % (
easy_instance_field.model.url(),
plugin_name, easy_instance_field.field.name,
str(d.year),
datetime_safe.new_date(d).strftime('%b').lower(),
d.day))]
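    # For a hypothetical model with a DateField named "pub_date" holding
    # datetime.date(2009, 3, 15), and this plugin registered as "calendars"
    # (the name used by model_index_html above), the URL built here ends
    # with "calendars/pub_date/2009/mar/15/" (month as a lowercase %b name).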
def model_view(self, request, model_databrowse, url):
self.model, self.site = model_databrowse.model, model_databrowse.site
self.fields = self.field_dict(self.model)
# If the model has no DateFields, there's no point in going further.
if not self.fields:
raise http.Http404('The requested model has no calendars.')
if url is None:
return self.homepage_view(request)
url_bits = url.split('/')
if self.fields.has_key(url_bits[0]):
return self.calendar_view(request, self.fields[url_bits[0]], *url_bits[1:])
raise http.Http404('The requested page does not exist.')
def homepage_view(self, request):
easy_model = EasyModel(self.site, self.model)
field_list = self.fields.values()
field_list.sort(key=lambda k:k.verbose_name)
return render_to_response('databrowse/calendar_homepage.html', {'root_url': self.site.root_url, 'model': easy_model, 'field_list': field_list})
def calendar_view(self, request, field, year=None, month=None, day=None):
easy_model = EasyModel(self.site, self.model)
queryset = easy_model.get_query_set()
extra_context = {'root_url': self.site.root_url, 'model': easy_model, 'field': field}
if day is not None:
return date_based.archive_day(request, year, month, day, queryset, field.name,
template_name='databrowse/calendar_day.html', allow_empty=False, allow_future=True,
extra_context=extra_context)
elif month is not None:
return date_based.archive_month(request, year, month, queryset, field.name,
template_name='databrowse/calendar_month.html', allow_empty=False, allow_future=True,
extra_context=extra_context)
elif year is not None:
return date_based.archive_year(request, year, queryset, field.name,
template_name='databrowse/calendar_year.html', allow_empty=False, allow_future=True,
extra_context=extra_context)
else:
return date_based.archive_index(request, queryset, field.name,
template_name='databrowse/calendar_main.html', allow_empty=True, allow_future=True,
extra_context=extra_context)
assert False, ('%s, %s, %s, %s' % (field, year, month, day))
|
robot-army/gazecontrol
|
refs/heads/master
|
net_utils.py
|
2
|
# Gaze Control - A real-time control application for Tobii Pro Glasses 2.
#
# Copyright 2017 Shadi El Hajj
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import urllib2
import json
import time
def mksock(peer):
''' Create a socket pair for a peer description '''
iptype = socket.AF_INET
if ':' in peer[0]:
iptype = socket.AF_INET6
return socket.socket(iptype, socket.SOCK_DGRAM)
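# Example (hypothetical peers): addresses containing ':' get an IPv6 socket,
# everything else IPv4; both are UDP (SOCK_DGRAM):
#   sock4 = mksock(('192.0.2.10', 49152))
#   sock6 = mksock(('2001:db8::1', 49152))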
def post_request(base_url, api_action, data=None):
''' send an HTTP REST POST request '''
url = base_url + api_action
req = urllib2.Request(url)
req.add_header('Content-Type', 'application/json')
data = json.dumps(data)
response = urllib2.urlopen(req, data)
data = response.read()
json_data = json.loads(data)
return json_data
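# Usage sketch (the base URL and action path are hypothetical): data is
# serialized to JSON for the request body, and the decoded JSON response
# is returned:
#   resp = post_request('http://192.0.2.20', '/rest/action',
#                       data={'key': 'value'})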
def wait_for_status(base_url, api_action, key, values):
''' poll for an HTTP response '''
url = base_url + api_action
req = urllib2.Request(url)
req.add_header('Content-Type', 'application/json')
response = urllib2.urlopen(req, None)
data = response.read()
json_data = json.loads(data)
if json_data[key] in values:
return json_data[key]
else:
return None
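# wait_for_status performs a single poll; callers loop until a terminal
# value appears. A sketch (URL, action, key and values are hypothetical):
#   status = None
#   while status is None:
#       status = wait_for_status('http://192.0.2.20', '/rest/status',
#                                'state', ['done', 'failed'])
#       time.sleep(1)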
|
wuhengzhi/chromium-crosswalk
|
refs/heads/master
|
third_party/WebKit/Tools/Scripts/webkitpy/common/system/logtesting.py
|
68
|
# Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Supports the unit-testing of logging code.
Provides support for unit-testing messages logged using the built-in
logging module.
Inherit from the LoggingTestCase class for basic testing needs. For
more advanced needs (e.g. unit-testing methods that configure logging),
see the TestLogStream class, and perhaps also the LogTesting class.
"""
import logging
import unittest
class TestLogStream(object):
"""Represents a file-like object for unit-testing logging.
This is meant for passing to the logging.StreamHandler constructor.
Log messages captured by instances of this object can be tested
using self.assertMessages() below.
"""
def __init__(self, test_case):
"""Create an instance.
Args:
test_case: A unittest.TestCase instance.
"""
self._test_case = test_case
self.messages = []
"""A list of log messages written to the stream."""
# Python documentation says that any object passed to the StreamHandler
# constructor should support write() and flush():
#
# http://docs.python.org/library/logging.html#module-logging.handlers
def write(self, message):
self.messages.append(message)
def flush(self):
pass
def assertMessages(self, messages):
"""Assert that the given messages match the logged messages.
messages: A list of log message strings.
"""
self._test_case.assertEqual(messages, self.messages)
class LogTesting(object):
"""Supports end-to-end unit-testing of log messages.
Sample usage:
class SampleTest(unittest.TestCase):
def setUp(self):
self._log = LogTesting.setUp(self) # Turn logging on.
def tearDown(self):
self._log.tearDown() # Turn off and reset logging.
def test_logging_in_some_method(self):
call_some_method() # Contains calls to _log.info(), etc.
# Check the resulting log messages.
self._log.assertMessages(["INFO: expected message #1",
"WARNING: expected message #2"])
"""
def __init__(self, test_stream, handler):
"""Create an instance.
This method should never be called directly. Instances should
instead be created using the static setUp() method.
Args:
test_stream: A TestLogStream instance.
handler: The handler added to the logger.
"""
self._test_stream = test_stream
self._handler = handler
@staticmethod
def _getLogger():
"""Return the logger being tested."""
# It is possible we might want to return something other than
# the root logger in some special situation. For now, the
# root logger seems to suffice.
return logging.getLogger()
@staticmethod
def setUp(test_case, logging_level=logging.INFO):
"""Configure logging for unit testing.
Configures the root logger to log to a testing log stream.
Only messages logged at or above the given level are logged
to the stream. Messages logged to the stream are formatted
in the following way, for example--
"INFO: This is a test log message."
This method should normally be called in the setUp() method
of a unittest.TestCase. See the docstring of this class
for more details.
Returns:
A LogTesting instance.
Args:
test_case: A unittest.TestCase instance.
logging_level: An integer logging level that is the minimum level
of log messages you would like to test.
"""
stream = TestLogStream(test_case)
handler = logging.StreamHandler(stream)
handler.setLevel(logging_level)
formatter = logging.Formatter("%(levelname)s: %(message)s")
handler.setFormatter(formatter)
# Notice that we only change the root logger by adding a handler
# to it. In particular, we do not reset its level using
# logger.setLevel(). This ensures that we have not interfered
# with how the code being tested may have configured the root
# logger.
logger = LogTesting._getLogger()
logger.addHandler(handler)
return LogTesting(stream, handler)
def tearDown(self):
"""Assert there are no remaining log messages, and reset logging.
This method asserts that there are no more messages in the array of
log messages, and then restores logging to its original state.
This method should normally be called in the tearDown() method of a
unittest.TestCase. See the docstring of this class for more details.
"""
self.assertMessages([])
logger = LogTesting._getLogger()
logger.removeHandler(self._handler)
def messages(self):
"""Return the current list of log messages."""
return self._test_stream.messages
# FIXME: Add a clearMessages() method for cases where the caller
# deliberately doesn't want to assert every message.
# We clear the log messages after asserting since they are no longer
# needed after asserting. This serves two purposes: (1) it simplifies
# the calling code when we want to check multiple logging calls in a
# single test method, and (2) it lets us check in the tearDown() method
# that there are no remaining log messages to be asserted.
#
# The latter ensures that no extra log messages are getting logged that
# the caller might not be aware of or may have forgotten to check for.
# This gets us a bit more mileage out of our tests without writing any
# additional code.
def assertMessages(self, messages):
"""Assert the current array of log messages, and clear its contents.
Args:
messages: A list of log message strings.
"""
try:
self._test_stream.assertMessages(messages)
finally:
# We want to clear the array of messages even in the case of
# an Exception (e.g. an AssertionError). Otherwise, another
# AssertionError can occur in the tearDown() because the
# array might not have gotten emptied.
self._test_stream.messages = []
# This class needs to inherit from unittest.TestCase. Otherwise, the
# setUp() and tearDown() methods will not get fired for test case classes
# that inherit from this class -- even if the class inherits from *both*
# unittest.TestCase and LoggingTestCase.
#
# FIXME: Rename this class to LoggingTestCaseBase to be sure that
# the unittest module does not interpret this class as a unittest
# test case itself.
class LoggingTestCase(unittest.TestCase):
"""Supports end-to-end unit-testing of log messages.
Sample usage:
class SampleTest(LoggingTestCase):
def test_logging_in_some_method(self):
call_some_method() # Contains calls to _log.info(), etc.
# Check the resulting log messages.
self.assertLog(["INFO: expected message #1",
"WARNING: expected message #2"])
"""
def setUp(self):
self._log = LogTesting.setUp(self)
def tearDown(self):
self._log.tearDown()
def logMessages(self):
"""Return the current list of log messages."""
return self._log.messages()
# FIXME: Add a clearMessages() method for cases where the caller
# deliberately doesn't want to assert every message.
# See the code comments preceding LogTesting.assertMessages() for
# an explanation of why we clear the array of messages after
# asserting its contents.
def assertLog(self, messages):
"""Assert the current array of log messages, and clear its contents.
Args:
messages: A list of log message strings.
"""
self._log.assertMessages(messages)
|
jgravois/ArcREST
|
refs/heads/master
|
src/arcrest/common/general.py
|
2
|
import datetime
import time
import json
import arcpy
import copy
import os
import tempfile
import uuid
from spatial import json_to_featureclass
from geometry import Point, MultiPoint, Polygon, Polyline, SpatialReference
from .._abstract.abstract import AbstractGeometry
#from ..agol import featureservice as agolFeatureService
#from ..agol import layer as agolLayer
def _unicode_convert(obj):
""" converts unicode to anscii """
if isinstance(obj, dict):
return {_unicode_convert(key): _unicode_convert(value) for key, value in obj.iteritems()}
elif isinstance(obj, list):
return [_unicode_convert(element) for element in obj]
elif isinstance(obj, unicode):
return obj.encode('utf-8')
else:
return obj
#----------------------------------------------------------------------
def _date_handler(obj):
if isinstance(obj, datetime.datetime):
return local_time_to_online(obj)
#elif isinstance(obj, (agolFeatureService.FeatureService,
#agolLayer.FeatureLayer,
#agolLayer.TableLayer)):
#return dict(obj)
else:
return obj
#----------------------------------------------------------------------
def local_time_to_online(dt=None):
"""
converts datetime object to a UTC timestamp for AGOL
Inputs:
dt - datetime object
Output:
Long value
"""
if dt is None:
dt = datetime.datetime.now()
is_dst = time.daylight and time.localtime().tm_isdst > 0
utc_offset = (time.altzone if is_dst else time.timezone)
return (time.mktime(dt.timetuple()) * 1000) + (utc_offset *1000)
#----------------------------------------------------------------------
def online_time_to_string(value,timeFormat):
"""
Converts a timestamp to date/time string
Inputs:
value - timestamp as long
timeFormat - output date/time format
Output:
string
"""
return datetime.datetime.fromtimestamp(value /1000).strftime(timeFormat)
#----------------------------------------------------------------------
def timestamp_to_datetime(timestamp):
"""
Converts a timestamp to a datetime object
Inputs:
timestamp - timestamp value as Long
output:
datetime object
"""
return datetime.datetime.fromtimestamp(timestamp /1000)
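# Round-trip sketch for the three converters above (values are in
# milliseconds; local_time_to_online also applies the local UTC offset):
#   ts = local_time_to_online(datetime.datetime(2015, 1, 1))
#   online_time_to_string(ts, "%Y-%m-%d %H:%M")   # e.g. '2015-01-01 ...'
#   timestamp_to_datetime(ts)                     # datetime object back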
########################################################################
class Feature(object):
""" returns a feature """
_geom = None
_json = None
_dict = None
_geomType = None
_attributes = None
_wkid = None
#----------------------------------------------------------------------
def __init__(self, json_string, wkid=None):
"""Constructor"""
self._wkid = wkid
if type(json_string) is dict:
if not wkid is None:
if 'geometry' in json_string and 'spatialReference' in json_string['geometry']:
json_string['geometry']['spatialReference'] = {"wkid" : wkid}
self._json = json.dumps(json_string,
default=_date_handler)
self._dict = json_string
elif type(json_string) is str:
self._dict = json.loads(json_string)
if not wkid is None:
self._dict['geometry']['spatialReference'] = {"wkid" : wkid}
self._json = json.dumps(self._dict,
default=_date_handler)
else:
raise TypeError("Invalid Input, only dictionary or string allowed")
#----------------------------------------------------------------------
def set_value(self, field_name, value):
""" sets an attribute value for a given field name """
if field_name in self.fields:
if not value is None:
self._dict['attributes'][field_name] = _unicode_convert(value)
self._json = json.dumps(self._dict, default=_date_handler)
else:
pass
elif field_name.upper() in ['SHAPE', 'SHAPE@', "GEOMETRY"]:
if isinstance(value, AbstractGeometry):
if isinstance(value, Point):
self._dict['geometry'] = {
"x" : value.asDictionary['x'],
"y" : value.asDictionary['y']
}
elif isinstance(value, MultiPoint):
self._dict['geometry'] = {
"points" : value.asDictionary['points']
}
elif isinstance(value, Polyline):
self._dict['geometry'] = {
"paths" : value.asDictionary['paths']
}
elif isinstance(value, Polygon):
self._dict['geometry'] = {
"rings" : value.asDictionary['rings']
}
else:
return False
self._json = json.dumps(self._dict, default=_date_handler)
elif isinstance(value, arcpy.Geometry):
if isinstance(value, arcpy.PointGeometry):
self.set_value( field_name, Point(value,value.spatialReference.factoryCode))
elif isinstance(value, arcpy.Multipoint):
self.set_value( field_name, MultiPoint(value,value.spatialReference.factoryCode))
elif isinstance(value, arcpy.Polyline):
self.set_value( field_name, Polyline(value,value.spatialReference.factoryCode))
elif isinstance(value, arcpy.Polygon):
self.set_value( field_name, Polygon(value,value.spatialReference.factoryCode))
else:
return False
return True
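    # Usage sketch (the field name and values are hypothetical):
    #   feat = Feature({'attributes': {'NAME': None},
    #                   'geometry': {'x': 1.0, 'y': 2.0}})
    #   feat.set_value('NAME', 'Store 42')   # attribute update
    #   feat.get_value('NAME')               # -> 'Store 42'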
#----------------------------------------------------------------------
def get_value(self, field_name):
""" returns a value for a given field name """
if field_name in self.fields:
return self._dict['attributes'][field_name]
elif field_name.upper() in ['SHAPE', 'SHAPE@', "GEOMETRY"]:
return self._dict['geometry']
return None
#----------------------------------------------------------------------
@property
def asDictionary(self):
"""returns the feature as a dictionary"""
        # Normalize both {'feature': {...}} and flat inputs to a plain
        # {'geometry': ..., 'attributes': ...} dict (geometry omitted for
        # tables, which have none).
        feat_dict = {}
        if self._dict.has_key('feature'):
            feat_dict['geometry'] = self._dict['feature']['geometry']
        elif self._dict.has_key('geometry'):
            feat_dict['geometry'] = self._dict['geometry']
        if self._dict.has_key("feature"):
            feat_dict['attributes'] = self._dict['feature']['attributes']
        else:
            feat_dict['attributes'] = self._dict['attributes']
        return feat_dict
#----------------------------------------------------------------------
@property
def asRow(self):
""" converts a feature to a list for insertion into an insert cursor
Output:
[row items], [field names]
returns a list of fields and the row object
"""
fields = self.fields
row = [""] * len(fields)
for k,v in self._attributes.iteritems():
row[fields.index(k)] = v
del v
del k
if self.geometry is not None:
row.append(self.geometry)
fields.append("SHAPE@")
return row, fields
#----------------------------------------------------------------------
@property
def geometry(self):
"""returns the feature geometry"""
if not self._wkid is None:
sr = arcpy.SpatialReference(self._wkid)
else:
sr = None
if self._geom is None:
if self._dict.has_key('feature'):
self._geom = arcpy.AsShape(self._dict['feature']['geometry'], esri_json=True)
elif self._dict.has_key('geometry'):
self._geom = arcpy.AsShape(self._dict['geometry'], esri_json=True)
return self._geom
#----------------------------------------------------------------------
@property
def fields(self):
""" returns a list of feature fields """
if self._dict.has_key("feature"):
self._attributes = self._dict['feature']['attributes']
else:
self._attributes = self._dict['attributes']
return self._attributes.keys()
#----------------------------------------------------------------------
@property
def geometryType(self):
""" returns the feature's geometry type """
if self._geomType is None:
if self.geometry is not None:
self._geomType = self.geometry.type
else:
self._geomType = "Table"
return self._geomType
@staticmethod
def fc_to_features(dataset):
"""
converts a dataset to a list of feature objects
Input:
dataset - path to table or feature class
Output:
list of feature objects
"""
desc = arcpy.Describe(dataset)
fields = [field.name for field in arcpy.ListFields(dataset) if field.type not in ['Geometry']]
date_fields = [field.name for field in arcpy.ListFields(dataset) if field.type =='Date']
non_geom_fields = copy.deepcopy(fields)
features = []
if hasattr(desc, "shapeFieldName"):
fields.append("SHAPE@JSON")
del desc
with arcpy.da.SearchCursor(dataset, fields) as rows:
for row in rows:
row = list(row)
for df in date_fields:
                    if row[fields.index(df)] is not None:
row[fields.index(df)] = int((_date_handler(row[fields.index(df)])))
template = {
"attributes" : dict(zip(non_geom_fields, row))
}
if "SHAPE@JSON" in fields:
template['geometry'] = \
json.loads(row[fields.index("SHAPE@JSON")])
features.append(
Feature(json_string=_unicode_convert(template))
)
del row
return features
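    # Usage sketch (arcpy required; the dataset path is hypothetical):
    #   feats = Feature.fc_to_features(r"C:\data\demo.gdb\stores")
    #   for feat in feats:
    #       print feat.geometryType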
#----------------------------------------------------------------------
def __str__(self):
""""""
return json.dumps(self.asDictionary)
########################################################################
class MosaicRuleObject(object):
"""
The image service uses a mosaic rule to mosaick multiple rasters on the
fly. The mosaic rule parameter is used by many image service operations,
for example, export image and identify operations.
"""
__allowedMosaicMethods = [
"esriMosaicNone",
"esriMosaicCenter",
"esriMosaicNadir",
"esriMosaicViewpoint",
"esriMosaicAttribute",
"esriMosaicLockRaster",
"esriMosaicNorthwest",
"esriMosaicSeamline"
]
__allowedMosaicOps = [
"MT_FIRST",
"MT_LAST",
"MT_MIN",
"MT_MAX",
"MT_MEAN",
"MT_BLEND",
"MT_SUM"
]
_mosaicMethod = None
_where = None
_sortField = None
_sortValue = None
_ascending = None
_lockRasterIds = None
_viewpoint = None
_fids = None
_mosaicOperation = None
_itemRenderingRule = None
#----------------------------------------------------------------------
def __init__(self,
mosaicMethod,
where="",
sortField="",
sortValue="",
ascending=True,
lockRasterIds=[],
viewpoint=None,
fids=[],
mosaicOperation=None,
itemRenderingRule=""):
"""Constructor"""
if mosaicMethod in self.__allowedMosaicMethods:
self._mosaicMethod = mosaicMethod
else:
raise AttributeError("Invalid mosaic method.")
self._where = where
self._sortField = sortField
self._sortValue = sortValue
self._ascending = ascending
        self._lockRasterIds = lockRasterIds
self._itemRenderingRule = itemRenderingRule
if isinstance(viewpoint, Point):
self._viewpoint = viewpoint
self._fids = fids
if mosaicOperation is not None and \
mosaicOperation in self.__allowedMosaicOps:
self._mosaicOperation = mosaicOperation
#----------------------------------------------------------------------
@property
def where(self):
"""
        Use a where clause to define a subset of the rasters in the mosaic;
        note that the rasters may not be visible at all scales.
"""
return self._where
#----------------------------------------------------------------------
@where.setter
def where(self, value):
"""
        Use a where clause to define a subset of the rasters in the mosaic;
        note that the rasters may not be visible at all scales.
"""
if value != self._where:
self._where = value
#----------------------------------------------------------------------
@property
def mosaicMethod(self):
"""
get/set the mosaic method
"""
return self._mosaicMethod
#----------------------------------------------------------------------
@mosaicMethod.setter
def mosaicMethod(self, value):
"""
get/set the mosaic method
"""
if value in self.__allowedMosaicMethods and \
self._mosaicMethod != value:
self._mosaicMethod = value
#----------------------------------------------------------------------
@property
def sortField(self):
""""""
return self._sortField
#----------------------------------------------------------------------
@sortField.setter
def sortField(self, value):
""""""
if self._sortField != value:
self._sortField = value
#----------------------------------------------------------------------
@property
def sortValue(self):
""""""
return self._sortValue
#----------------------------------------------------------------------
@sortValue.setter
def sortValue(self, value):
""""""
if self._sortValue != value:
self._sortValue = value
#----------------------------------------------------------------------
@property
def ascending(self):
""""""
return self._ascending
#----------------------------------------------------------------------
@ascending.setter
def ascending(self, value):
""""""
if isinstance(value, bool):
self._ascending = value
#----------------------------------------------------------------------
@property
def lockRasterIds(self):
""""""
return self._lockRasterIds
#----------------------------------------------------------------------
@lockRasterIds.setter
def lockRasterIds(self, value):
""""""
        if isinstance(value, list):
            self._lockRasterIds = value
#----------------------------------------------------------------------
@property
def viewpoint(self):
""""""
return self._viewpoint
#----------------------------------------------------------------------
@viewpoint.setter
def viewpoint(self, value):
""""""
if isinstance(value, Point):
self._viewpoint = value
#----------------------------------------------------------------------
@property
def fids(self):
""""""
return self._fids
#----------------------------------------------------------------------
@fids.setter
def fids(self, value):
""""""
self._fids = value
#----------------------------------------------------------------------
@property
def mosaicOperation(self):
""""""
return self._mosaicOperation
#----------------------------------------------------------------------
@mosaicOperation.setter
def mosaicOperation(self, value):
""""""
if value in self.__allowedMosaicOps and \
self._mosaicOperation != value:
self._mosaicOperation = value
#----------------------------------------------------------------------
@property
def itemRenderingRule(self):
""""""
return self._itemRenderingRule
#----------------------------------------------------------------------
@itemRenderingRule.setter
def itemRenderingRule(self, value):
""""""
if self._itemRenderingRule != value:
self._itemRenderingRule = value
#----------------------------------------------------------------------
@property
def value(self):
"""
gets the mosaic rule object as a dictionary
"""
if self.mosaicMethod == "esriMosaicNone" or\
self.mosaicMethod == "esriMosaicCenter" or \
self.mosaicMethod == "esriMosaicNorthwest" or \
self.mosaicMethod == "esriMosaicNadir":
            return {
                "mosaicMethod" : self._mosaicMethod,
                "where" : self._where,
                "ascending" : self._ascending,
                "fids" : self._fids,
                "mosaicOperation" : self._mosaicOperation
            }
elif self.mosaicMethod == "esriMosaicViewpoint":
return {
"mosaicMethod" : "esriMosaicViewpoint",
"viewpoint" : self._viewpoint.asDictionary,
"where" : self._where,
"ascending" : self._ascending,
"fids" : self._fids,
"mosaicOperation" : self._mosaicOperation
}
elif self.mosaicMethod == "esriMosaicAttribute":
return {
"mosaicMethod" : "esriMosaicAttribute",
"sortField" : self._sortField,
"sortValue" : self._sortValue,
"ascending" : self._ascending,
"where" : self._where,
"fids" : self._fids,
"mosaicOperation" : self._mosaicOperation
}
elif self.mosaicMethod == "esriMosaicLockRaster":
return {
"mosaicMethod" : "esriMosaicLockRaster",
"lockRasterIds" : self._localRasterIds,
"where" : self._where,
"ascending" : self._ascending,
"fids" : self._fids,
"mosaicOperation" : self._mosaicOperation
}
elif self.mosaicMethod == "esriMosaicSeamline":
return {
"mosaicMethod" : "esriMosaicSeamline",
"where" : self._where,
"fids" : self._fids,
"mosaicOperation" : self._mosaicOperation
}
else:
raise AttributeError("Invalid Mosaic Method")
########################################################################
class FeatureSet(object):
"""
This featureSet contains Feature objects, including the values for the
fields requested by the user. For layers, if you request geometry
information, the geometry of each feature is also returned in the
featureSet. For tables, the featureSet does not include geometries.
If a spatialReference is not specified at the featureSet level, the
featureSet will assume the spatialReference of its first feature. If
the spatialReference of the first feature is also not specified, the
spatial reference will be UnknownCoordinateSystem.
"""
_fields = None
_features = None
_hasZ = None
_hasM = None
_geometryType = None
_spatialReference = None
_objectIdFieldName = None
_globalIdFieldName = None
_displayFieldName = None
_allowedGeomTypes = ["esriGeometryPoint","esriGeometryMultipoint","esriGeometryPolyline",
"esriGeometryPolygon","esriGeometryEnvelope"]
#----------------------------------------------------------------------
def __init__(self,
fields,
features,
hasZ=False,
hasM=False,
geometryType=None,
spatialReference=None,
displayFieldName=None,
objectIdFieldName=None,
globalIdFieldName=None):
"""Constructor"""
self._fields = fields
self._features = features
self._hasZ = hasZ
self._hasM = hasM
self._geometryType = geometryType
self._spatialReference = spatialReference
self._displayFieldName = displayFieldName
self._objectIdFieldName = objectIdFieldName
self._globalIdFieldName = globalIdFieldName
#----------------------------------------------------------------------
def __str__(self):
"""returns object as string"""
return json.dumps(self.value)
#----------------------------------------------------------------------
@property
def value(self):
"""returns object as dictionary"""
return {
"objectIdFieldName" : self._objectIdFieldName,
"displayFieldName" : self._displayFieldName,
"globalIdFieldName" : self._globalIdFieldName,
"geometryType" : self._geometryType,
"spatialReference" : self._spatialReference,
"hasZ" : self._hasZ,
"hasM" : self._hasM,
"fields" : self._fields,
"features" : [f.asDictionary for f in self._features]
}
#----------------------------------------------------------------------
@property
def toJSON(self):
"""converts the object to JSON"""
return json.dumps(self.value)
#----------------------------------------------------------------------
def __iter__(self):
"""featureset iterator on features in feature set"""
for feature in self._features:
yield feature
#----------------------------------------------------------------------
def __len__(self):
"""returns the length of features in feature set"""
return len(self._features)
#----------------------------------------------------------------------
@staticmethod
def fromJSON(jsonValue):
"""returns a featureset from a JSON string"""
jd = json.loads(jsonValue)
features = []
        if 'fields' in jd:
            fields = jd['fields']
        else:
            fields = []
        wkid = None
        if 'spatialReference' in jd and 'latestWkid' in jd['spatialReference']:
            wkid = jd['spatialReference']['latestWkid']
        for feat in jd['features']:
            features.append(Feature(json_string=feat, wkid=wkid))
return FeatureSet(fields,
features,
hasZ=jd['hasZ'] if 'hasZ' in jd else False,
hasM=jd['hasM'] if 'hasM' in jd else False,
geometryType=jd['geometryType'] if 'geometryType' in jd else None,
objectIdFieldName=jd['objectIdFieldName'] if 'objectIdFieldName' in jd else None,
globalIdFieldName=jd['globalIdFieldName'] if 'globalIdFieldName' in jd else None,
displayFieldName=jd['displayFieldName'] if 'displayFieldName' in jd else None,
spatialReference=jd['spatialReference'] if 'spatialReference' in jd else None)
#----------------------------------------------------------------------
@property
def fields(self):
"""gets the featureset's fields"""
return self._fields
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""gets the featureset's spatial reference"""
return self._spatialReference
#----------------------------------------------------------------------
@spatialReference.setter
def spatialReference(self, value):
"""sets the featureset's spatial reference"""
if isinstance(value, SpatialReference):
self._spatialReference = value
elif isinstance(value, int):
self._spatialReference = SpatialReference(wkid=value)
elif isinstance(value, str) and \
str(value).isdigit():
self._spatialReference = SpatialReference(wkid=int(value))
#----------------------------------------------------------------------
@property
def hasZ(self):
"""gets/sets the Z-property"""
return self._hasZ
#----------------------------------------------------------------------
@hasZ.setter
def hasZ(self, value):
"""gets/sets the Z-property"""
if isinstance(value, bool):
self._hasZ = value
#----------------------------------------------------------------------
@property
def hasM(self):
"""gets/set the M-property"""
return self._hasM
#----------------------------------------------------------------------
@hasM.setter
def hasM(self, value):
"""gets/set the M-property"""
if isinstance(value, bool):
self._hasM = value
#----------------------------------------------------------------------
@property
def geometryType(self):
"""gets/sets the geometry Type"""
return self._geometryType
#----------------------------------------------------------------------
@geometryType.setter
def geometryType(self, value):
"""gets/sets the geometry Type"""
if value in self._allowedGeomTypes:
self._geometryType = value
#----------------------------------------------------------------------
@property
def objectIdFieldName(self):
"""gets/sets the object id field"""
return self._objectIdFieldName
#----------------------------------------------------------------------
@objectIdFieldName.setter
def objectIdFieldName(self, value):
"""gets/sets the object id field"""
self._objectIdFieldName = value
#----------------------------------------------------------------------
@property
def globalIdFieldName(self):
"""gets/sets the globalIdFieldName"""
return self._globalIdFieldName
#----------------------------------------------------------------------
@globalIdFieldName.setter
def globalIdFieldName(self, value):
"""gets/sets the globalIdFieldName"""
self._globalIdFieldName = value
#----------------------------------------------------------------------
@property
def displayFieldName(self):
"""gets/sets the displayFieldName"""
return self._displayFieldName
#----------------------------------------------------------------------
@displayFieldName.setter
def displayFieldName(self, value):
"""gets/sets the displayFieldName"""
self._displayFieldName = value
#----------------------------------------------------------------------
def save(self, saveLocation, outName):
"""
Saves a featureset object to a feature class
Input:
saveLocation - output location of the data
outName - name of the table the data will be saved to
"""
tempDir = tempfile.gettempdir()
        tempFile = os.path.join(tempDir, "%s.json" % uuid.uuid4().hex)
        with open(tempFile, 'wb') as writer:
            writer.write(str(self))
res = json_to_featureclass(json_file=tempFile,
out_fc=os.path.join(saveLocation, outName))
os.remove(tempFile)
return res
#----------------------------------------------------------------------
@property
def features(self):
"""gets the features in the FeatureSet"""
return self._features
|
gunan/tensorflow
|
refs/heads/master
|
tensorflow/python/keras/tests/convert_to_constants_test.py
|
1
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for convert_to_constants.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import convert_to_constants
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.platform import test
from tensorflow.python.saved_model.load import load
from tensorflow.python.saved_model.save import save
from tensorflow.python.training.tracking import tracking
from tensorflow.python.util import nest
class VariablesToConstantsTest(test.TestCase):
def _freezeModel(self, model):
"""Freezes the model.
Args:
model: Function.
Returns:
root: AutoTrackable object with original ConcreteFunction.
output_func: frozen ConcreteFunction.
"""
root = tracking.AutoTrackable()
root.f = model
input_func = root.f.get_concrete_function()
output_func = convert_to_constants.convert_variables_to_constants_v2(
input_func, lower_control_flow=False)
return root, output_func
def _hasStatefulPartitionedCallOp(self, graph_def):
"""Determines if a StatefulPartitionedCall op exists in the graph."""
for node in graph_def.node:
if node.op == "StatefulPartitionedCall":
return True
return False
def _getNumVariables(self, graph_def):
"""Returns the number of ReadVariableOp in the graph."""
return sum(node.op == "ReadVariableOp" for node in graph_def.node)
def _testConvertedFunction(self, obj, func, converted_concrete_func,
input_data):
# Ensure the converted graph has no variables and no function calls.
constant_graph_def = converted_concrete_func.graph.as_graph_def()
self.assertEqual(0, self._getNumVariables(constant_graph_def))
self.assertFalse(self._hasStatefulPartitionedCallOp(constant_graph_def))
# Check that the converted ConcreteFunction produces the same result as the
# original Function.
expected_value = nest.flatten(func(**input_data))
actual_value = nest.flatten(converted_concrete_func(**input_data))
for expected, actual in zip(expected_value, actual_value):
np.testing.assert_almost_equal(expected.numpy(), actual.numpy())
# Ensure the shape is retained.
for tensor in converted_concrete_func.inputs:
actual_shape = input_data[tensor.name.split(":")[0]].shape
self.assertEqual(tensor.shape, actual_shape)
# Save the converted ConcreteFunction as a signature.
save_dir = os.path.join(self.get_temp_dir(), "frozen_saved_model")
root = tracking.AutoTrackable()
root.f = converted_concrete_func
save(root, save_dir, {"mykey": converted_concrete_func})
# Load it back and make sure it works.
loaded_obj = load(save_dir)
actual_value = nest.flatten(loaded_obj.signatures["mykey"](**input_data))
for expected, actual in zip(expected_value, actual_value):
np.testing.assert_almost_equal(expected.numpy(), actual.numpy())
@test_util.run_v2_only
def testKerasModel(self):
"""Test a basic Keras model with Variables."""
input_data = {"x": constant_op.constant(1., shape=[1, 1])}
# Create a simple Keras model.
x = [-1, 0, 1, 2, 3, 4]
y = [-3, -1, 1, 3, 5, 7]
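    # The data follows y = 2 * x - 1, so a single Dense unit can fit it exactly.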
model = keras.models.Sequential(
[keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer="sgd", loss="mean_squared_error")
model.fit(x, y, epochs=1)
@def_function.function(input_signature=[
tensor_spec.TensorSpec(shape=[1, 1], dtype=dtypes.float32)
])
def to_save(x):
return model(x)
root, output_func = self._freezeModel(to_save)
self._testConvertedFunction(root, root.f, output_func, input_data)
@test_util.run_v2_only
def testKerasLSTM(self):
"""Test a Keras LSTM containing dynamic_rnn ops."""
input_data = {
"x":
constant_op.constant(
np.array(
np.random.random_sample((10, 10, 10)), dtype=np.float32))
}
model = keras.models.Sequential(
[keras.layers.LSTM(units=10, input_shape=(10, 10))])
@def_function.function(input_signature=[
tensor_spec.TensorSpec(shape=[10, 10, 10], dtype=dtypes.float32)
])
def to_save(x):
return model(x)
root, output_func = self._freezeModel(to_save)
self._testConvertedFunction(root, root.f, output_func, input_data)
@test_util.run_v2_only
def testEmbeddings(self):
"""Test model with embeddings."""
input_data = {
"x":
constant_op.constant(
np.array(np.random.random_sample((20)), dtype=np.int32))
}
class EmbeddingModel(keras.Model):
def __init__(self):
super(EmbeddingModel, self).__init__()
self.shared_weights = self.add_weight(
"weights",
shape=(2000, 300),
dtype=dtypes.float32,
initializer=init_ops.random_normal_initializer(
mean=0.0, stddev=300**(-0.5)))
@def_function.function(input_signature=[
tensor_spec.TensorSpec(shape=(20), dtype=dtypes.int32)
])
def func(self, x):
return array_ops.gather(self.shared_weights, x)
model = EmbeddingModel()
root, output_func = self._freezeModel(model.func)
self._testConvertedFunction(root, root.f, output_func, input_data)
if __name__ == "__main__":
test.main()
|
sander76/home-assistant
|
refs/heads/dev
|
tests/components/keenetic_ndms2/__init__.py
|
5
|
"""Tests for the Keenetic NDMS2 component."""
from homeassistant.components.keenetic_ndms2 import const
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
MOCK_NAME = "Keenetic Ultra 2030"
MOCK_DATA = {
CONF_HOST: "0.0.0.0",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 23,
}
MOCK_OPTIONS = {
CONF_SCAN_INTERVAL: 15,
const.CONF_CONSIDER_HOME: 150,
const.CONF_TRY_HOTSPOT: False,
const.CONF_INCLUDE_ARP: True,
const.CONF_INCLUDE_ASSOCIATED: True,
const.CONF_INTERFACES: ["Home", "VPS0"],
}
|
pbanaszkiewicz/amy
|
refs/heads/develop
|
amy/fiscal/tests/test_organization.py
|
1
|
from django.urls import reverse
from django_comments.models import Comment
from fiscal.forms import OrganizationCreateForm, OrganizationForm
from workshops.models import Event, Organization
from workshops.tests.base import TestBase
class TestOrganization(TestBase):
def setUp(self):
super().setUp()
self._setUpUsersAndLogin()
def test_organization_delete(self):
"""Make sure deleted organization is longer accessible.
Additionally check on_delete behavior for Event."""
Event.objects.create(
host=self.org_alpha, administrator=self.org_beta, slug="test-event"
)
for org_domain in [self.org_alpha.domain_quoted, self.org_beta.domain_quoted]:
rv = self.client.post(
reverse(
"organization_delete",
args=[
org_domain,
],
)
)
content = rv.content.decode("utf-8")
assert "Failed to delete" in content
Event.objects.get(slug="test-event").delete()
for org_domain in [self.org_alpha.domain_quoted, self.org_beta.domain_quoted]:
rv = self.client.post(
reverse(
"organization_delete",
args=[
org_domain,
],
)
)
assert rv.status_code == 302
with self.assertRaises(Organization.DoesNotExist):
Organization.objects.get(domain=org_domain)
def test_organization_invalid_chars_in_domain(self):
"""Ensure organisation's domain is cleaned from URL scheme, if it was present.
Ensure other parts of the URL remain.
The cleaning exists in OrganizationForm.clean_domain.
"""
test_data = [
("http://example.com/", "example.com/"),
("https://example.com/", "example.com/"),
("ftp://example.com/", "example.com/"),
("http://example.com", "example.com"),
("//example.com", "example.com"),
("//example.com/", "example.com/"),
("//example.com/?query", "example.com/?query"),
("//example.com/path/", "example.com/path/"),
("//example.com/path", "example.com/path"),
("//example.com:80/path/?query", "example.com:80/path/?query"),
("example.com/path;params?query#fragment", "example.com/path?query"),
(
"//user:password@example.com:80/path?query",
"user:password@example.com:80/path?query",
),
]
for domain, expected in test_data:
with self.subTest(domain=domain):
form = OrganizationForm({"domain": domain})
form.full_clean()
self.assertIn("domain", form.cleaned_data)
self.assertEqual(form.cleaned_data["domain"], expected)
def test_creating_event_with_no_comment(self):
"""Ensure that no comment is added when OrganizationCreateForm without
comment content is saved."""
self.assertEqual(Comment.objects.count(), 0)
data = {
"fullname": "Test Organization",
"domain": "test.org",
"comment": "",
}
form = OrganizationCreateForm(data)
form.save()
self.assertEqual(Comment.objects.count(), 0)
def test_creating_event_with_comment(self):
"""Ensure that a comment is added when OrganizationCreateForm with
comment content is saved."""
self.assertEqual(Comment.objects.count(), 0)
data = {
"fullname": "Test Organization",
"domain": "test.org",
"comment": "This is a test comment.",
}
form = OrganizationCreateForm(data)
obj = form.save()
self.assertEqual(Comment.objects.count(), 1)
comment = Comment.objects.first()
self.assertEqual(comment.comment, "This is a test comment.")
self.assertIn(comment, Comment.objects.for_model(obj))
def test_symmetrical_affiliations(self):
"""Make sure adding an affiliation in one organisation, automatically reveals
this relationship in the other organisation."""
# Arrange - `setUp()` creates 2 organisations we can use
# Act
self.org_alpha.affiliated_organizations.add(self.org_beta)
# Assert
self.assertIn(self.org_beta, self.org_alpha.affiliated_organizations.all())
self.assertIn(self.org_alpha, self.org_beta.affiliated_organizations.all())
|
dmclee/python-mobile-connect
|
refs/heads/master
|
mc_test/mc_test/models.py
|
10644
|
from django.db import models
# Create your models here.
|
jose36/jmdl2
|
refs/heads/master
|
servers/firedrive.py
|
40
|
# -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for firedrive
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
logger.info("[firedrive.py] test_video_exists(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url)
if '<div class="sad_face_image">' in data and '404:' in data:
return False,"El video ha sido borrado de Firedrive"
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[firedrive.py] url="+page_url)
video_urls = []
headers = []
headers.append( [ "User-Agent" , "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.52 Safari/537.17"] )
headers.append( [ "Accept" , "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" ])
headers.append( [ "Accept-Charset" , "ISO-8859-1,utf-8;q=0.7,*;q=0.3" ])
headers.append( [ "Accept-Encoding", "gzip,deflate,sdch" ])
headers.append( [ "Accept-Language", "es-ES,es;q=0.8" ])
headers.append( [ "Cache-Control" , "max-age=0" ])
headers.append( [ "Connection" , "keep-alive" ])
headers.append( [ "Origin" , "http://www.firedrive.com" ])
    # First request
data = scrapertools.cache_page(page_url,headers=headers)
#logger.info("data="+data)
# Simula el "continue to video"
confirm = scrapertools.find_single_match(data,'<input type="hidden" name="confirm" value="([^"]+)"')
post = urllib.urlencode({'confirm':confirm})
logger.info("post="+post)
headers.append( ["Referer",page_url] )
headers.append( ["Content-Type","application/x-www-form-urlencoded"])
data = scrapertools.cache_page( page_url , post=post, headers=headers )
logger.info("data="+data)
    # Descriptor URL
url = scrapertools.find_single_match(data,"file\: loadURL\('([^']+)'")
logger.info("url="+url)
    # Video URL
media_url = scrapertools.get_header_from_response(url,header_to_get="location")
video_urls.append( [ scrapertools.get_filename_from_url(media_url)[-4:] + " [firedrive]",media_url ] )
for video_url in video_urls:
logger.info("[firedrive.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
# Find videos from this server in the given text
def find_videos(text):
    encontrados = set()
    devuelve = []
    # Each entry pairs a URL pattern with the template used to build the final
    # link; the repeated per-pattern loops of the original are folded into one.
    patrones = [
        # http://www.peliculasaudiolatino.com/show/firedrive.php?url=CEE0B3A7DDFED758
        ('(?:firedrive|putlocker).php\?url=([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
        # http://www.firedrive.com/embed/CEE0B3A7DDFED758 | http://www.firedrive.com/file/CEE0B3A7DDFED758
        ('(?:firedrive|putlocker).com/(?:file|embed)/([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
        # //www.cinezer.com/firedrive/CD6003D971725774
        ('/(?:firedrive|putlocker)/([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
        # http://www.firedrive.ch/file/0e6f1eeb473e0d87b390a71cd50c24a2/
        ('((?:firedrive|putlocker).ch/file/[a-z0-9]+)', "http://www.%s/"),
        # http://www.player3k.info/firedrive/?id=92FA671A11CA7A05
        ('/(?:firedrive|putlocker)/\?id\=([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
        # http://www.yaske.net/archivos/firedrive/play.php?v=D68E78CBA144AE59
        ('(?:firedrive|putlocker)/play.php\?v\=([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
        # http://www.cinetux.org/video/firedrive.php?id=31A2C1B48C5F8969
        ('(?:firedrive|putlocker).php\?id\=([A-Z0-9]+)', "http://www.firedrive.com/embed/%s"),
    ]
    for patronvideos, plantilla in patrones:
        logger.info("[firedrive.py] find_videos #"+patronvideos+"#")
        matches = re.compile(patronvideos,re.DOTALL).findall(text)
        for match in matches:
            titulo = "[firedrive]"
            url = plantilla % match
            if url not in encontrados:
                logger.info(" url="+url)
                devuelve.append( [ titulo , url , 'firedrive' ] )
                encontrados.add(url)
            else:
                logger.info(" url duplicada="+url)
    return devuelve
def test():
video_urls = get_video_url("http://www.firedrive.com/embed/C31F4FD09113E884")
return len(video_urls)>0
|
levilucio/SyVOLT
|
refs/heads/master
|
mbeddr2C_MM/transformation_from_mps/Hlayer2rule3.py
|
1
|
from core.himesis import Himesis
import uuid
class Hlayer2rule3(Himesis):
def __init__(self):
"""
Creates the himesis graph representing the DSLTrans rule layer2rule3.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(Hlayer2rule3, self).__init__(name='Hlayer2rule3', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = ['HimesisMM']
self["name"] = """layer2rule3"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'layer2rule3')
# match model. We only support one match model
self.add_node()
self.vs[0]["mm__"] = """MatchModel"""
# apply model node
self.add_node()
self.vs[1]["mm__"] = """ApplyModel"""
# paired with relation between match and apply models
self.add_node()
self.vs[2]["mm__"] = """paired_with"""
self.vs[2]["attr1"] = """layer2rule3"""
# match class Operation(layer2rule3class0Operation) node
self.add_node()
self.vs[3]["mm__"] = """Operation"""
self.vs[3]["attr1"] = """+"""
# apply class FunctionPrototype(layer2rule3class1FunctionPrototype) node
self.add_node()
self.vs[4]["mm__"] = """FunctionPrototype"""
self.vs[4]["attr1"] = """1"""
# apply class Argument(layer2rule3class2Argument) node
self.add_node()
self.vs[5]["mm__"] = """Argument"""
self.vs[5]["attr1"] = """1"""
# apply class VoidType(layer2rule3class3VoidType) node
self.add_node()
self.vs[6]["mm__"] = """VoidType"""
self.vs[6]["attr1"] = """1"""
# apply class PointerType(layer2rule3class4PointerType) node
self.add_node()
self.vs[7]["mm__"] = """PointerType"""
self.vs[7]["attr1"] = """1"""
# apply association FunctionPrototype--arguments-->Argument node
self.add_node()
self.vs[8]["attr1"] = """arguments"""
self.vs[8]["mm__"] = """directLink_T"""
# apply association Argument--type-->PointerType node
self.add_node()
self.vs[9]["attr1"] = """type"""
self.vs[9]["mm__"] = """directLink_T"""
# apply association PointerType--baseType-->VoidType node
self.add_node()
self.vs[10]["attr1"] = """baseType"""
self.vs[10]["mm__"] = """directLink_T"""
        # backward association FunctionPrototype --> Operation node
self.add_node()
self.vs[11]["mm__"] = """backward_link"""
# Add the edges
self.add_edges([
(0,3), # matchmodel -> match_class Operation(layer2rule3class0Operation)
(1,4), # applymodel -> apply_classFunctionPrototype(layer2rule3class1FunctionPrototype)
(1,5), # applymodel -> apply_classArgument(layer2rule3class2Argument)
(1,6), # applymodel -> apply_classVoidType(layer2rule3class3VoidType)
(1,7), # applymodel -> apply_classPointerType(layer2rule3class4PointerType)
(4,8), # apply class FunctionPrototype(layer2rule3class1FunctionPrototype) -> association arguments
(8,5), # associationarguments -> apply_classArgument(layer2rule3class2Argument)
(5,9), # apply class Argument(layer2rule3class2Argument) -> association type
(9,7), # associationtype -> apply_classPointerType(layer2rule3class4PointerType)
(7,10), # apply class PointerType(layer2rule3class4PointerType) -> association baseType
(10,6), # associationbaseType -> apply_classVoidType(layer2rule3class3VoidType)
            (4,11), # apply class FunctionPrototype(layer2rule3class1FunctionPrototype) -> backward_association
(11,3), # backward_associationOperation -> match_class Operation(layer2rule3class0Operation)
(0,2), # matchmodel -> pairedwith
(2,1) # pairedwith -> applyModel
])
self["equations"] = [((5,'name'),('constant','___id')),]
|
espadrine/opera
|
refs/heads/master
|
chromium/src/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/http_lock_unittest.py
|
124
|
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from http_lock import HttpLock
import os # Used for os.getpid()
import unittest2 as unittest
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.executive_mock import MockExecutive
# FIXME: These tests all touch the real disk, but could be written to a MockFileSystem instead.
class HttpLockTestWithRealFileSystem(unittest.TestCase):
# FIXME: Unit tests do not use an __init__ method, but rather setUp and tearDown methods.
def __init__(self, testFunc):
self.http_lock = HttpLock(None, "WebKitTestHttpd.lock.", "WebKitTest.lock")
self.filesystem = self.http_lock._filesystem # FIXME: We should be passing in a MockFileSystem instead.
self.lock_file_path_prefix = self.filesystem.join(self.http_lock._lock_path, self.http_lock._lock_file_prefix)
self.lock_file_name = self.lock_file_path_prefix + "0"
self.guard_lock_file = self.http_lock._guard_lock_file
self.clean_all_lockfile()
unittest.TestCase.__init__(self, testFunc)
def clean_all_lockfile(self):
if self.filesystem.exists(self.guard_lock_file):
self.filesystem.remove(self.guard_lock_file)
lock_list = self.filesystem.glob(self.lock_file_path_prefix + '*')
for file_name in lock_list:
self.filesystem.remove(file_name)
def assertEqual(self, first, second):
if first != second:
self.clean_all_lockfile()
unittest.TestCase.assertEqual(self, first, second)
def _check_lock_file(self):
if self.filesystem.exists(self.lock_file_name):
pid = os.getpid()
lock_file_pid = self.filesystem.read_text_file(self.lock_file_name)
self.assertEqual(pid, int(lock_file_pid))
return True
return False
def test_lock_lifecycle(self):
self.http_lock._create_lock_file()
self.assertEqual(True, self._check_lock_file())
self.assertEqual(1, self.http_lock._next_lock_number())
self.http_lock.cleanup_http_lock()
self.assertEqual(False, self._check_lock_file())
self.assertEqual(0, self.http_lock._next_lock_number())
class HttpLockTest(unittest.TestCase):
def setUp(self):
self.filesystem = MockFileSystem()
self.http_lock = HttpLock(None, "WebKitTestHttpd.lock.", "WebKitTest.lock", filesystem=self.filesystem, executive=MockExecutive())
# FIXME: Shouldn't we be able to get these values from the http_lock object directly?
self.lock_file_path_prefix = self.filesystem.join(self.http_lock._lock_path, self.http_lock._lock_file_prefix)
self.lock_file_name = self.lock_file_path_prefix + "0"
def test_current_lock_pid(self):
# FIXME: Once Executive wraps getpid, we can mock this and not use a real pid.
current_pid = os.getpid()
self.http_lock._filesystem.write_text_file(self.lock_file_name, str(current_pid))
self.assertEqual(self.http_lock._current_lock_pid(), current_pid)
def test_extract_lock_number(self):
lock_file_list = (
self.lock_file_path_prefix + "00",
self.lock_file_path_prefix + "9",
self.lock_file_path_prefix + "001",
self.lock_file_path_prefix + "021",
)
expected_number_list = (0, 9, 1, 21)
for lock_file, expected in zip(lock_file_list, expected_number_list):
self.assertEqual(self.http_lock._extract_lock_number(lock_file), expected)
def test_lock_file_list(self):
self.http_lock._filesystem = MockFileSystem({
self.lock_file_path_prefix + "6": "",
self.lock_file_path_prefix + "1": "",
self.lock_file_path_prefix + "4": "",
self.lock_file_path_prefix + "3": "",
})
expected_file_list = [
self.lock_file_path_prefix + "1",
self.lock_file_path_prefix + "3",
self.lock_file_path_prefix + "4",
self.lock_file_path_prefix + "6",
]
self.assertEqual(self.http_lock._lock_file_list(), expected_file_list)
|
akx/stackspy
|
refs/heads/master
|
stackspy/detection/__init__.py
|
2
|
# -- encoding: utf-8 --
from __future__ import with_statement
|
zdary/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyTupleAssignmentBalanceInspection/py4360.py
|
30
|
(a, b) = <warning descr="Too many values to unpack">1, 2, 3</warning>
(a, b) = <warning descr="Too many values to unpack">(1, 2, 3)</warning>
|
markap/TravelMap
|
refs/heads/master
|
boilerplate/external/babel/numbers.py
|
54
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Locale dependent formatting and parsing of numeric data.
The default locale for the functions in this module is determined by the
following environment variables, in that order:
* ``LC_NUMERIC``,
* ``LC_ALL``, and
* ``LANG``
"""
# TODO:
# Padding and rounding increments in pattern:
# - http://www.unicode.org/reports/tr35/ (Appendix G.6)
import math
import re
try:
from decimal import Decimal
have_decimal = True
except ImportError:
have_decimal = False
from babel.core import default_locale, Locale
from babel.util import rsplit
__all__ = ['format_number', 'format_decimal', 'format_currency',
'format_percent', 'format_scientific', 'parse_number',
'parse_decimal', 'NumberFormatError']
__docformat__ = 'restructuredtext en'
LC_NUMERIC = default_locale('LC_NUMERIC')
def get_currency_name(currency, locale=LC_NUMERIC):
"""Return the name used by the locale for the specified currency.
>>> get_currency_name('USD', 'en_US')
u'US Dollar'
:param currency: the currency code
:param locale: the `Locale` object or locale identifier
:return: the currency symbol
:rtype: `unicode`
:since: version 0.9.4
"""
return Locale.parse(locale).currencies.get(currency, currency)
def get_currency_symbol(currency, locale=LC_NUMERIC):
"""Return the symbol used by the locale for the specified currency.
>>> get_currency_symbol('USD', 'en_US')
u'$'
:param currency: the currency code
:param locale: the `Locale` object or locale identifier
:return: the currency symbol
:rtype: `unicode`
"""
return Locale.parse(locale).currency_symbols.get(currency, currency)
def get_decimal_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate decimal fractions.
>>> get_decimal_symbol('en_US')
u'.'
:param locale: the `Locale` object or locale identifier
:return: the decimal symbol
:rtype: `unicode`
"""
return Locale.parse(locale).number_symbols.get('decimal', u'.')
def get_plus_sign_symbol(locale=LC_NUMERIC):
"""Return the plus sign symbol used by the current locale.
>>> get_plus_sign_symbol('en_US')
u'+'
:param locale: the `Locale` object or locale identifier
:return: the plus sign symbol
:rtype: `unicode`
"""
return Locale.parse(locale).number_symbols.get('plusSign', u'+')
def get_minus_sign_symbol(locale=LC_NUMERIC):
"""Return the plus sign symbol used by the current locale.
>>> get_minus_sign_symbol('en_US')
u'-'
:param locale: the `Locale` object or locale identifier
    :return: the minus sign symbol
:rtype: `unicode`
"""
return Locale.parse(locale).number_symbols.get('minusSign', u'-')
def get_exponential_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate mantissa and exponent.
>>> get_exponential_symbol('en_US')
u'E'
:param locale: the `Locale` object or locale identifier
:return: the exponential symbol
:rtype: `unicode`
"""
return Locale.parse(locale).number_symbols.get('exponential', u'E')
def get_group_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate groups of thousands.
>>> get_group_symbol('en_US')
u','
:param locale: the `Locale` object or locale identifier
:return: the group symbol
:rtype: `unicode`
"""
return Locale.parse(locale).number_symbols.get('group', u',')
def format_number(number, locale=LC_NUMERIC):
"""Return the given number formatted for a specific locale.
>>> format_number(1099, locale='en_US')
u'1,099'
:param number: the number to format
:param locale: the `Locale` object or locale identifier
:return: the formatted number
:rtype: `unicode`
"""
# Do we really need this one?
return format_decimal(number, locale=locale)
def format_decimal(number, format=None, locale=LC_NUMERIC):
"""Return the given decimal number formatted for a specific locale.
>>> format_decimal(1.2345, locale='en_US')
u'1.234'
>>> format_decimal(1.2346, locale='en_US')
u'1.235'
>>> format_decimal(-1.2346, locale='en_US')
u'-1.235'
>>> format_decimal(1.2345, locale='sv_SE')
u'1,234'
>>> format_decimal(12345, locale='de')
u'12.345'
The appropriate thousands grouping and the decimal separator are used for
each locale:
>>> format_decimal(12345.5, locale='en_US')
u'12,345.5'
:param number: the number to format
:param format:
:param locale: the `Locale` object or locale identifier
:return: the formatted decimal number
:rtype: `unicode`
"""
locale = Locale.parse(locale)
if not format:
format = locale.decimal_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
def format_currency(number, currency, format=None, locale=LC_NUMERIC):
u"""Return formatted currency value.
>>> format_currency(1099.98, 'USD', locale='en_US')
u'$1,099.98'
>>> format_currency(1099.98, 'USD', locale='es_CO')
u'US$\\xa01.099,98'
>>> format_currency(1099.98, 'EUR', locale='de_DE')
u'1.099,98\\xa0\\u20ac'
The pattern can also be specified explicitly:
>>> format_currency(1099.98, 'EUR', u'\xa4\xa4 #,##0.00', locale='en_US')
u'EUR 1,099.98'
:param number: the number to format
:param currency: the currency code
:param locale: the `Locale` object or locale identifier
:return: the formatted currency value
:rtype: `unicode`
"""
locale = Locale.parse(locale)
if not format:
format = locale.currency_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale, currency=currency)
def format_percent(number, format=None, locale=LC_NUMERIC):
"""Return formatted percent value for a specific locale.
>>> format_percent(0.34, locale='en_US')
u'34%'
>>> format_percent(25.1234, locale='en_US')
u'2,512%'
>>> format_percent(25.1234, locale='sv_SE')
u'2\\xa0512\\xa0%'
The format pattern can also be specified explicitly:
>>> format_percent(25.1234, u'#,##0\u2030', locale='en_US')
u'25,123\u2030'
:param number: the percent number to format
:param format:
:param locale: the `Locale` object or locale identifier
:return: the formatted percent number
:rtype: `unicode`
"""
locale = Locale.parse(locale)
if not format:
format = locale.percent_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
def format_scientific(number, format=None, locale=LC_NUMERIC):
"""Return value formatted in scientific notation for a specific locale.
>>> format_scientific(10000, locale='en_US')
u'1E4'
The format pattern can also be specified explicitly:
>>> format_scientific(1234567, u'##0E00', locale='en_US')
u'1.23E06'
:param number: the number to format
:param format:
:param locale: the `Locale` object or locale identifier
:return: value formatted in scientific notation.
:rtype: `unicode`
"""
locale = Locale.parse(locale)
if not format:
format = locale.scientific_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
class NumberFormatError(ValueError):
"""Exception raised when a string cannot be parsed into a number."""
def parse_number(string, locale=LC_NUMERIC):
"""Parse localized number string into a long integer.
>>> parse_number('1,099', locale='en_US')
1099L
>>> parse_number('1.099', locale='de_DE')
1099L
When the given string cannot be parsed, an exception is raised:
>>> parse_number('1.099,98', locale='de')
Traceback (most recent call last):
...
NumberFormatError: '1.099,98' is not a valid number
:param string: the string to parse
:param locale: the `Locale` object or locale identifier
:return: the parsed number
:rtype: `long`
:raise `NumberFormatError`: if the string can not be converted to a number
"""
try:
return long(string.replace(get_group_symbol(locale), ''))
except ValueError:
raise NumberFormatError('%r is not a valid number' % string)
def parse_decimal(string, locale=LC_NUMERIC):
"""Parse localized decimal string into a float.
>>> parse_decimal('1,099.98', locale='en_US')
1099.98
>>> parse_decimal('1.099,98', locale='de')
1099.98
When the given string cannot be parsed, an exception is raised:
>>> parse_decimal('2,109,998', locale='de')
Traceback (most recent call last):
...
NumberFormatError: '2,109,998' is not a valid decimal number
:param string: the string to parse
:param locale: the `Locale` object or locale identifier
:return: the parsed decimal number
:rtype: `float`
:raise `NumberFormatError`: if the string can not be converted to a
decimal number
"""
locale = Locale.parse(locale)
try:
return float(string.replace(get_group_symbol(locale), '')
.replace(get_decimal_symbol(locale), '.'))
except ValueError:
raise NumberFormatError('%r is not a valid decimal number' % string)
PREFIX_END = r'[^0-9@#.,]'
NUMBER_TOKEN = r'[0-9@#.\-,E+]'
PREFIX_PATTERN = r"(?P<prefix>(?:'[^']*'|%s)*)" % PREFIX_END
NUMBER_PATTERN = r"(?P<number>%s+)" % NUMBER_TOKEN
SUFFIX_PATTERN = r"(?P<suffix>.*)"
number_re = re.compile(r"%s%s%s" % (PREFIX_PATTERN, NUMBER_PATTERN,
SUFFIX_PATTERN))
def split_number(value):
"""Convert a number into a (intasstring, fractionasstring) tuple"""
if have_decimal and isinstance(value, Decimal):
text = str(value)
else:
text = ('%.9f' % value).rstrip('0')
if '.' in text:
a, b = text.split('.', 1)
if b == '0':
b = ''
else:
a, b = text, ''
return a, b
def bankersround(value, ndigits=0):
"""Round a number to a given precision.
Works like round() except that the round-half-even (banker's rounding)
algorithm is used instead of round-half-up.
>>> bankersround(5.5, 0)
6.0
>>> bankersround(6.5, 0)
6.0
>>> bankersround(-6.5, 0)
-6.0
>>> bankersround(1234.0, -2)
1200.0
"""
sign = int(value < 0) and -1 or 1
value = abs(value)
a, b = split_number(value)
digits = a + b
add = 0
i = len(a) + ndigits
if i < 0 or i >= len(digits):
pass
elif digits[i] > '5':
add = 1
elif digits[i] == '5' and digits[i-1] in '13579':
add = 1
scale = 10**ndigits
if have_decimal and isinstance(value, Decimal):
return Decimal(int(value * scale + add)) / scale * sign
else:
return float(int(value * scale + add)) / scale * sign
def parse_pattern(pattern):
"""Parse number format patterns"""
if isinstance(pattern, NumberPattern):
return pattern
# Do we have a negative subpattern?
if ';' in pattern:
pattern, neg_pattern = pattern.split(';', 1)
pos_prefix, number, pos_suffix = number_re.search(pattern).groups()
neg_prefix, _, neg_suffix = number_re.search(neg_pattern).groups()
else:
pos_prefix, number, pos_suffix = number_re.search(pattern).groups()
neg_prefix = '-' + pos_prefix
neg_suffix = pos_suffix
if 'E' in number:
number, exp = number.split('E', 1)
else:
exp = None
if '@' in number:
if '.' in number and '0' in number:
raise ValueError('Significant digit patterns can not contain '
'"@" or "0"')
if '.' in number:
integer, fraction = rsplit(number, '.', 1)
else:
integer = number
fraction = ''
min_frac = max_frac = 0
def parse_precision(p):
"""Calculate the min and max allowed digits"""
min = max = 0
for c in p:
if c in '@0':
min += 1
max += 1
elif c == '#':
max += 1
elif c == ',':
continue
else:
break
return min, max
def parse_grouping(p):
"""Parse primary and secondary digit grouping
        >>> parse_grouping('##')
        (1000, 1000)
        >>> parse_grouping('#,###')
        (3, 3)
        >>> parse_grouping('#,####,###')
        (3, 4)
"""
width = len(p)
g1 = p.rfind(',')
if g1 == -1:
return 1000, 1000
g1 = width - g1 - 1
g2 = p[:-g1 - 1].rfind(',')
if g2 == -1:
return g1, g1
g2 = width - g1 - g2 - 2
return g1, g2
int_prec = parse_precision(integer)
frac_prec = parse_precision(fraction)
if exp:
frac_prec = parse_precision(integer+fraction)
exp_plus = exp.startswith('+')
exp = exp.lstrip('+')
exp_prec = parse_precision(exp)
else:
exp_plus = None
exp_prec = None
grouping = parse_grouping(integer)
return NumberPattern(pattern, (pos_prefix, neg_prefix),
(pos_suffix, neg_suffix), grouping,
int_prec, frac_prec,
exp_prec, exp_plus)
class NumberPattern(object):
def __init__(self, pattern, prefix, suffix, grouping,
int_prec, frac_prec, exp_prec, exp_plus):
self.pattern = pattern
self.prefix = prefix
self.suffix = suffix
self.grouping = grouping
self.int_prec = int_prec
self.frac_prec = frac_prec
self.exp_prec = exp_prec
self.exp_plus = exp_plus
if '%' in ''.join(self.prefix + self.suffix):
self.scale = 100
elif u'‰' in ''.join(self.prefix + self.suffix):
self.scale = 1000
else:
self.scale = 1
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.pattern)
def apply(self, value, locale, currency=None):
value *= self.scale
is_negative = int(value < 0)
if self.exp_prec: # Scientific notation
value = abs(value)
if value:
exp = int(math.floor(math.log(value, 10)))
else:
exp = 0
# Minimum number of integer digits
if self.int_prec[0] == self.int_prec[1]:
exp -= self.int_prec[0] - 1
# Exponent grouping
elif self.int_prec[1]:
exp = int(exp) / self.int_prec[1] * self.int_prec[1]
if not have_decimal or not isinstance(value, Decimal):
value = float(value)
if exp < 0:
value = value * 10**(-exp)
else:
value = value / 10**exp
exp_sign = ''
if exp < 0:
exp_sign = get_minus_sign_symbol(locale)
elif self.exp_plus:
exp_sign = get_plus_sign_symbol(locale)
exp = abs(exp)
number = u'%s%s%s%s' % \
(self._format_sigdig(value, self.frac_prec[0],
self.frac_prec[1]),
get_exponential_symbol(locale), exp_sign,
self._format_int(str(exp), self.exp_prec[0],
self.exp_prec[1], locale))
        elif '@' in self.pattern: # Is it a significant digits pattern?
text = self._format_sigdig(abs(value),
self.int_prec[0],
self.int_prec[1])
if '.' in text:
a, b = text.split('.')
a = self._format_int(a, 0, 1000, locale)
if b:
b = get_decimal_symbol(locale) + b
number = a + b
else:
number = self._format_int(text, 0, 1000, locale)
else: # A normal number pattern
a, b = split_number(bankersround(abs(value),
self.frac_prec[1]))
b = b or '0'
a = self._format_int(a, self.int_prec[0],
self.int_prec[1], locale)
b = self._format_frac(b, locale)
number = a + b
retval = u'%s%s%s' % (self.prefix[is_negative], number,
self.suffix[is_negative])
if u'¤' in retval:
retval = retval.replace(u'¤¤', currency.upper())
retval = retval.replace(u'¤', get_currency_symbol(currency, locale))
return retval
def _format_sigdig(self, value, min, max):
"""Convert value to a string.
The resulting string will contain between (min, max) number of
significant digits.
"""
a, b = split_number(value)
ndecimals = len(a)
if a == '0' and b != '':
ndecimals = 0
while b.startswith('0'):
b = b[1:]
ndecimals -= 1
a, b = split_number(bankersround(value, max - ndecimals))
digits = len((a + b).lstrip('0'))
if not digits:
digits = 1
# Figure out if we need to add any trailing '0':s
if len(a) >= max and a != '0':
return a
if digits < min:
b += ('0' * (min - digits))
if b:
return '%s.%s' % (a, b)
return a
def _format_int(self, value, min, max, locale):
width = len(value)
if width < min:
value = '0' * (min - width) + value
gsize = self.grouping[0]
ret = ''
symbol = get_group_symbol(locale)
while len(value) > gsize:
ret = symbol + value[-gsize:] + ret
value = value[:-gsize]
gsize = self.grouping[1]
return value + ret
def _format_frac(self, value, locale):
min, max = self.frac_prec
if len(value) < min:
value += ('0' * (min - len(value)))
if max == 0 or (min == 0 and int(value) == 0):
return ''
width = len(value)
while len(value) > min and value[-1] == '0':
value = value[:-1]
return get_decimal_symbol(locale) + value
|
fergalbyrne/nupic
|
refs/heads/master
|
examples/opf/experiments/classification/category_TP_1/description.py
|
32
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
import os
from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription
# the sub-experiment configuration
config = \
{
'dataSource': 'file://' + os.path.join(os.path.dirname(__file__),
'../datasets/category_TP_1.csv'),
'modelParams': { 'clParams': { 'clVerbosity': 0},
'sensorParams': { 'encoders': { }, 'verbosity': 0},
'spParams': { 'spVerbosity': 0},
'tpEnable': True,
'tpParams': { 'verbosity': 0}}}
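# importBaseDescription merges this sub-experiment config over the base description.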
mod = importBaseDescription('../base_category/description.py', config)
locals().update(mod.__dict__)
|
niphlod/pydal
|
refs/heads/master
|
pydal/parsers/__init__.py
|
4
|
from collections import defaultdict
from .._compat import with_metaclass, iteritems
from .._gae import gae
from ..helpers._internals import Dispatcher
from ..helpers.regex import REGEX_TYPE
parsers = Dispatcher("parser")
class for_type(object):
def __init__(self, field_type):
self.field_type = field_type
def __call__(self, f):
self.f = f
return self
class before_parse(object):
def __init__(self, field_type):
self.field_type = field_type
def __call__(self, f):
self.f = f
return self
class MetaParser(type):
def __new__(cls, name, bases, attrs):
new_class = type.__new__(cls, name, bases, attrs)
if bases == (object,):
return new_class
#: collect declared attributes
parsers = {}
before = {}
for key, value in list(attrs.items()):
if isinstance(value, for_type):
parsers[key] = value
elif isinstance(value, before_parse):
before[key] = value
#: get super declared attributes
declared_parsers = {}
declared_before = {}
for base in reversed(new_class.__mro__[1:]):
if hasattr(base, '_declared_parsers_'):
declared_parsers.update(base._declared_parsers_)
if hasattr(base, '_declared_before_'):
declared_before.update(base._declared_before_)
#: set parsers
declared_parsers.update(parsers)
declared_before.update(before)
new_class._declared_parsers_ = declared_parsers
new_class._declared_before_ = declared_before
return new_class
class ParserMethodWrapper(object):
def __init__(self, parser, f, extra=None):
self.parser = parser
self.f = f
if extra:
self.extra = extra
self.call = self._call_with_extras
else:
self.call = self._call
def _call_with_extras(self, value, field_type):
extras = self.extra(self.parser, field_type)
return self.f(self.parser, value, **extras)
def _call(self, value, field_type):
return self.f(self.parser, value)
def __call__(self, value, field_type):
return self.call(value, field_type)
class Parser(with_metaclass(MetaParser)):
def __init__(self, adapter):
self.adapter = adapter
self.dialect = adapter.dialect
self._before_registry_ = {}
for name, obj in iteritems(self._declared_before_):
self._before_registry_[obj.field_type] = obj.f
self.registered = defaultdict(lambda self=self: self._default)
for name, obj in iteritems(self._declared_parsers_):
if obj.field_type in self._before_registry_:
self.registered[obj.field_type] = ParserMethodWrapper(
self, obj.f, self._before_registry_[obj.field_type]
)
else:
self.registered[obj.field_type] = ParserMethodWrapper(
self, obj.f
)
def _default(self, value, field_type):
return value
def parse(self, value, field_itype, field_type):
return self.registered[field_itype](value, field_type)
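# Usage sketch (hypothetical subclass): parser methods are registered by field
# type through the decorators above, e.g.:
#     class MyParser(Parser):
#         @for_type('boolean')
#         def _boolean(self, value):
#             return str(value)[:1].lower() in ('t', '1')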
from .base import BasicParser
from .sqlite import SQLiteParser
from .postgre import PostgreParser
from .mongo import MongoParser
if gae is not None:
from .google import GoogleDatastoreParser
|
stuarth/pixie
|
refs/heads/master
|
pixie/vm/libs/pxic/writer.py
|
8
|
from pixie.vm.libs.pxic.tags import *
from pixie.vm.object import runtime_error, Object, Type, InterpreterCodeInfo
from rpython.rlib.runicode import unicode_encode_utf_8
from pixie.vm.string import String
from pixie.vm.keyword import Keyword
from pixie.vm.symbol import Symbol
from pixie.vm.numbers import Integer, BigInteger, Float
from pixie.vm.code import Code, Var, NativeFn, Namespace
from pixie.vm.primitives import nil, true, false
from pixie.vm.reader import LinePromise
from rpython.rlib.objectmodel import specialize
from rpython.rlib.rarithmetic import r_uint
from rpython.rlib.rbigint import rbigint
import pixie.vm.rt as rt
MAX_INT32 = r_uint(1 << 31)
class Writer(object):
def __init__(self, wtr, with_cache=False):
self._wtr = wtr
self._obj_cache = {}
self._string_cache = {}
self._with_cache = with_cache
def write(self, s):
assert isinstance(s, str)
self._wtr.write(s)
def flush(self):
self._wtr.flush()
def write_cached_obj(self, o, wfn):
if self._with_cache:
idx = self._obj_cache.get(o, -1)
if idx == -1:
idx = len(self._obj_cache)
self._obj_cache[o] = idx
write_tag(NEW_CACHED_OBJ, self)
wfn(o, self)
else:
write_tag(CACHED_OBJ, self)
write_int_raw(r_uint(idx), self)
else:
return wfn(o, self)
def write_raw_cached_string(self, si):
assert isinstance(si, unicode)
if self._with_cache:
idx = self._string_cache.get(si, -1)
if idx == -1:
idx = len(self._string_cache)
self._string_cache[si] = idx
s = unicode_encode_utf_8(si, len(si), "?")
write_int_raw(len(s), self)
assert len(s) <= MAX_STRING_SIZE
self.write(s)
else:
write_int_raw(r_uint(MAX_STRING_SIZE + idx), self)
else:
errors = "?"
s = unicode_encode_utf_8(si, len(si), errors)
assert len(s) <= MAX_INT32
write_int_raw(len(s), self)
self.write(s)
def write_object(self, o):
write_object(o, self)
def finish(self):
write_tag(EOF, self)
self._wtr.flush()
class WriterBox(Object):
_type = Type(u"pixie.stdlib.WriterBox")
def type(self):
return WriterBox._type
def __init__(self, wtr):
self._pxic_writer = wtr
def get_pxic_writer(self):
return self._pxic_writer
def write_tag(tag, wtr):
assert tag <= 0xFF
wtr.write(chr(tag))
def write_int_raw(i, wtr):
#if 0 <= i <= SMALL_INT_MAX:
# wtr.write(chr((i & 0xFF) + SMALL_INT_START))
if 0 <= i <= MAX_INT32:
wtr.write(chr(i & 0xFF))
wtr.write(chr((i >> 8) & 0xFF))
wtr.write(chr((i >> 16) & 0xFF))
wtr.write(chr((i >> 24) & 0xFF))
else:
runtime_error(u"Raw int must be less than MAX_INT32, got: " + unicode(str(i)))
def write_string_raw(si, wtr):
wtr.write_raw_cached_string(si)
def write_bigint_raw(i, wtr):
bits = i.bit_length()
nchars = r_uint(bits / 8)
    if bits % 8 != 0:
nchars += 1
assert nchars <= MAX_INT32
write_int_raw(nchars, wtr) # nchars used to represent the bigint
for j in range(nchars):
wtr.write(chr((i.rshift(j * 8).int_and_(0xFF).toint())))
def write_int(i, wtr):
if 0 <= i <= MAX_INT32:
wtr.write(chr(INT))
write_int_raw(i, wtr)
else:
wtr.write(chr(INT_STRING))
write_string_raw(unicode(str(i)), wtr)
def write_bigint(i, wtr):
if i.int_ge(0):
wtr.write(chr(BIGINT))
write_bigint_raw(i, wtr)
else:
wtr.write(chr(BIGINT_STRING))
write_string_raw(unicode(i.str()), wtr)
def write_float(f, wtr):
write_tag(FLOAT, wtr)
write_string_raw(unicode(str(f)), wtr)
def write_string(s, wtr):
write_tag(STRING, wtr)
write_string_raw(s, wtr)
def write_code(c, wtr):
assert isinstance(c, Code)
wtr.write(chr(CODE))
write_int_raw(len(c._bytecode), wtr)
for i in c._bytecode:
write_int_raw(i, wtr)
write_int_raw(len(c._consts), wtr)
for const in c._consts:
write_object(const, wtr)
write_int_raw(c._stack_size, wtr)
write_string_raw(c._name, wtr)
write_int_raw(c._arity, wtr)
write_int_raw(len(c._debug_points), wtr)
for k, v in c._debug_points.iteritems():
write_int_raw(k, wtr)
write_object(v, wtr)
class WritePairFn(NativeFn):
def __init__(self, wtr):
self._wtr = wtr
def invoke(self, args):
kv = args[1]
write_object(rt._key(kv), self._wtr)
write_object(rt._val(kv), self._wtr)
return nil
def write_map(mp, wtr):
write_tag(MAP, wtr)
write_int_raw(rt.count(mp), wtr)
    rt._reduce(mp, WritePairFn(wtr), nil)
class WriteItem(NativeFn):
def __init__(self, wtr):
self._wtr = wtr
def invoke(self, args):
itm = args[1]
write_object(itm, self._wtr)
return nil
def write_vector(vec, wtr):
write_tag(VECTOR, wtr)
write_int_raw(rt.count(vec), wtr)
rt._reduce(vec, WriteItem(wtr), nil)
def write_seq(s, wtr):
write_tag(SEQ, wtr)
write_int_raw(rt.count(s), wtr)
s = rt.seq(s)
while s is not nil:
write_object(rt.first(s), wtr)
s = rt.next(s)
# def __init__(self, name, bytecode, consts, stack_size, debug_points, meta=nil):
# BaseCode.__init__(self)
# self._bytecode = bytecode
# self._consts = consts
# self._name = name
# self._stack_size = stack_size
# self._debug_points = debug_points
# self._meta = meta
def write_var(var, wtr):
assert isinstance(var, Var)
write_tag(VAR, wtr)
write_string_raw(var._ns, wtr)
write_string_raw(var._name, wtr)
write_tag(TRUE if var.is_dynamic() else FALSE, wtr)
def write_keyword(kw, wtr):
assert isinstance(kw, Keyword)
write_tag(KEYWORD, wtr)
write_string_raw(kw._str, wtr)
def write_symbol(sym, wtr):
assert isinstance(sym, Symbol)
write_tag(SYMBOL, wtr)
write_string_raw(sym._str, wtr)
def write_line_promise(o, wtr):
assert isinstance(o, LinePromise)
write_tag(LINE_PROMISE, wtr)
o.finalize()
write_string_raw(o._str, wtr)
def write_namespace(o, wtr):
assert isinstance(o, Namespace)
write_tag(NAMESPACE, wtr)
write_string_raw(o._name, wtr)
def write_interpreter_code_info(obj, wtr):
line, line_number, column_number, file = obj.interpreter_code_info_state()
write_tag(CODE_INFO, wtr)
write_object(line, wtr)
write_int_raw(r_uint(line_number), wtr)
write_int_raw(r_uint(column_number), wtr)
write_string_raw(file, wtr)
def write_object(obj, wtr):
wtr.flush()
if isinstance(obj, String):
write_string(rt.name(obj), wtr)
elif isinstance(obj, Integer):
write_int(obj.int_val(), wtr)
elif isinstance(obj, BigInteger): #TODO test
write_bigint(obj.bigint_val(), wtr)
elif isinstance(obj, Float):
write_float(obj.float_val(), wtr)
elif isinstance(obj, Code):
write_code(obj, wtr)
elif obj is nil:
wtr.write(chr(NIL))
elif isinstance(obj, Var):
#wtr.write_cached_obj(obj, write_var)
write_var(obj, wtr)
elif rt._satisfies_QMARK_(rt.IMap.deref(), obj):
write_map(obj, wtr)
elif rt._satisfies_QMARK_(rt.IVector.deref(), obj):
write_vector(obj, wtr)
elif rt._satisfies_QMARK_(rt.ISeq.deref(), obj):
write_seq(obj, wtr)
elif isinstance(obj, Keyword):
wtr.write_cached_obj(obj, write_keyword)
elif isinstance(obj, LinePromise):
wtr.write_cached_obj(obj, write_line_promise)
elif obj is true:
write_tag(TRUE, wtr)
elif obj is false:
write_tag(FALSE, wtr)
elif isinstance(obj, Symbol):
write_symbol(obj, wtr)
elif isinstance(obj, Namespace):
wtr.write_cached_obj(obj, write_namespace)
elif isinstance(obj, InterpreterCodeInfo):
wtr.write_cached_obj(obj, write_interpreter_code_info)
else:
from pixie.vm.libs.pxic.util import write_handlers
handler = write_handlers.get(obj.type(), None)
if handler is None:
runtime_error(u"Object is not supported by pxic writer: " + rt.name(rt.str(obj.type())))
else:
write_tag(TAGGED, wtr)
write_string_raw(obj.type().name(), wtr)
write_object(handler.invoke([obj]), wtr)
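# Byte-level notes on the format emitted above, as a reader-side sketch (the
# real pxic reader lives elsewhere; read_int_raw / read_raw_cached_string
# below are hypothetical helpers). write_int_raw stores a 32-bit unsigned int
# little-endian; with caching enabled, write_raw_cached_string encodes a
# literal string as <length><utf-8 bytes> (length kept <= MAX_STRING_SIZE by
# the writer's assert) and a repeat as the single int MAX_STRING_SIZE + idx,
# mirroring Writer._string_cache's insertion order:
#
#     def read_int_raw(rdr):
#         b = [ord(rdr.read(1)) for _ in range(4)]
#         return b[0] | (b[1] << 8) | (b[2] << 16) | (b[3] << 24)
#
#     def read_raw_cached_string(rdr, cache):
#         n = read_int_raw(rdr)
#         if n >= MAX_STRING_SIZE:            # back-reference into the cache
#             return cache[n - MAX_STRING_SIZE]
#         s = rdr.read(n).decode('utf-8')     # literal: n bytes follow
#         cache.append(s)                     # same order the writer assigned
#         return s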
|
huanghao/iris-panel
|
refs/heads/master
|
iris/core/migrations/0024_auto__add_field_buildgroup_snapshot.py
|
7
|
# -*- coding: utf-8 -*-
# This file is part of IRIS: Infrastructure and Release Information System
#
# Copyright (C) 2013-2015 Intel Corporation
#
# IRIS is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2.0 as published by the Free Software Foundation.
#pylint: skip-file
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'BuildGroup.snapshot'
db.add_column(u'core_buildgroup', 'snapshot',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Snapshot'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'BuildGroup.snapshot'
db.delete_column(u'core_buildgroup', 'snapshot_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '225'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.buildgroup': {
'Meta': {'object_name': 'BuildGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'operate_reason': ('django.db.models.fields.TextField', [], {}),
'operated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'operator': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'snapshot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Snapshot']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.domain': {
'Meta': {'object_name': 'Domain'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.domainrole': {
'Meta': {'unique_together': "(('role', 'domain'),)", 'object_name': 'DomainRole', '_ormbases': [u'auth.Group']},
'domain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.Domain']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'})
},
'core.gittree': {
'Meta': {'object_name': 'GitTree'},
'gitpath': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'licenses': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.License']", 'symmetrical': 'False'}),
'packages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Package']", 'symmetrical': 'False'}),
'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"})
},
'core.gittreerole': {
'Meta': {'unique_together': "(('role', 'gittree'),)", 'object_name': 'GitTreeRole', '_ormbases': [u'auth.Group']},
'gittree': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.GitTree']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'})
},
'core.image': {
'Meta': {'unique_together': "(('name', 'target', 'product'),)", 'object_name': 'Image'},
'arch': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'core.imagebuild': {
'Meta': {'unique_together': "(('name', 'group'),)", 'object_name': 'ImageBuild'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BuildGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.URLField', [], {'max_length': '512'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'})
},
'core.license': {
'Meta': {'object_name': 'License'},
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shortname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'core.package': {
'Meta': {'object_name': 'Package'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.packagebuild': {
'Meta': {'unique_together': "(('package', 'repo', 'arch', 'group'),)", 'object_name': 'PackageBuild'},
'arch': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BuildGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.URLField', [], {'max_length': '512'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Package']"}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'})
},
'core.product': {
'Meta': {'object_name': 'Product'},
'description': ('django.db.models.fields.TextField', [], {}),
'gittrees': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.GitTree']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.snapshot': {
'Meta': {'unique_together': "(('product', 'buildid'),)", 'object_name': 'Snapshot'},
'buildid': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'daily_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'finished_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}),
'started_time': ('django.db.models.fields.DateTimeField', [], {}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'weekly_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'core.subdomain': {
'Meta': {'unique_together': "(('name', 'domain'),)", 'object_name': 'SubDomain'},
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Domain']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'core.subdomainrole': {
'Meta': {'unique_together': "(('role', 'subdomain'),)", 'object_name': 'SubDomainRole', '_ormbases': [u'auth.Group']},
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"})
},
'core.submission': {
'Meta': {'unique_together': "(('name', 'gittree'),)", 'object_name': 'Submission'},
'commit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'gittree': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.GitTree']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.submissionbuild': {
'Meta': {'unique_together': "(('submission', 'product'),)", 'object_name': 'SubmissionBuild'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BuildGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Submission']"})
},
'core.userparty': {
'Meta': {'object_name': 'UserParty', '_ormbases': [u'auth.Group']},
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'party': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'})
},
'core.userprofile': {
'Meta': {'object_name': 'UserProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['core']
|
NeCTAR-RC/swift
|
refs/heads/nectar/icehouse
|
test/unit/common/middleware/test_acl.py
|
21
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware import acl
class TestACL(unittest.TestCase):
def test_clean_acl(self):
value = acl.clean_acl('header', '.r:*')
self.assertEquals(value, '.r:*')
value = acl.clean_acl('header', '.r:specific.host')
self.assertEquals(value, '.r:specific.host')
value = acl.clean_acl('header', '.r:.ending.with')
self.assertEquals(value, '.r:.ending.with')
value = acl.clean_acl('header', '.r:*.ending.with')
self.assertEquals(value, '.r:.ending.with')
value = acl.clean_acl('header', '.r:-*.ending.with')
self.assertEquals(value, '.r:-.ending.with')
value = acl.clean_acl('header', '.r:one,.r:two')
self.assertEquals(value, '.r:one,.r:two')
value = acl.clean_acl('header', '.r:*,.r:-specific.host')
self.assertEquals(value, '.r:*,.r:-specific.host')
value = acl.clean_acl('header', '.r:*,.r:-.ending.with')
self.assertEquals(value, '.r:*,.r:-.ending.with')
value = acl.clean_acl('header', '.r:one,.r:-two')
self.assertEquals(value, '.r:one,.r:-two')
value = acl.clean_acl('header', '.r:one,.r:-two,account,account:user')
self.assertEquals(value, '.r:one,.r:-two,account,account:user')
value = acl.clean_acl('header', 'TEST_account')
self.assertEquals(value, 'TEST_account')
value = acl.clean_acl('header', '.ref:*')
self.assertEquals(value, '.r:*')
value = acl.clean_acl('header', '.referer:*')
self.assertEquals(value, '.r:*')
value = acl.clean_acl('header', '.referrer:*')
self.assertEquals(value, '.r:*')
value = acl.clean_acl('header',
' .r : one , ,, .r:two , .r : - three ')
self.assertEquals(value, '.r:one,.r:two,.r:-three')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.unknown:test')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:*.')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r : * . ')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:-*.')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r : - * . ')
self.assertRaises(ValueError, acl.clean_acl, 'header', ' .r : ')
self.assertRaises(ValueError, acl.clean_acl, 'header', 'user , .r : ')
self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:-')
self.assertRaises(ValueError, acl.clean_acl, 'header', ' .r : - ')
self.assertRaises(ValueError, acl.clean_acl, 'header',
'user , .r : - ')
self.assertRaises(ValueError, acl.clean_acl, 'write-header', '.r:r')
def test_parse_acl(self):
self.assertEquals(acl.parse_acl(None), ([], []))
self.assertEquals(acl.parse_acl(''), ([], []))
self.assertEquals(acl.parse_acl('.r:ref1'), (['ref1'], []))
self.assertEquals(acl.parse_acl('.r:-ref1'), (['-ref1'], []))
self.assertEquals(acl.parse_acl('account:user'),
([], ['account:user']))
self.assertEquals(acl.parse_acl('account'), ([], ['account']))
self.assertEquals(acl.parse_acl('acc1,acc2:usr2,.r:ref3,.r:-ref4'),
(['ref3', '-ref4'], ['acc1', 'acc2:usr2']))
self.assertEquals(acl.parse_acl(
'acc1,acc2:usr2,.r:ref3,acc3,acc4:usr4,.r:ref5,.r:-ref6'),
(['ref3', 'ref5', '-ref6'],
['acc1', 'acc2:usr2', 'acc3', 'acc4:usr4']))
def test_parse_v2_acl(self):
# For all these tests, the header name will be "hdr".
tests = [
# Simple case: all ACL data in one header line
({'hdr': '{"a":1,"b":"foo"}'}, {'a': 1, 'b': 'foo'}),
# No header "hdr" exists -- should return None
({}, None),
({'junk': 'junk'}, None),
# Empty ACLs should return empty dict
({'hdr': ''}, {}),
({'hdr': '{}'}, {}),
({'hdr': '{ }'}, {}),
# Bad input -- should return None
({'hdr': '["array"]'}, None),
({'hdr': 'null'}, None),
({'hdr': '"some_string"'}, None),
({'hdr': '123'}, None),
]
for hdrs_in, expected in tests:
result = acl.parse_acl(version=2, data=hdrs_in.get('hdr'))
self.assertEquals(expected, result,
'%r: %r != %r' % (hdrs_in, result, expected))
def test_format_v1_acl(self):
tests = [
((['a', 'b'], ['c.com']), 'a,b,.r:c.com'),
((['a', 'b'], ['c.com', '-x.c.com']), 'a,b,.r:c.com,.r:-x.c.com'),
((['a', 'b'], None), 'a,b'),
((None, ['c.com']), '.r:c.com'),
((None, None), ''),
]
for (groups, refs), expected in tests:
result = acl.format_acl(
version=1, groups=groups, referrers=refs, header_name='hdr')
self.assertEquals(expected, result, 'groups=%r, refs=%r: %r != %r'
% (groups, refs, result, expected))
def test_format_v2_acl(self):
tests = [
({}, '{}'),
({'foo': 'bar'}, '{"foo":"bar"}'),
({'groups': ['a', 'b'], 'referrers': ['c.com', '-x.c.com']},
'{"groups":["a","b"],"referrers":["c.com","-x.c.com"]}'),
]
for data, expected in tests:
result = acl.format_acl(version=2, acl_dict=data)
self.assertEquals(expected, result,
'data=%r: %r *!=* %r' % (data, result, expected))
def test_acls_from_account_info(self):
test_data = [
({}, None),
({'sysmeta': {}}, None),
({'sysmeta':
{'core-access-control': '{"VERSION":1,"admin":["a","b"]}'}},
{'admin': ['a', 'b'], 'read-write': [], 'read-only': []}),
({
'some-key': 'some-value',
'other-key': 'other-value',
'sysmeta': {
'core-access-control': '{"VERSION":1,"admin":["a","b"],"r'
'ead-write":["c"],"read-only":[]}',
}},
{'admin': ['a', 'b'], 'read-write': ['c'], 'read-only': []}),
]
for args, expected in test_data:
result = acl.acls_from_account_info(args)
self.assertEqual(expected, result, "%r: Got %r, expected %r" %
(args, result, expected))
def test_referrer_allowed(self):
self.assert_(not acl.referrer_allowed('host', None))
self.assert_(not acl.referrer_allowed('host', []))
self.assert_(acl.referrer_allowed(None, ['*']))
self.assert_(acl.referrer_allowed('', ['*']))
self.assert_(not acl.referrer_allowed(None, ['specific.host']))
self.assert_(not acl.referrer_allowed('', ['specific.host']))
self.assert_(acl.referrer_allowed('http://www.example.com/index.html',
['.example.com']))
self.assert_(acl.referrer_allowed(
'http://user@www.example.com/index.html', ['.example.com']))
self.assert_(acl.referrer_allowed(
'http://user:pass@www.example.com/index.html', ['.example.com']))
self.assert_(acl.referrer_allowed(
'http://www.example.com:8080/index.html', ['.example.com']))
self.assert_(acl.referrer_allowed(
'http://user@www.example.com:8080/index.html', ['.example.com']))
self.assert_(acl.referrer_allowed(
'http://user:pass@www.example.com:8080/index.html',
['.example.com']))
self.assert_(acl.referrer_allowed(
'http://user:pass@www.example.com:8080', ['.example.com']))
self.assert_(acl.referrer_allowed('http://www.example.com',
['.example.com']))
self.assert_(not acl.referrer_allowed(
'http://thief.example.com',
['.example.com', '-thief.example.com']))
self.assert_(not acl.referrer_allowed(
'http://thief.example.com',
['*', '-thief.example.com']))
self.assert_(acl.referrer_allowed(
'http://www.example.com',
['.other.com', 'www.example.com']))
self.assert_(acl.referrer_allowed(
'http://www.example.com',
['-.example.com', 'www.example.com']))
# This is considered a relative uri to the request uri, a mode not
# currently supported.
self.assert_(not acl.referrer_allowed('www.example.com',
['.example.com']))
self.assert_(not acl.referrer_allowed('../index.html',
['.example.com']))
self.assert_(acl.referrer_allowed('www.example.com', ['*']))
if __name__ == '__main__':
unittest.main()
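# Quick reference for the acl API exercised above (a sketch drawn from the
# test cases themselves, not from separate documentation):
#
#     from swift.common.middleware import acl
#
#     hdr = acl.clean_acl('header', ' .r : *.example.com , account:user ')
#     # -> '.r:.example.com,account:user' (normalized; raises ValueError on
#     #    malformed input)
#
#     referrers, groups = acl.parse_acl(hdr)
#     # -> (['.example.com'], ['account:user'])
#
#     acl.referrer_allowed('http://www.example.com/x.html', referrers)  # True
#     acl.referrer_allowed('http://evil.com/', referrers)               # False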
|
hackerkid/zulip
|
refs/heads/master
|
zerver/webhooks/slack/view.py
|
1
|
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import webhook_view
from zerver.lib.actions import check_send_stream_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.models import UserProfile
ZULIP_MESSAGE_TEMPLATE = "**{message_sender}**: `{text}`"
VALID_OPTIONS = {"SHOULD_NOT_BE_MAPPED": "0", "SHOULD_BE_MAPPED": "1"}
@webhook_view("Slack", notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_slack_webhook(
request: HttpRequest,
user_profile: UserProfile,
user_name: str = REQ(),
text: str = REQ(),
channel_name: str = REQ(),
stream: str = REQ(default="slack"),
channels_map_to_topics: str = REQ(default="1"),
) -> HttpResponse:
if channels_map_to_topics not in list(VALID_OPTIONS.values()):
return json_error(_("Error: channels_map_to_topics parameter other than 0 or 1"))
if channels_map_to_topics == VALID_OPTIONS["SHOULD_BE_MAPPED"]:
subject = f"channel: {channel_name}"
else:
stream = channel_name
subject = _("Message from Slack")
content = ZULIP_MESSAGE_TEMPLATE.format(message_sender=user_name, text=text)
check_send_stream_message(user_profile, request.client, stream, subject, content)
return json_success()
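# Behavior of the two channel-mapping modes (illustrative values; the fields
# mirror the REQ parameters above):
#
#   channels_map_to_topics == "1" (SHOULD_BE_MAPPED, the default):
#       user_name=alice, text=hi, channel_name=general
#       -> stream "slack" (or the ?stream= override), topic "channel: general"
#
#   channels_map_to_topics == "0" (SHOULD_NOT_BE_MAPPED):
#       same payload -> stream "general", topic "Message from Slack"
#
# In both modes the content is ZULIP_MESSAGE_TEMPLATE, e.g. "**alice**: `hi`".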
|
obi-two/Rebelion
|
refs/heads/master
|
data/scripts/templates/object/tangible/hair/trandoshan/base/shared_hair_trandoshan_base.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/hair/trandoshan/base/shared_hair_trandoshan_base.iff"
result.attribute_template_id = -1
result.stfName("hair_name","ridges")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
zookeepr/zookeepr
|
refs/heads/master
|
zkpylons/controllers/product_category.py
|
5
|
import logging
from pylons import request, response, session, tmpl_context as c
from zkpylons.lib.helpers import redirect_to
from pylons.decorators import validate
from pylons.decorators.rest import dispatch_on
from formencode import validators, htmlfill, ForEach, Invalid
from formencode.variabledecode import NestedVariables
from zkpylons.lib.base import BaseController, render
from zkpylons.lib.ssl_requirement import enforce_ssl
from zkpylons.lib.validators import BaseSchema, ProductValidator
import zkpylons.lib.helpers as h
from authkit.authorize.pylons_adaptors import authorize
from authkit.permissions import ValidAuthKitUser
from zkpylons.lib.mail import email
from zkpylons.model import meta
from zkpylons.model.product import Product, ProductInclude
from zkpylons.model.product_category import ProductCategory
log = logging.getLogger(__name__)
class NotExistingProductCategoryValidator(validators.FancyValidator):
def validate_python(self, values, state):
product_category = ProductCategory.find_by_name(values['product_category']['name'])
        if product_category is not None and product_category != c.product_category:
message = "Duplicate product category name"
error_dict = {'product_category.name': "Category name already in use"}
raise Invalid(message, values, state, error_dict=error_dict)
class ProductCategorySchema(BaseSchema):
name = validators.String(not_empty=True)
description = validators.String(not_empty=True)
note = validators.String()
display = validators.String(not_empty=True)
display_mode = validators.String()
display_order = validators.Int(min=0, max=2000000, not_empty=True)
invoice_free_products = validators.Bool(if_missing=False)
min_qty = validators.Int(min=0, max=2000000)
max_qty = validators.Int(min=0, max=2000000)
# TODO: check that min_qty <= max_qty
class NewProductCategorySchema(BaseSchema):
product_category = ProductCategorySchema()
pre_validators = [NestedVariables]
chained_validators = [NotExistingProductCategoryValidator()]
class EditProductCategorySchema(BaseSchema):
product_category = ProductCategorySchema()
pre_validators = [NestedVariables]
class ProductCategoryController(BaseController):
@enforce_ssl(required_all=True)
@authorize(h.auth.has_organiser_role)
def __before__(self, **kwargs):
pass
@dispatch_on(POST="_new")
def new(self):
return render('/product_category/new.mako')
@validate(schema=NewProductCategorySchema(), form='new', post_only=True, on_get=True, variable_decode=True)
def _new(self):
results = self.form_result['product_category']
c.product_category = ProductCategory(**results)
meta.Session.add(c.product_category)
meta.Session.commit()
h.flash("Category created")
redirect_to(action='view', id=c.product_category.id)
def view(self, id):
c.product_category = ProductCategory.find_by_id(id)
return render('/product_category/view.mako')
def stats(self, id):
c.can_edit = True
c.product_category = ProductCategory.find_by_id(id)
c.product_categories = ProductCategory.find_all()
return render('/product_category/stats.mako')
def index(self):
c.can_edit = True
c.product_category_collection = ProductCategory.find_all()
return render('/product_category/list.mako')
@dispatch_on(POST="_edit")
def edit(self, id):
c.product_category = ProductCategory.find_by_id(id)
defaults = h.object_to_defaults(c.product_category, 'product_category')
form = render('/product_category/edit.mako')
return htmlfill.render(form, defaults)
@validate(schema=EditProductCategorySchema(), form='edit', post_only=True, on_get=True, variable_decode=True)
def _edit(self, id):
product_category = ProductCategory.find_by_id(id)
for key in self.form_result['product_category']:
setattr(product_category, key, self.form_result['product_category'][key])
# update the objects with the validated form data
meta.Session.commit()
h.flash("The product_category has been updated successfully.")
redirect_to(action='view', id=id)
@dispatch_on(POST="_delete")
def delete(self, id):
"""Delete the product_category
GET will return a form asking for approval.
POST requests will delete the item.
"""
c.product_category = ProductCategory.find_by_id(id)
return render('/product_category/confirm_delete.mako')
@validate(schema=None, form='delete', post_only=True, on_get=True, variable_decode=True)
def _delete(self, id):
c.product_category = ProductCategory.find_by_id(id)
# For some reason cascading isn't working for me. Likely I just don't understand SA so I'll do it this way:
# first delete all of the products
for product in c.product_category.products:
# We also delete all of the productincludes for the products
for include in ProductInclude.find_by_product(product.id):
meta.Session.delete(include)
meta.Session.commit()
meta.Session.delete(product)
meta.Session.commit()
# Also delete any includes of the category
for include in ProductInclude.find_by_category(id):
meta.Session.delete(include)
meta.Session.commit()
meta.Session.delete(c.product_category)
meta.Session.commit()
h.flash("Category has been deleted.")
redirect_to('index')
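# The hand-rolled cascade in _delete can normally be declared on the mappings
# instead (a sketch only; the declarative style and attribute names below are
# assumptions about zkpylons.model, not its actual code):
#
#     from sqlalchemy.orm import relationship
#
#     class ProductCategory(Base):
#         # ...
#         products = relationship(
#             "Product",
#             cascade="all, delete-orphan",  # delete products with the category
#         )
#
# with an equivalent cascade from Product to its ProductInclude rows, after
# which meta.Session.delete(category) followed by one commit would suffice.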
|
WojciechMula/sse4-strstr
|
refs/heads/master
|
original/sse4_strstr-test.py
|
1
|
import sys, os, random
filename = "<unspecified>"
try:
filename = sys.argv[1]
string = open(filename, "r").read()
except:
print "can't open '%s'" % filename
sys.exit(1)
try:
random.seed(int(sys.argv[3]))
except:
pass
def time_command(command):
os.system('/usr/bin/time -o /tmp/measure -f "%U" ' + command)
f = open("/tmp/measure", "r")
t = float(f.read())
f.close()
return t
def time(command1, command2, iters=10):
    # Grow the iteration count tenfold until command1 runs for more than a
    # second, so start-up overhead stops dominating the measurement, then
    # time command2 with the same iteration count.
while True:
t1 = time_command(command1.replace("__iters__", str(iters)))
if t1 > 1:
t2 = time_command(command2.replace("__iters__", str(iters)))
return iters, t1, t2
else:
iters *= 10
def compare(filename, wordpos, word, wordlen):
word = word.replace("%", "%%")
cmd1 = './a.out "%s" libc __iters__ "%s" > /dev/null' % (filename, word)
cmd2 = './a.out "%s" sse4 __iters__ "%s" > /dev/null' % (filename, word)
_, t1, t2 = time(cmd1, cmd2)
return "[%d,%d] libc=%0.3fs sse4=%0.3fs speedup=%0.2f" % (wordpos, wordlen, t1, t2, t1/t2)
logname = "sse4.log"
lognumber = 1
while True:
if not os.path.exists(logname):
log = open(logname, "w")
break
else:
logname = "sse4%d.log" % lognumber
lognumber += 1
try:
for n in xrange(4, 64):
i1 = random.randint( 0, 64)
i2 = random.randint( 65, 1024)
i3 = random.randint(1024, len(string)-n)
print "length", n
for i in [i1, i2, i3]:
word = string[i:i+n]
for c in "\\`()<>{}\"":
word = word.replace(c, "\\" + c)
cmd = './a.out "%s" verify 1 "%s"' % (filename, word)
err = os.system(cmd)
if err:
                print repr(string[i:i+n])
sys.exit(1)
else:
s = compare(filename, i, word, n)
log.write(s + "\n")
print s
except:
import traceback
traceback.print_exc()
log.close()
|
aheadlead/PyTetris
|
refs/heads/master
|
controller.py
|
1
|
#!/usr/bin/env python
# coding=utf-8
from Queue import Queue
__author__ = 'weiyulan'
from view import *
from model import *
from time import time, sleep
from sys import stderr
class PyTerisController(object):
framerate_limit = 10
def __init__(self, view, model):
"""
:type view: PyTetrisViewBase 的子类
:param view: 视图
:type model: PyTetrisModel
:param model: 模型
:raise Exception: 异常。
"""
self.view = view
self.model = model
self.previous_timestamp = 0
self.timestamp = 0
self.event_from_view = Queue()
def gameover_callback(self):
self.view.end()
def run(self):
self.timestamp = time()
self.view.clear_screen()
while True:
self.view.map = self.model.map
self.view.score = self.model.score
self.view.next_block = self.model.next_block
self.view.active_block = self.model.active_block
self.view.active_block_position = self.model.active_block_position
while self.event_from_view.empty() is False:
event = self.event_from_view.get()
stderr.write("controller: get event \"" + event + "\"\n")
if event == "up":
self.model.press_rotate_key()
elif event == "left":
self.model.press_arrow_key("left")
elif event == "right":
self.model.press_arrow_key("right")
elif event == "down" or event == "space":
self.model.press_hard_drop()
elif event == "s":
self.model.start()
self.view.begin()
elif event == "q":
exit()
self.view.update()
# TODO for debug (disabled)
# stderr.write("controller: redraw the view\n")
self.previous_timestamp = self.timestamp
self.timestamp = time()
wait_interval = 1.0/self.framerate_limit - (self.timestamp-self.previous_timestamp)
if wait_interval < 0:
                # dropped a frame
wait_interval = 0
print wait_interval
sleep(wait_interval)
if __name__ == "__main__":
# TODO for debug
stderr.write('-'*40 + '\n')
current_model = PyTetrisModel()
current_view = PyTetrisViewMulticaster()
current_view.add_view(PyTerisViewTerminal())
current_view.add_view(PyTetrisViewGUI())
current_controller = PyTerisController(view=current_view, model=current_model)
current_view.event_callback = current_controller.event_from_view.put
current_controller.run()
|
coreos/dev-util
|
refs/heads/master
|
host/lib/fdt.py
|
3
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This library provides basic access to an fdt blob."""
import optparse
import os
import re
import shutil
import sys
import cros_output
import tools
from tools import Tools
from tools import CmdError
_base = os.path.dirname(sys.argv[0])
class Fdt:
"""Provides simple access to a flat device tree blob
Properties:
fname: Filename of fdt
"""
def __init__(self, tools, fname):
self.fname = fname
self.tools = tools
root, ext = os.path.splitext(fname)
self._is_compiled = ext == '.dtb'
def GetProp(self, node, prop, default=None):
"""Get a property from a device tree.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetProp('/lcd', 'width')
'1366'
>>> fdt.GetProp('/', 'fluffy')
Traceback (most recent call last):
...
CmdError: Command failed: fdtget ../tests/test.dtb / fluffy
Error at 'fluffy': FDT_ERR_NOTFOUND
<BLANKLINE>
This looks up the given node and property, and returns the value as a
    string.
If the node or property does not exist, this will return the default value.
Args:
node: Full path to node to look up.
prop: Property name to look up.
default: Default value to return if nothing is present in the fdt, or
None to raise in this case. This will be converted to a string.
Returns:
string containing the property value.
Raises:
CmdError: if the property does not exist and no default is provided.
"""
args = [self.fname, node, prop]
if default is not None:
args += ['-d', str(default)]
out = self.tools.Run('fdtget', args)
return out.strip()
def GetProps(self, node, convert_dashes=False):
"""Get all properties from a node.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetProps('/')
{'compatible': '1853253988 1767976051 1700881007 1634886656 1853253988 \
1767976052 1701278305 842346496', '#size-cells': '1', 'model': \
'NVIDIA Seaboard', '#address-cells': '1', 'interrupt-parent': '1'}
Args:
node: node name to look in.
convert_dashes: True to convert - to _ in node names.
Returns:
A dictionary containing all the properties, indexed by node name.
The entries are simply strings - no decoding of lists or numbers is
done.
Raises:
CmdError: if the node does not exist.
"""
out = self.tools.Run('fdtget', [self.fname, node, '-p'])
props = out.strip().splitlines()
props_dict = {}
for prop in props:
name = prop
if convert_dashes:
prop = re.sub('-', '_', prop)
props_dict[prop] = self.GetProp(node, name)
return props_dict
def DecodeIntList(self, node, prop, int_list_str, num_values=None):
"""Decode a string into a list of integers.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.DecodeIntList('/', 'galveston', '1 2 3 4')
[1, 2, 3, 4]
>>> fdt.DecodeIntList('/', 'galveston', '1 2 3 4', 4)
[1, 2, 3, 4]
>>> fdt.DecodeIntList('/', 'galveston', '1 2 3 4', 3)
Traceback (most recent call last):
...
    ValueError: GetIntList of node '/' prop 'galveston' returns \
    '1 2 3 4', which has 4 elements, but 3 expected
This decodes a string containing a list of integers like '1 2 3' into
a list like [1 2 3].
Args:
node: Full path to node to report in any error raised.
prop: Property name to report in any error raised.
int_list_str: String to decode.
num_values: If not None, then the array is checked to make sure it
has this many values, and an error is raised if not.
Returns:
List of integers.
Raises:
ValueError if the list is the wrong size.
"""
int_list = int_list_str.split()
if num_values and num_values != len(int_list):
      raise ValueError, ("GetIntList of node '%s' prop '%s' returns '%s'"
                         ", which has %d elements, but %d expected" %
                         (node, prop, int_list_str, len(int_list), num_values))
return [int(item) for item in int_list]
def GetIntList(self, node, prop, num_values=None, default=None):
"""Read a property and decode it into a list of integers.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetIntList('/flash@0/shared-dev-cfg@180000', 'reg')
[1572864, 262144]
>>> fdt.GetIntList('/flash/shared-dev-cfg', 'reg')
[1572864, 262144]
>>> fdt.GetIntList('/flash/shared-dev-cfg', 'reg', 3)
Traceback (most recent call last):
...
    ValueError: GetIntList of node '/flash/shared-dev-cfg' prop 'reg' returns \
    '1572864 262144', which has 2 elements, but 3 expected
>>> fdt.GetIntList('/swaffham', 'bulbeck', 2)
Traceback (most recent call last):
...
CmdError: Command failed: fdtget ../tests/test.dtb /swaffham bulbeck
Error at '/swaffham': FDT_ERR_NOTFOUND
<BLANKLINE>
>>> fdt.GetIntList('/lcd', 'bulbeck', 2, '5 6')
[5, 6]
This decodes a property containing a list of integers like '1 2 3' into
a list like [1 2 3].
Args:
node: Full path to node to look up.
prop: Property name to look up.
num_values: If not None, then the array is checked to make sure it
has this many values, and an error is raised if not.
Returns:
List of integers.
Raises:
ValueError if the list is the wrong size.
CmdError: if the property does not exist.
"""
return self.DecodeIntList(node, prop, self.GetProp(node, prop, default),
num_values)
def GetInt(self, node, prop, default=None):
"""Gets an integer from a device tree property.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetInt('/lcd', 'width')
1366
>>> fdt.GetInt('/lcd', 'rangiora')
Traceback (most recent call last):
...
CmdError: Command failed: fdtget ../tests/test.dtb /lcd rangiora
Error at 'rangiora': FDT_ERR_NOTFOUND
<BLANKLINE>
>>> fdt.GetInt('/lcd', 'rangiora', 1366)
1366
Args:
node: Full path to node to look up.
prop: Property name to look up.
Raises:
ValueError if the property cannot be converted to an integer.
CmdError: if the property does not exist.
"""
value = self.GetIntList(node, prop, 1, default)[0]
return int(value)
def GetString(self, node, prop, default=None):
"""Gets a string from a device tree property.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetString('/display', 'compatible')
'nvidia,tegra250-display'
Args:
node: Full path to node to look up.
prop: Property name to look up.
Raises:
CmdError: if the property does not exist.
"""
return self.GetProp(node, prop, default)
def GetFlashPart(self, section, part):
"""Returns the setup of the given section/part number in the flash map.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetFlashPart('ro', 'onestop')
[65536, 524288]
Args:
section: Section name to look at: ro, rw-a, etc.
part: Partition name to look at: gbb, vpd, etc.
Returns:
Tuple (position, size) of flash area in bytes.
"""
return self.GetIntList('/flash/%s-%s' % (section, part), 'reg', 2)
def GetFlashPartSize(self, section, part):
"""Returns the size of the given section/part number in the flash map.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetFlashPartSize('ro', 'onestop')
524288
Args:
section: Section name to look at: ro, rw-a, etc.
part: Partition name to look at: gbb, vpd, etc.
Returns:
Size of flash area in bytes.
"""
return self.GetFlashPart(section, part)[1]
def GetChildren(self, node):
"""Returns a list of children of a given node.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetChildren('/amba')
['interrupt-controller@50041000']
>>> fdt.GetChildren('/flash@0')
['onestop-layout@0', 'firmware-image@0', 'verification-block@7df00', \
'firmware-id@7ff00', 'readonly@0', 'bct@0', 'ro-onestop@10000', \
'ro-gbb@90000', 'ro-data@b0000', 'ro-vpd@c0000', 'fmap@d0000', \
'readwrite@100000', 'rw-vpd@100000', 'shared-dev-cfg@180000', \
'shared-data@1c0000', 'shared-env@1ff000', 'readwrite-a@200000', \
'rw-a-onestop@200000', 'readwrite-b@300000', 'rw-b-onestop@300000']
Args:
node: Node to return children from.
Returns:
List of children in the node.
Raises:
CmdError: if the node does not exist.
"""
out = self.tools.Run('fdtget', [self.fname, '-l', node])
return out.strip().splitlines()
def GetLabel(self, node):
"""Returns the label property of a given node.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetLabel('/flash/ro-onestop')
'ro-onestop'
>>> fdt.GetLabel('/go/hotspurs')
Traceback (most recent call last):
...
CmdError: Command failed: fdtget ../tests/test.dtb /go/hotspurs label
Error at '/go/hotspurs': FDT_ERR_NOTFOUND
<BLANKLINE>
Args:
node: Node to return label property from.
Raises:
CmdError: if the node or property does not exist.
"""
return self.GetString(node, 'label')
def Copy(self, new_name):
"""Make a copy of the FDT into another file, and return its object.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> our_copy = fdt.Copy(os.path.join(_base, '../tests/copy.dtb'))
>>> our_copy.PutString('/display', 'compatible', 'north')
>>> fdt.GetString('/display', 'compatible')
'nvidia,tegra250-display'
>>> our_copy.GetString('/display', 'compatible')
'north'
This copies the FDT into a supplied file, then creates an FDT object to
access the copy.
Args:
new_name: Filename to write copy to.
Returns:
An Fdt object for the copy.
"""
shutil.copyfile(self.tools.Filename(self.fname),
self.tools.Filename(new_name))
return Fdt(self.tools, new_name)
def PutString(self, node, prop, value_str):
"""Writes a string to a property in the fdt.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> our_copy = fdt.Copy(os.path.join(_base, '../tests/copy.dtb'))
>>> our_copy.PutString('/display', 'compatible', 'north')
>>> fdt.GetString('/display', 'compatible')
'nvidia,tegra250-display'
>>> our_copy.PutString('/display', 'compatible', 'south')
>>> our_copy.GetString('/display', 'compatible')
'south'
Args:
node: Full path to node to look up.
prop: Property name to look up.
value_str: String to write.
"""
args = ['-p', '-t', 's', self.fname, node, prop, value_str]
self.tools.Run('fdtput', args)
def PutInteger(self, node, prop, value_int):
"""Writes a string to a property in the fdt.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> our_copy = fdt.Copy(os.path.join(_base, '../tests/copy.dtb'))
>>> our_copy.PutString('/display', 'compatible', 'north')
>>> fdt.GetString('/display', 'compatible')
'nvidia,tegra250-display'
>>> our_copy.PutString('/display', 'compatible', 'south')
>>> our_copy.GetString('/display', 'compatible')
'south'
Args:
node: Full path to node to look up.
prop: Property name to look up.
value_int: Integer to write.
"""
args = ['-p', '-t', 'i', self.fname, node, prop, str(value_int)]
self.tools.Run('fdtput', args)
def PutIntList(self, node, prop, int_list):
"""Write a list of integers into an fdt property.
>>> tools = Tools(cros_output.Output())
>>> fdt = Fdt(tools, os.path.join(_base, '../tests/test.dtb'))
>>> fdt.GetIntList('/flash@0/shared-dev-cfg@180000', 'reg')
[1572864, 262144]
>>> fdt.PutIntList('/flash/shared-dev-cfg', 'victoria', [1, 2, 3])
>>> fdt.GetIntList('/flash/shared-dev-cfg', 'victoria', 3)
[1, 2, 3]
>>> fdt.PutIntList('/flash/shared-dev-cfg', 'victoria', [3])
>>> fdt.GetIntList('/flash/shared-dev-cfg', 'victoria', 1)
[3]
>>> fdt.PutIntList('/flash/shared-dev-cfg', 'victoria', [])
>>> fdt.GetIntList('/flash/shared-dev-cfg', 'victoria', 0)
[]
Args:
node: Full path to node to look up.
prop: Property name to look up.
int_list: List of integers to write.
"""
value_list = [str(s) for s in int_list]
args = ['-p', '-t', 'i', self.fname, node, prop]
args.extend(value_list)
self.tools.Run('fdtput', args)
def Compile(self, arch_dts):
"""Compile an fdt .dts source file into a .dtb binary blob
>>> tools = Tools(cros_output.Output())
>>> tools.PrepareOutputDir(None)
>>> src_path = '../tests/dts'
>>> src = os.path.join(src_path, 'source.dts')
>>> fdt = Fdt(tools, src)
    >>> fdt.Compile('tegra250.dtsi')
>>> os.path.exists(os.path.join(tools.outdir, 'source.dtb'))
True
>>> if os.path.exists('../tests/source.dtb'):
... os.remove('../tests/source.dtb')
# Now check that search paths work
>>> fdt = Fdt(tools, '../tests/source.dts')
    >>> fdt.Compile('tegra250.dtsi') #doctest:+IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
CmdError: Command failed: dtc -I dts -o /tmp/tmpcYO7Fm/source.dtb -O \
dtb -p 4096 ../tests/source.dts
DTC: dts->dtb on file "../tests/source.dts"
FATAL ERROR: Couldn't open "tegra250.dtsi": No such file or directory
<BLANKLINE>
>>> tools.search_paths = ['../tests/dts']
>>> #fdt.Compile()
Args:
arch_dts: Architecture/SOC .dtsi include file.
"""
if not self._is_compiled:
root, ext = os.path.splitext(self.fname)
# crosbug.com/31621
# This is a temporary hack to support upstream U-Boot
# Since it does not have the benefit of the dtc -i option, it uses
# the C preprocessor to find its include files. Here we must perform
# that task manually for the compiler. Since it is just as easy to
# do with the string replace feature, use that.
data = self.tools.ReadFile(self.fname)
fname = self.fname
if 'ARCH_CPU_DTS' in data:
fname = os.path.join(self.tools.outdir, os.path.basename(root) +
'.dts')
data = data.replace('ARCH_CPU_DTS', '"%s"' % arch_dts)
self.tools.WriteFile(fname, data)
# If we don't have a directory, put it in the tools tempdir
out_fname = os.path.join(self.tools.outdir, os.path.basename(root) +
'.dtb')
search_list = []
for path in self.tools.search_paths:
search_list.extend(['-i', path])
args = ['-I', 'dts', '-o', out_fname, '-O', 'dtb', '-p', '4096']
args.extend(search_list)
args.append(fname)
self.tools.Run('dtc', args)
self.fname = out_fname
self._is_compiled = True
def main():
"""Main function for cros_bundle_firmware.
This just lists out the children of the root node, along with all their
properties.
"""
parser = optparse.OptionParser()
parser.add_option('-d', '--dt', dest='fdt', type='string', action='store',
      help='Path to fdt file to use (binary .dtb)', default='u-boot.dtb')
(options, args) = parser.parse_args(sys.argv)
tools = Tools(cros_output.Output())
fdt = Fdt(tools, options.fdt)
children = fdt.GetChildren('/')
for child in children:
print '%s: %s\n' % (child, fdt.GetProps('/' + child))
def _Test(argv):
"""Run any built-in tests."""
import doctest
doctest.testmod()
if __name__ == '__main__':
# If first argument is --test, run testing code.
if sys.argv[1:2] == ["--test"]:
_Test([sys.argv[0]] + sys.argv[2:])
else:
main()
|
KnowNo/reviewboard
|
refs/heads/master
|
reviewboard/diffviewer/management/commands/condensediffs.py
|
4
|
from __future__ import unicode_literals, division
import sys
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.humanize.templatetags.humanize import intcomma
from django.core.management.base import NoArgsCommand
from django.utils.translation import ugettext as _, ungettext_lazy as N_
from reviewboard.diffviewer.models import FileDiff
class Command(NoArgsCommand):
help = ('Condenses the diffs stored in the database, reducing space '
'requirements')
DELAY_SHOW_REMAINING_SECS = 30
TIME_REMAINING_CHUNKS = (
(60 * 60 * 24 * 365, N_('%d year', '%d years')),
(60 * 60 * 24 * 30, N_('%d month', '%d months')),
(60 * 60 * 24 * 7, N_('%d week', '%d weeks')),
(60 * 60 * 24, N_('%d day', '%d days')),
(60 * 60, N_('%d hour', '%d hours')),
(60, N_('%d minute', '%d minutes'))
)
# We add a bunch of spaces in order to override any previous
# content on the line, for when it shrinks.
TIME_REMAINING_STR = _('%s remaining ')
CALC_TIME_REMAINING_STR = _('Calculating time remaining')
def handle_noargs(self, **options):
counts = FileDiff.objects.get_migration_counts()
total_count = counts['total_count']
if total_count == 0:
self.stdout.write(_('All diffs have already been migrated.\n'))
return
self.stdout.write(
_('Processing %(count)d diffs for duplicates...\n'
'\n'
'This may take a while. It is safe to continue using '
'Review Board while this is\n'
'processing, but it may temporarily run slower.\n'
'\n')
% {'count': total_count})
# Don't allow queries to be stored.
settings.DEBUG = False
self.start_time = datetime.now()
self.prev_prefix_len = 0
self.prev_time_remaining_s = ''
self.show_remaining = False
info = FileDiff.objects.migrate_all(self._on_batch_done, counts)
old_diff_size = info['old_diff_size']
new_diff_size = info['new_diff_size']
self.stdout.write(
_('\n'
'\n'
'Condensed stored diffs from %(old_size)s bytes to '
'%(new_size)s bytes (%(savings_pct)0.2f%% savings)\n')
% {
'old_size': intcomma(old_diff_size),
'new_size': intcomma(new_diff_size),
'savings_pct': (float(old_diff_size - new_diff_size) /
float(old_diff_size) * 100),
})
def _on_batch_done(self, processed_count, total_count):
"""Handler for when a batch of diffs are processed.
This will report the progress of the operation, showing the estimated
amount of time remaining.
"""
        pct = processed_count * 100 // total_count
delta = datetime.now() - self.start_time
# XXX: This can be replaced with total_seconds() once we no longer have
# to support Python 2.6
delta_secs = ((delta.microseconds +
(delta.seconds + delta.days * 24 * 3600) * 10 ** 6)
/ 10 ** 6)
if (not self.show_remaining and
delta_secs >= self.DELAY_SHOW_REMAINING_SECS):
self.show_remaining = True
if self.show_remaining:
secs_left = ((delta_secs // processed_count) *
(total_count - processed_count))
time_remaining_s = (self.TIME_REMAINING_STR
% self._time_remaining(secs_left))
else:
time_remaining_s = self.CALC_TIME_REMAINING_STR
prefix_s = ' [%s%%] %s/%s - ' % (pct, processed_count, total_count)
        # NOTE: We use sys.stdout here instead of self.stdout in order
        #       to control newlines. Command.stdout will force a \n for
        #       each write.
sys.stdout.write(prefix_s)
# Only write out the time remaining string if it has changed or
# there's been a shift in the length of the prefix. This reduces
# how much we have to write to the terminal, and how often, by
# a fair amount.
if (self.prev_prefix_len != len(prefix_s) or
self.prev_time_remaining_s != time_remaining_s):
# Something has changed, so output the string and then cache
# the values for the next call.
sys.stdout.write(time_remaining_s)
self.prev_prefix_len = len(prefix_s)
self.prev_time_remaining_s = time_remaining_s
sys.stdout.write('\r')
sys.stdout.flush()
def _time_remaining(self, secs_left):
"""Returns a string representing the time remaining for the operation.
This is a simplified version of Django's timeuntil() function that
does fewer calculations in order to reduce the amount of time we
have to spend every loop. For instance, it doesn't bother with
constructing datetimes and recomputing deltas, since we already
have those, and it doesn't rebuild the TIME_REMAINING_CHUNKS
every time it's called. It also handles seconds.
"""
delta = timedelta(seconds=secs_left)
since = delta.days * 24 * 60 * 60 + delta.seconds
if since < 60:
return N_('%d second', '%d seconds') % since
for i, (seconds, name) in enumerate(self.TIME_REMAINING_CHUNKS):
count = since // seconds
if count != 0:
break
result = name % count
if i + 1 < len(self.TIME_REMAINING_CHUNKS):
seconds2, name2 = self.TIME_REMAINING_CHUNKS[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
result += ', ' + name2 % count2
return result
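# --- Illustrative sketch (not part of the original command) ---
# A minimal, standalone rendition of the TIME_REMAINING_CHUNKS decomposition
# above, using plain strings instead of Django's lazy translations. It keeps
# the same "largest unit, plus at most one adjacent unit" output format.
def _demo_time_remaining(secs_left):
    chunks = (
        (60 * 60 * 24, '%d day(s)'),
        (60 * 60, '%d hour(s)'),
        (60, '%d minute(s)'),
    )
    if secs_left < 60:
        return '%d second(s)' % secs_left
    # Find the largest unit that fits at least once.
    for i, (seconds, name) in enumerate(chunks):
        count = secs_left // seconds
        if count != 0:
            break
    result = name % count
    # Append at most one adjacent, smaller unit.
    if i + 1 < len(chunks):
        seconds2, name2 = chunks[i + 1]
        count2 = (secs_left - seconds * count) // seconds2
        if count2 != 0:
            result += ', ' + name2 % count2
    return result
# _demo_time_remaining(90061) == '1 day(s), 1 hour(s)'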
|
CSC301H-Fall2013/JuakStore
|
refs/heads/master
|
site-packages/django/utils/dates.py
|
488
|
"Commonly-used date structures"
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
WEEKDAYS = {
0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
5:_('Saturday'), 6:_('Sunday')
}
WEEKDAYS_ABBR = {
0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
5:_('Sat'), 6:_('Sun')
}
WEEKDAYS_REV = {
'monday':0, 'tuesday':1, 'wednesday':2, 'thursday':3, 'friday':4,
'saturday':5, 'sunday':6
}
MONTHS = {
1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
12:_('December')
}
MONTHS_3 = {
1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
}
MONTHS_3_REV = {
'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6, 'jul':7, 'aug':8,
'sep':9, 'oct':10, 'nov':11, 'dec':12
}
MONTHS_AP = { # month names in Associated Press style
1: pgettext_lazy('abbrev. month', 'Jan.'),
2: pgettext_lazy('abbrev. month', 'Feb.'),
3: pgettext_lazy('abbrev. month', 'March'),
4: pgettext_lazy('abbrev. month', 'April'),
5: pgettext_lazy('abbrev. month', 'May'),
6: pgettext_lazy('abbrev. month', 'June'),
7: pgettext_lazy('abbrev. month', 'July'),
8: pgettext_lazy('abbrev. month', 'Aug.'),
9: pgettext_lazy('abbrev. month', 'Sept.'),
10: pgettext_lazy('abbrev. month', 'Oct.'),
11: pgettext_lazy('abbrev. month', 'Nov.'),
12: pgettext_lazy('abbrev. month', 'Dec.')
}
MONTHS_ALT = { # required for long date representation by some locales
1: pgettext_lazy('alt. month', 'January'),
2: pgettext_lazy('alt. month', 'February'),
3: pgettext_lazy('alt. month', 'March'),
4: pgettext_lazy('alt. month', 'April'),
5: pgettext_lazy('alt. month', 'May'),
6: pgettext_lazy('alt. month', 'June'),
7: pgettext_lazy('alt. month', 'July'),
8: pgettext_lazy('alt. month', 'August'),
9: pgettext_lazy('alt. month', 'September'),
10: pgettext_lazy('alt. month', 'October'),
11: pgettext_lazy('alt. month', 'November'),
12: pgettext_lazy('alt. month', 'December')
}
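# --- Illustrative sketch (not part of the original module) ---
# Example of formatting a date with these structures; the lazy strings only
# resolve against the active locale once Django settings are configured, so
# this is shown as a comment rather than executable module code.
# import datetime
# d = datetime.date(2013, 9, 2)
# u'%s, %s %d' % (WEEKDAYS[d.weekday()], MONTHS_AP[d.month], d.day)
# -> u'Monday, Sept. 2'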
|
alexkogon/ansible
|
refs/heads/devel
|
v1/ansible/module_utils/basic.py
|
81
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# == BEGIN DYNAMICALLY INSERTED CODE ==
ANSIBLE_VERSION = "<<ANSIBLE_VERSION>>"
MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here
# can be inserted in any module source automatically by including
# #<<INCLUDE_ANSIBLE_MODULE_COMMON>> on a blank line by itself inside
# of an ansible module. The source of this common code lives
# in lib/ansible/module_common.py
import locale
import os
import re
import pipes
import shlex
import subprocess
import sys
import syslog
import types
import time
import select
import shutil
import stat
import tempfile
import traceback
import grp
import pwd
import platform
import errno
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
sys.stderr.write('Error: ansible requires a json module, none found!')
sys.exit(1)
except SyntaxError:
sys.stderr.write('SyntaxError: probably due to the json module being built for a different python version')
sys.exit(1)
HAVE_SELINUX=False
try:
import selinux
HAVE_SELINUX=True
except ImportError:
pass
HAVE_HASHLIB=False
try:
from hashlib import sha1 as _sha1
HAVE_HASHLIB=True
except ImportError:
from sha import sha as _sha1
try:
from hashlib import md5 as _md5
except ImportError:
try:
from md5 import md5 as _md5
except ImportError:
# MD5 unavailable. Possibly FIPS mode
_md5 = None
try:
from hashlib import sha256 as _sha256
except ImportError:
pass
try:
from systemd import journal
has_journal = True
except ImportError:
import syslog
has_journal = False
try:
from ast import literal_eval as _literal_eval
except ImportError:
# a replacement for literal_eval that works with python 2.4. from:
# https://mail.python.org/pipermail/python-list/2009-September/551880.html
# which is essentially a cut/paste from an earlier (2.6) version of python's
# ast.py
from compiler import parse
from compiler.ast import *
def _literal_eval(node_or_string):
"""
Safely evaluate an expression node or a string containing a Python
expression. The string or node provided may only consist of the following
Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
and None.
"""
_safe_names = {'None': None, 'True': True, 'False': False}
if isinstance(node_or_string, basestring):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.node
def _convert(node):
if isinstance(node, Const) and isinstance(node.value, (basestring, int, float, long, complex)):
return node.value
elif isinstance(node, Tuple):
return tuple(map(_convert, node.nodes))
elif isinstance(node, List):
return list(map(_convert, node.nodes))
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v in node.items)
elif isinstance(node, Name):
if node.name in _safe_names:
return _safe_names[node.name]
elif isinstance(node, UnarySub):
return -_convert(node.expr)
raise ValueError('malformed string')
return _convert(node_or_string)
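# --- Illustrative sketch (not part of the original module) ---
# Either implementation of _literal_eval bound above behaves like this:
# _literal_eval("{'a': [1, 2], 'b': (True, None)}")
# -> {'a': [1, 2], 'b': (True, None)}
# _literal_eval("__import__('os')")  # raises ValueError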
FILE_COMMON_ARGUMENTS=dict(
src = dict(),
mode = dict(),
owner = dict(),
group = dict(),
seuser = dict(),
serole = dict(),
selevel = dict(),
setype = dict(),
follow = dict(type='bool', default=False),
# not taken by the file module, but other modules call file so it must ignore them.
content = dict(no_log=True),
backup = dict(),
force = dict(),
remote_src = dict(), # used by assemble
regexp = dict(), # used by assemble
delimiter = dict(), # used by assemble
directory_mode = dict(), # used by copy
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
def get_platform():
''' what's the platform? example: Linux is a platform. '''
return platform.system()
def get_distribution():
''' return the distribution name '''
if platform.system() == 'Linux':
try:
supported_dists = platform._supported_dists + ('arch',)
distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
if not distribution and os.path.isfile('/etc/system-release'):
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
if 'Amazon' in distribution:
distribution = 'Amazon'
else:
distribution = 'OtherLinux'
except:
# FIXME: MethodMissing, I assume?
distribution = platform.dist()[0].capitalize()
else:
distribution = None
return distribution
def get_distribution_version():
''' return the distribution version '''
if platform.system() == 'Linux':
try:
distribution_version = platform.linux_distribution()[1]
if not distribution_version and os.path.isfile('/etc/system-release'):
distribution_version = platform.linux_distribution(supported_dists=['system'])[1]
except:
# FIXME: MethodMissing, I assume?
distribution_version = platform.dist()[1]
else:
distribution_version = None
return distribution_version
def load_platform_subclass(cls, *args, **kwargs):
'''
used by modules like User to have different implementations based on detected platform. See User
module for an example.
'''
this_platform = get_platform()
distribution = get_distribution()
subclass = None
# get the most specific superclass for this platform
if distribution is not None:
for sc in cls.__subclasses__():
if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform:
subclass = sc
if subclass is None:
for sc in cls.__subclasses__():
if sc.platform == this_platform and sc.distribution is None:
subclass = sc
if subclass is None:
subclass = cls
return super(cls, subclass).__new__(subclass)
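# --- Illustrative sketch (not part of the original module) ---
# How a module class is wired up to load_platform_subclass(); the names
# _DemoService and _DemoFreeBsdService are invented for illustration, but
# the shape matches how the real User module uses this helper.
class _DemoService(object):
    platform = 'Generic'
    distribution = None
    def __new__(cls, *args, **kwargs):
        return load_platform_subclass(_DemoService, args, kwargs)
class _DemoFreeBsdService(_DemoService):
    # chosen automatically when get_platform() returns 'FreeBSD'
    platform = 'FreeBSD'
    distribution = None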
def json_dict_unicode_to_bytes(d):
''' Recursively convert dict keys and values to byte str
Specialized for json return because this only handles lists, tuples,
and dict container types (the containers that the json module returns)
'''
if isinstance(d, unicode):
return d.encode('utf-8')
elif isinstance(d, dict):
return dict(map(json_dict_unicode_to_bytes, d.iteritems()))
elif isinstance(d, list):
return list(map(json_dict_unicode_to_bytes, d))
elif isinstance(d, tuple):
return tuple(map(json_dict_unicode_to_bytes, d))
else:
return d
def json_dict_bytes_to_unicode(d):
''' Recursively convert dict keys and values to unicode strings
Specialized for json return because this only handles lists, tuples,
and dict container types (the containers that the json module returns)
'''
if isinstance(d, str):
return unicode(d, 'utf-8')
elif isinstance(d, dict):
return dict(map(json_dict_bytes_to_unicode, d.iteritems()))
elif isinstance(d, list):
return list(map(json_dict_bytes_to_unicode, d))
elif isinstance(d, tuple):
return tuple(map(json_dict_bytes_to_unicode, d))
else:
return d
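# --- Illustrative sketch (not part of the original module) ---
# json.loads() hands back unicode text; the helpers above convert a whole
# structure in one call:
# json_dict_unicode_to_bytes(json.loads(u'{"path": "/tmp/x"}'))
# -> {'path': '/tmp/x'}   (keys and values are now byte strings)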
def heuristic_log_sanitize(data):
''' Remove strings that look like passwords from log messages '''
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
# not passwds
# begin: start of a passwd containing string
# end: end of a passwd containing string
# sep: char between user and passwd
# prev_begin: where in the overall string to start a search for
# a passwd
# sep_search_end: where in the string to end a search for the sep
output = []
begin = len(data)
prev_begin = begin
sep = 1
while sep:
# Find the potential end of a passwd
try:
end = data.rindex('@', 0, begin)
except ValueError:
# No passwd in the rest of the data
output.insert(0, data[0:begin])
break
# Search for the beginning of a passwd
sep = None
sep_search_end = end
while not sep:
# URL-style username+password
try:
begin = data.rindex('://', 0, sep_search_end)
except ValueError:
# No url style in the data, check for ssh style in the
# rest of the string
begin = 0
# Search for separator
try:
sep = data.index(':', begin + 3, end)
except ValueError:
# No separator; choices:
if begin == 0:
# Searched the whole string so there's no password
# here. Return the remaining data
output.insert(0, data[0:begin])
break
# Search for a different beginning of the password field.
sep_search_end = begin
continue
if sep:
# Password was found; remove it.
output.insert(0, data[end:prev_begin])
output.insert(0, '********')
output.insert(0, data[begin:sep + 1])
prev_begin = begin
return ''.join(output)
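# --- Illustrative sketch (not part of the original module) ---
# Expected behaviour on URL-style credentials:
# heuristic_log_sanitize('http://user:secret@example.com/path')
# -> 'http://user:********@example.com/path'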
class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
required_one_of=None, add_file_common_args=False, supports_check_mode=False,
required_if=None):
'''
common code for quickly building an ansible module in Python
(although you can write modules in anything that can return JSON)
see library/* for examples
'''
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
self.no_log = no_log
self.cleanup_files = []
self.aliases = {}
if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.iteritems():
if k not in self.argument_spec:
self.argument_spec[k] = v
# check the locale as set by the current environment, and
# reset to LANG=C if it's an invalid/unavailable locale
self._check_locale()
(self.params, self.args) = self._load_params()
self._legal_inputs = ['CHECKMODE', 'NO_LOG']
self.aliases = self._handle_aliases()
if check_invalid_arguments:
self._check_invalid_arguments()
self._check_for_check_mode()
self._check_for_no_log()
# check exclusive early
if not bypass_checks:
self._check_mutually_exclusive(mutually_exclusive)
self._set_defaults(pre=True)
if not bypass_checks:
self._check_required_arguments()
self._check_argument_values()
self._check_argument_types()
self._check_required_together(required_together)
self._check_required_one_of(required_one_of)
self._check_required_if(required_if)
self._set_defaults(pre=False)
if not self.no_log:
self._log_invocation()
# finally, make sure we're in a sane working dir
self._set_cwd()
def load_file_common_arguments(self, params):
'''
many modules deal with files, this encapsulates common
options that the file module accepts such that it is directly
available to all modules and they can share code.
'''
path = params.get('path', params.get('dest', None))
if path is None:
return {}
else:
path = os.path.expanduser(path)
# if the path is a symlink, and we're following links, get
# the target of the link instead for testing
if params.get('follow', False) and os.path.islink(path):
path = os.path.realpath(path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
secontext.append(selevel)
default_secontext = self.selinux_default_context(path)
for i in range(len(default_secontext)):
# 'i' is a loop index and can never be None; guard against the default
# context entry itself being unavailable instead.
if default_secontext[i] is not None and secontext[i] == '_default':
secontext[i] = default_secontext[i]
return dict(
path=path, mode=mode, owner=owner, group=group,
seuser=seuser, serole=serole, setype=setype,
selevel=selevel, secontext=secontext,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if not HAVE_SELINUX:
return False
if selinux.is_selinux_mls_enabled() == 1:
return True
else:
return False
def selinux_enabled(self):
if not HAVE_SELINUX:
seenabled = self.get_bin_path('selinuxenabled')
if seenabled is not None:
(rc,out,err) = self.run_command(seenabled)
if rc == 0:
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
return False
if selinux.is_selinux_enabled() == 1:
return True
else:
return False
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
context = [None, None, None]
if self.selinux_mls_enabled():
context.append(None)
return context
def _to_filesystem_str(self, path):
'''Returns filesystem path as a str, if it wasn't already.
Used in selinux interactions because it cannot accept unicode
instances, and specifying complex args in a playbook leaves
you with unicode instances. This method currently assumes
that your filesystem encoding is UTF-8.
'''
if isinstance(path, unicode):
path = path.encode("utf-8")
return path
# If selinux fails to find a default, return an array of None
def selinux_default_context(self, path, mode=0):
context = self.selinux_initial_context()
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
ret = selinux.matchpathcon(self._to_filesystem_str(path), mode)
except OSError:
return context
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def selinux_context(self, path):
context = self.selinux_initial_context()
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
ret = selinux.lgetfilecon_raw(self._to_filesystem_str(path))
except OSError, e:
if e.errno == errno.ENOENT:
self.fail_json(path=path, msg='path %s does not exist' % path)
else:
self.fail_json(path=path, msg='failed to retrieve selinux context')
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def user_and_group(self, filename):
filename = os.path.expanduser(filename)
st = os.lstat(filename)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
path = os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
def is_special_selinux_path(self, path):
"""
Returns a tuple containing (True, selinux_context) if the given path is on a
NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
mount_data = f.readlines()
f.close()
except:
return (False, None)
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
if path_mount_point == mount_point:
for fs in SELINUX_SPECIAL_FS.split(','):
if fs in fstype:
special_context = self.selinux_context(path_mount_point)
return (True, special_context)
return (False, None)
def set_default_selinux_context(self, path, changed):
if not HAVE_SELINUX or not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
def set_context_if_different(self, path, context, changed):
if not HAVE_SELINUX or not self.selinux_enabled():
return changed
cur_context = self.selinux_context(path)
new_context = list(cur_context)
# Iterate over the current context instead of the
# argument context, which may have selevel.
(is_special_se, sp_context) = self.is_special_selinux_path(path)
if is_special_se:
new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
if context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
try:
if self.check_mode:
return True
rc = selinux.lsetfilecon(self._to_filesystem_str(path),
str(':'.join(new_context)))
except OSError:
self.fail_json(path=path, msg='invalid selinux context', new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
return changed
def set_owner_if_different(self, path, owner, changed):
path = os.path.expanduser(path)
if owner is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if self.check_mode:
return True
try:
os.lchown(path, uid, -1)
except OSError:
self.fail_json(path=path, msg='chown failed')
changed = True
return changed
def set_group_if_different(self, path, group, changed):
path = os.path.expanduser(path)
if group is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if self.check_mode:
return True
try:
os.lchown(path, -1, gid)
except OSError:
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
def set_mode_if_different(self, path, mode, changed):
path = os.path.expanduser(path)
path_stat = os.lstat(path)
if mode is None:
return changed
if not isinstance(mode, int):
try:
mode = int(mode, 8)
except Exception:
try:
mode = self._symbolic_mode_to_octal(path_stat, mode)
except Exception, e:
self.fail_json(path=path,
msg="mode must be in octal or symbolic form",
details=str(e))
prev_mode = stat.S_IMODE(path_stat.st_mode)
if prev_mode != mode:
if self.check_mode:
return True
# FIXME: comparison against string above will cause this to be executed
# every time
try:
if hasattr(os, 'lchmod'):
os.lchmod(path, mode)
else:
if not os.path.islink(path):
os.chmod(path, mode)
else:
# Attempt to set the perms of the symlink but be
# careful not to change the perms of the underlying
# file while trying
underlying_stat = os.stat(path)
os.chmod(path, mode)
new_underlying_stat = os.stat(path)
if underlying_stat.st_mode != new_underlying_stat.st_mode:
os.chmod(path, stat.S_IMODE(underlying_stat.st_mode))
q_stat = os.stat(path)
except OSError, e:
if os.path.islink(path) and e.errno == errno.EPERM: # Can't set mode on symbolic links
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise e
except Exception, e:
self.fail_json(path=path, msg='chmod failed', details=str(e))
path_stat = os.lstat(path)
new_mode = stat.S_IMODE(path_stat.st_mode)
if new_mode != prev_mode:
changed = True
return changed
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
new_mode = stat.S_IMODE(path_stat.st_mode)
mode_re = re.compile(r'^(?P<users>[ugoa]+)(?P<operator>[-+=])(?P<perms>[rwxXst]*|[ugo])$')
for mode in symbolic_mode.split(','):
match = mode_re.match(mode)
if match:
users = match.group('users')
operator = match.group('operator')
perms = match.group('perms')
if users == 'a': users = 'ugo'
for user in users:
mode_to_apply = self._get_octal_mode_from_symbolic_perms(path_stat, user, perms)
new_mode = self._apply_operation_to_mode(user, operator, mode_to_apply, new_mode)
else:
raise ValueError("bad symbolic permission for mode: %s" % mode)
return new_mode
def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u': mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g': mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o': mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ 07777
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
def _get_octal_mode_from_symbolic_perms(self, path_stat, user, perms):
prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & 00111) > 0
apply_X_permission = is_directory or has_x_permissions
# Permission bits constants documented at:
# http://docs.python.org/2/library/stat.html#stat.S_ISUID
if apply_X_permission:
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH}
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0}
}
user_perms_to_modes = {
'u': {
'r': stat.S_IRUSR,
'w': stat.S_IWUSR,
'x': stat.S_IXUSR,
's': stat.S_ISUID,
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6 },
'g': {
'r': stat.S_IRGRP,
'w': stat.S_IWGRP,
'x': stat.S_IXGRP,
's': stat.S_ISGID,
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3 },
'o': {
'r': stat.S_IROTH,
'w': stat.S_IWOTH,
'x': stat.S_IXOTH,
's': 0,
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO }
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
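# --- Illustrative sketch (not part of the original module) ---
# Expected results of _symbolic_mode_to_octal() for a regular file whose
# current mode is 0644:
#   'u+x'   -> 0744
#   'go-r'  -> 0600
#   'a+X'   -> 0644 (X only adds execute bits for directories, or for
#              files that already have at least one execute bit set)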
def set_fs_attributes_if_different(self, file_args, changed):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed
)
return changed
def set_directory_attributes_if_different(self, file_args, changed):
return self.set_fs_attributes_if_different(file_args, changed)
def set_file_attributes_if_different(self, file_args, changed):
return self.set_fs_attributes_if_different(file_args, changed)
def add_path_info(self, kwargs):
'''
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
return kwargs
if os.path.exists(path):
(uid, gid) = self.user_and_group(path)
kwargs['uid'] = uid
kwargs['gid'] = gid
try:
user = pwd.getpwuid(uid)[0]
except KeyError:
user = str(uid)
try:
group = grp.getgrgid(gid)[0]
except KeyError:
group = str(gid)
kwargs['owner'] = user
kwargs['group'] = group
st = os.lstat(path)
kwargs['mode'] = oct(stat.S_IMODE(st[stat.ST_MODE]))
# secontext not yet supported
if os.path.islink(path):
kwargs['state'] = 'link'
elif os.path.isdir(path):
kwargs['state'] = 'directory'
elif os.stat(path).st_nlink > 1:
kwargs['state'] = 'hard'
else:
kwargs['state'] = 'file'
if HAVE_SELINUX and self.selinux_enabled():
kwargs['secontext'] = ':'.join(self.selinux_context(path))
kwargs['size'] = st[stat.ST_SIZE]
else:
kwargs['state'] = 'absent'
return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error, e:
# fallback to the 'C' locale, which may cause unicode
# issues but is preferable to simply failing because
# of an unknown locale
locale.setlocale(locale.LC_ALL, 'C')
os.environ['LANG'] = 'C'
os.environ['LC_CTYPE'] = 'C'
os.environ['LC_MESSAGES'] = 'C'
except Exception, e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
def _handle_aliases(self):
aliases_results = {} #alias:canon
for (k,v) in self.argument_spec.iteritems():
self._legal_inputs.append(k)
aliases = v.get('aliases', None)
default = v.get('default', None)
required = v.get('required', False)
if default is not None and required:
# not alias specific but this is a good place to check this
self.fail_json(msg="internal error: required and default are mutually exclusive for %s" % k)
if aliases is None:
continue
if type(aliases) != list:
self.fail_json(msg='internal error: aliases must be a list')
for alias in aliases:
self._legal_inputs.append(alias)
aliases_results[alias] = k
if alias in self.params:
self.params[k] = self.params[alias]
return aliases_results
def _check_for_check_mode(self):
for (k,v) in self.params.iteritems():
if k == 'CHECKMODE':
if not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module does not support check mode")
if self.supports_check_mode:
self.check_mode = True
def _check_for_no_log(self):
for (k,v) in self.params.iteritems():
if k == 'NO_LOG':
self.no_log = self.boolean(v)
def _check_invalid_arguments(self):
for (k,v) in self.params.iteritems():
# these should be in legal inputs already
#if k in ('CHECKMODE', 'NO_LOG'):
# continue
if k not in self._legal_inputs:
self.fail_json(msg="unsupported parameter for module: %s" % k)
def _count_terms(self, check):
count = 0
for term in check:
if term in self.params:
count += 1
return count
def _check_mutually_exclusive(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count > 1:
self.fail_json(msg="parameters are mutually exclusive: %s" % check)
def _check_required_one_of(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count == 0:
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
def _check_required_together(self, spec):
if spec is None:
return
for check in spec:
counts = [ self._count_terms([field]) for field in check ]
non_zero = [ c for c in counts if c > 0 ]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % check)
def _check_required_arguments(self):
''' ensure all required arguments are present '''
missing = []
for (k,v) in self.argument_spec.iteritems():
required = v.get('required', False)
if required and k not in self.params:
missing.append(k)
if len(missing) > 0:
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
def _check_required_if(self, spec):
''' ensure that parameters which are conditionally required are present '''
if spec is None:
return
for (key, val, requirements) in spec:
missing = []
if key in self.params and self.params[key] == val:
for check in requirements:
count = self._count_terms(check)
if count == 0:
missing.append(check)
if len(missing) > 0:
self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
def _check_argument_values(self):
''' ensure all arguments have the requested values, and there are no stray arguments '''
for (k,v) in self.argument_spec.iteritems():
choices = v.get('choices',None)
if choices is None:
continue
if type(choices) == list:
if k in self.params:
if self.params[k] not in choices:
choices_str=",".join([str(c) for c in choices])
msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
self.fail_json(msg=msg)
else:
self.fail_json(msg="internal error: do not know how to interpret argument_spec")
def safe_eval(self, str, locals=None, include_exceptions=False):
# do not allow method calls to modules
if not isinstance(str, basestring):
# already templated to a datastructure, perhaps?
if include_exceptions:
return (str, None)
return str
if re.search(r'\w\.\w+\(', str):
if include_exceptions:
return (str, None)
return str
# do not allow imports
if re.search(r'import \w+', str):
if include_exceptions:
return (str, None)
return str
try:
# NOTE: _literal_eval() evaluates literal structures only and accepts a
# single argument, so a supplied locals dict cannot be consulted here.
result = _literal_eval(str)
if include_exceptions:
return (result, None)
else:
return result
except Exception, e:
if include_exceptions:
return (str, e)
return str
def _check_argument_types(self):
''' ensure all arguments have the requested type '''
for (k, v) in self.argument_spec.iteritems():
wanted = v.get('type', None)
if wanted is None:
continue
if k not in self.params:
continue
value = self.params[k]
is_invalid = False
try:
if wanted == 'str':
if not isinstance(value, basestring):
self.params[k] = str(value)
elif wanted == 'list':
if not isinstance(value, list):
if isinstance(value, basestring):
self.params[k] = value.split(",")
elif isinstance(value, int) or isinstance(value, float):
self.params[k] = [ str(value) ]
else:
is_invalid = True
elif wanted == 'dict':
if not isinstance(value, dict):
if isinstance(value, basestring):
if value.startswith("{"):
try:
self.params[k] = json.loads(value)
except:
(result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
if exc is not None:
self.fail_json(msg="unable to evaluate dictionary for %s" % k)
self.params[k] = result
elif '=' in value:
self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
else:
self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
else:
is_invalid = True
elif wanted == 'bool':
if not isinstance(value, bool):
if isinstance(value, basestring):
self.params[k] = self.boolean(value)
else:
is_invalid = True
elif wanted == 'int':
if not isinstance(value, int):
if isinstance(value, basestring):
self.params[k] = int(value)
else:
is_invalid = True
elif wanted == 'float':
if not isinstance(value, float):
if isinstance(value, basestring):
self.params[k] = float(value)
else:
is_invalid = True
else:
self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
if is_invalid:
self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
except ValueError, e:
self.fail_json(msg="value of argument %s is not of type %s and we were unable to automatically convert" % (k, wanted))
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.iteritems():
default = v.get('default', None)
if pre == True:
# this prevents setting defaults on required items
if default is not None and k not in self.params:
self.params[k] = default
else:
# make sure things without a default still get set None
if k not in self.params:
self.params[k] = default
def _load_params(self):
''' read the input and return a dictionary and the arguments string '''
args = MODULE_ARGS
items = shlex.split(args)
params = {}
for x in items:
try:
(k, v) = x.split("=",1)
except Exception, e:
self.fail_json(msg="this module requires key=value arguments (%s)" % (items))
if k in params:
self.fail_json(msg="duplicate parameter: %s (value=%s)" % (k, v))
params[k] = v
params2 = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
params2.update(params)
return (params2, args)
def _log_invocation(self):
''' log that ansible ran the module '''
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
passwd_keys = ['password', 'login_password']
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', False)
if self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
elif param in passwd_keys:
log_args[param] = 'NOT_LOGGING_PASSWORD'
else:
param_val = self.params[param]
if not isinstance(param_val, basestring):
param_val = str(param_val)
elif isinstance(param_val, unicode):
param_val = param_val.encode('utf-8')
log_args[param] = heuristic_log_sanitize(param_val)
module = 'ansible-%s' % os.path.basename(__file__)
msg = []
for arg in log_args:
arg_val = log_args[arg]
if not isinstance(arg_val, basestring):
arg_val = str(arg_val)
elif isinstance(arg_val, unicode):
arg_val = arg_val.encode('utf-8')
msg.append('%s=%s ' % (arg, arg_val))
if msg:
msg = 'Invoked with %s' % ''.join(msg)
else:
msg = 'Invoked'
# 6655 - allow for accented characters
if isinstance(msg, unicode):
# We should never get here as msg should be type str, not unicode
msg = msg.encode('utf-8')
if (has_journal):
journal_args = [("MODULE", os.path.basename(__file__))]
for arg in log_args:
journal_args.append((arg.upper(), str(log_args[arg])))
try:
journal.send("%s %s" % (module, msg), **dict(journal_args))
except IOError, e:
# fall back to syslog since logging to journal failed
syslog.openlog(str(module), 0, syslog.LOG_USER)
syslog.syslog(syslog.LOG_NOTICE, msg) #1
else:
syslog.openlog(str(module), 0, syslog.LOG_USER)
syslog.syslog(syslog.LOG_NOTICE, msg) #2
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK|os.R_OK):
raise
return cwd
except:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK|os.R_OK):
os.chdir(cwd)
return cwd
except:
pass
# we won't error here, as it may *not* be a problem,
# and we don't want to break modules unnecessarily
return None
def get_bin_path(self, arg, required=False, opt_dirs=[]):
'''
find system executable in PATH.
Optional arguments:
- required: if executable is not found and required is true, fail_json
- opt_dirs: optional list of directories to search in addition to PATH
if found return full path; otherwise return None
'''
sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
paths = []
for d in opt_dirs:
if d is not None and os.path.exists(d):
paths.append(d)
paths += os.environ.get('PATH', '').split(os.pathsep)
bin_path = None
# mangle PATH to include /sbin dirs
for p in sbin_paths:
if p not in paths and os.path.exists(p):
paths.append(p)
for d in paths:
path = os.path.join(d, arg)
if os.path.exists(path) and self.is_executable(path):
bin_path = path
break
if required and bin_path is None:
self.fail_json(msg='Failed to find required executable %s' % arg)
return bin_path
def boolean(self, arg):
''' return a bool for the arg '''
if arg is None or type(arg) == bool:
return arg
if type(arg) in types.StringTypes:
arg = arg.lower()
if arg in BOOLEANS_TRUE:
return True
elif arg in BOOLEANS_FALSE:
return False
else:
self.fail_json(msg='Boolean %s not in either boolean list' % arg)
def jsonify(self, data):
for encoding in ("utf-8", "latin-1", "unicode_escape"):
try:
return json.dumps(data, encoding=encoding)
# Old systems using the simplejson module do not support the encoding keyword.
except TypeError, e:
return json.dumps(data)
except UnicodeDecodeError, e:
continue
self.fail_json(msg='Invalid unicode encoding encountered')
def from_json(self, data):
return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
def do_cleanup_files(self):
for path in self.cleanup_files:
self.cleanup(path)
def exit_json(self, **kwargs):
''' return from the module, without error '''
self.add_path_info(kwargs)
if not 'changed' in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
print self.jsonify(kwargs)
sys.exit(0)
def fail_json(self, **kwargs):
''' return from the module, with an error message '''
self.add_path_info(kwargs)
assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
kwargs['failed'] = True
self.do_cleanup_files()
print self.jsonify(kwargs)
sys.exit(1)
def is_executable(self, path):
'''is the given path executable?'''
return (stat.S_IXUSR & os.stat(path)[stat.ST_MODE]
or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])
def digest_from_file(self, filename, digest_method):
''' Return hex digest of local file for a given digest_method, or None if file is not present. '''
if not os.path.exists(filename):
return None
if os.path.isdir(filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
digest = digest_method
blocksize = 64 * 1024
infile = open(filename, 'rb')
block = infile.read(blocksize)
while block:
digest.update(block)
block = infile.read(blocksize)
infile.close()
return digest.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
2) Compatibility with a third party protocol
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
if not _md5:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, _md5())
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, _sha1())
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
if not HAVE_HASHLIB:
self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
return self.digest_from_file(filename, _sha256())
def backup_local(self, fn):
'''make a date-marked backup of the specified file and return the backup path, or an empty string if the file does not exist'''
backupdest = ''
if os.path.exists(fn):
# backups named basename-YYYY-MM-DD@HH:MM:SS~
ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
backupdest = '%s.%s' % (fn, ext)
try:
shutil.copy2(fn, backupdest)
except (shutil.Error, IOError), e:
self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
return backupdest
def cleanup(self, tmpfile):
if os.path.exists(tmpfile):
try:
os.unlink(tmpfile)
except OSError, e:
sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
def atomic_move(self, src, dest):
'''atomically move src to dest, copying attributes from dest.
It uses os.rename, which is an atomic operation; the rest of the function
works around limitations and corner cases, and preserves the selinux
context of dest where possible.'''
context = None
dest_stat = None
if os.path.exists(dest):
try:
dest_stat = os.stat(dest)
os.chmod(src, dest_stat.st_mode & 07777)
os.chown(src, dest_stat.st_uid, dest_stat.st_gid)
except OSError, e:
if e.errno != errno.EPERM:
raise
if self.selinux_enabled():
context = self.selinux_context(dest)
else:
if self.selinux_enabled():
context = self.selinux_default_context(dest)
creating = not os.path.exists(dest)
try:
login_name = os.getlogin()
except OSError:
# not having a tty can cause the above to fail, so
# just get the LOGNAME environment variable instead
login_name = os.environ.get('LOGNAME', None)
# if the original login_name doesn't match the currently
# logged-in user, or if the SUDO_USER environment variable
# is set, then this user has switched their credentials
switched_user = login_name and login_name != pwd.getpwuid(os.getuid())[0] or os.environ.get('SUDO_USER')
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(src, dest)
except (IOError,OSError), e:
# only try workarounds for errno 18 (cross device), 1 (not permitted) and 13 (permission denied)
if e.errno != errno.EPERM and e.errno != errno.EXDEV and e.errno != errno.EACCES:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
dest_dir = os.path.dirname(dest)
dest_file = os.path.basename(dest)
try:
tmp_dest = tempfile.NamedTemporaryFile(
prefix=".ansible_tmp", dir=dest_dir, suffix=dest_file)
except (OSError, IOError), e:
self.fail_json(msg='The destination directory (%s) is not writable by the current user.' % dest_dir)
try: # leaves tmp file behind when sudo and not root
if switched_user and os.getuid() != 0:
# cleanup will happen by 'rm' of tempdir
# copy2 will preserve some metadata
shutil.copy2(src, tmp_dest.name)
else:
shutil.move(src, tmp_dest.name)
if self.selinux_enabled():
self.set_context_if_different(
tmp_dest.name, context, False)
try:
tmp_stat = os.stat(tmp_dest.name)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(tmp_dest.name, dest_stat.st_uid, dest_stat.st_gid)
except OSError, e:
if e.errno != errno.EPERM:
raise
os.rename(tmp_dest.name, dest)
except (shutil.Error, OSError, IOError), e:
self.cleanup(tmp_dest.name)
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
if creating:
# make sure the file has the correct permissions
# based on the current value of umask
umask = os.umask(0)
os.umask(umask)
os.chmod(dest, 0666 & ~umask)
if switched_user:
os.chown(dest, os.getuid(), os.getgid())
if self.selinux_enabled():
# rename might not preserve context
self.set_context_if_different(dest, context, False)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None):
'''
Execute a command, returns rc, stdout, and stderr.
args is the command to run
If args is a list, the command will be run with shell=False.
If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
If args is a string and use_unsafe_shell=True it is run with shell=True.
Other arguments:
- check_rc (boolean) Whether to call fail_json in case of
non zero RC. Default is False.
- close_fds (boolean) See documentation for subprocess.Popen().
Default is True.
- executable (string) See documentation for subprocess.Popen().
Default is None.
- prompt_regex (string) A regex string (not a compiled regex) which
can be used to detect prompts in the stdout
which would otherwise cause the execution
to hang (especially if no input data is
specified)
'''
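# --- Illustrative sketch (not part of the original module) ---
# Typical calls, assuming 'module' is an AnsibleModule instance:
#   rc, out, err = module.run_command(['/bin/ls', '-l', '/tmp'])
#   rc, out, err = module.run_command('ls /tmp | wc -l',
#                                     use_unsafe_shell=True)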
shell = False
if isinstance(args, list):
if use_unsafe_shell:
args = " ".join([pipes.quote(x) for x in args])
shell = True
elif isinstance(args, basestring) and use_unsafe_shell:
shell = True
elif isinstance(args, basestring):
args = shlex.split(args.encode('utf-8'))
else:
msg = "Argument 'args' to run_command must be list or string"
self.fail_json(rc=257, cmd=args, msg=msg)
prompt_re = None
if prompt_regex:
try:
prompt_re = re.compile(prompt_regex, re.MULTILINE)
except re.error:
self.fail_json(msg="invalid prompt regular expression given to run_command")
# expand things like $HOME and ~
if not shell:
args = [ os.path.expandvars(os.path.expanduser(x)) for x in args ]
rc = 0
msg = None
st_in = None
# Set a temporary PATH in the environment if a prefix is passed
env=os.environ
if path_prefix:
env['PATH']="%s:%s" % (path_prefix, env['PATH'])
# create a printable version of the command for use
# in reporting later, which strips out things like
# passwords from the args list
if isinstance(args, basestring):
if isinstance(args, unicode):
b_args = args.encode('utf-8')
else:
b_args = args
to_clean_args = shlex.split(b_args)
del b_args
else:
to_clean_args = args
clean_args = []
is_passwd = False
for arg in to_clean_args:
if is_passwd:
is_passwd = False
clean_args.append('********')
continue
if PASSWD_ARG_RE.match(arg):
sep_idx = arg.find('=')
if sep_idx > -1:
clean_args.append('%s=********' % arg[:sep_idx])
continue
else:
is_passwd = True
clean_args.append(heuristic_log_sanitize(arg))
clean_args = ' '.join(pipes.quote(arg) for arg in clean_args)
if data:
st_in = subprocess.PIPE
kwargs = dict(
executable=executable,
shell=shell,
close_fds=close_fds,
stdin=st_in,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
if path_prefix:
kwargs['env'] = env
if cwd and os.path.isdir(cwd):
kwargs['cwd'] = cwd
# store the pwd
prev_dir = os.getcwd()
# make sure we're in the right working directory
if cwd and os.path.isdir(cwd):
try:
os.chdir(cwd)
except (OSError, IOError), e:
self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, str(e)))
try:
cmd = subprocess.Popen(args, **kwargs)
# the communication logic here is essentially taken from that
# of the _communicate() function in ssh.py
stdout = ''
stderr = ''
rpipes = [cmd.stdout, cmd.stderr]
if data:
if not binary_data:
data += '\n'
cmd.stdin.write(data)
cmd.stdin.close()
while True:
rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)
if cmd.stdout in rfd:
dat = os.read(cmd.stdout.fileno(), 9000)
stdout += dat
if dat == '':
rpipes.remove(cmd.stdout)
if cmd.stderr in rfd:
dat = os.read(cmd.stderr.fileno(), 9000)
stderr += dat
if dat == '':
rpipes.remove(cmd.stderr)
# if we're checking for prompts, do it now
if prompt_re:
if prompt_re.search(stdout) and not data:
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# only break out if no pipes are left to read or
# the pipes are completely read and
# the process is terminated
if (not rpipes or not rfd) and cmd.poll() is not None:
break
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if rpipes is empty
elif not rpipes and cmd.poll() == None:
cmd.wait()
# The process is terminated. Since no pipes to read from are
# left, there is no need to call select() again.
break
cmd.stdout.close()
cmd.stderr.close()
rc = cmd.returncode
except (OSError, IOError), e:
self.fail_json(rc=e.errno, msg=str(e), cmd=clean_args)
except:
self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args)
if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip())
self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)
# reset the pwd
os.chdir(prev_dir)
return (rc, stdout, stderr)
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def pretty_bytes(self,size):
ranges = (
(1<<70L, 'ZB'),
(1<<60L, 'EB'),
(1<<50L, 'PB'),
(1<<40L, 'TB'),
(1<<30L, 'GB'),
(1<<20L, 'MB'),
(1<<10L, 'KB'),
(1, 'Bytes')
)
for limit, suffix in ranges:
if size >= limit:
break
return '%.2f %s' % (float(size) / limit, suffix)
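# --- Illustrative sketch (not part of the original module) ---
# pretty_bytes(1536)          -> '1.50 KB'
# pretty_bytes(3 * (1 << 30)) -> '3.00 GB'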
def get_module_path():
return os.path.dirname(os.path.realpath(__file__))
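# --- Illustrative sketch (not part of the original module) ---
# The canonical shape of a module built on this common code; the argument
# names here are invented for illustration. Real modules define main()
# below the dynamically inserted code and call it at the bottom of the file.
# def main():
#     module = AnsibleModule(
#         argument_spec=dict(
#             name=dict(required=True),
#             state=dict(default='present', choices=['present', 'absent']),
#         ),
#         supports_check_mode=True,
#     )
#     module.exit_json(changed=False, name=module.params['name'])
# main()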
|
smartfile/django-1.4
|
refs/heads/master
|
django/db/backends/oracle/introspection.py
|
155
|
from django.db.backends import BaseDatabaseIntrospection
import cx_Oracle
import re
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Maps type objects to Django Field types.
data_types_reverse = {
cx_Oracle.CLOB: 'TextField',
cx_Oracle.DATETIME: 'DateField',
cx_Oracle.FIXED_CHAR: 'CharField',
cx_Oracle.NCLOB: 'TextField',
cx_Oracle.NUMBER: 'DecimalField',
cx_Oracle.STRING: 'CharField',
cx_Oracle.TIMESTAMP: 'DateTimeField',
}
try:
data_types_reverse[cx_Oracle.NATIVE_FLOAT] = 'FloatField'
except AttributeError:
pass
try:
data_types_reverse[cx_Oracle.UNICODE] = 'CharField'
except AttributeError:
pass
def get_field_type(self, data_type, description):
# If it's a NUMBER with scale == 0, consider it an IntegerField
if data_type == cx_Oracle.NUMBER and description[5] == 0:
if description[4] > 11:
return 'BigIntegerField'
else:
return 'IntegerField'
else:
return super(DatabaseIntrospection, self).get_field_type(
data_type, description)
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
cursor.execute("SELECT TABLE_NAME FROM USER_TABLES")
return [row[0].lower() for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
cursor.execute("SELECT * FROM %s WHERE ROWNUM < 2" % self.connection.ops.quote_name(table_name))
description = []
for desc in cursor.description:
description.append((desc[0].lower(),) + desc[1:])
return description
def table_name_converter(self, name):
"Table name comparison is case insensitive under Oracle"
return name.lower()
def _name_to_index(self, cursor, table_name):
"""
Returns a dictionary of {field_name: field_index} for the given table.
Indexes are 0-based.
"""
return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
table_name = table_name.upper()
cursor.execute("""
SELECT ta.column_id - 1, tb.table_name, tb.column_id - 1
FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb,
user_tab_cols ta, user_tab_cols tb
WHERE user_constraints.table_name = %s AND
ta.table_name = user_constraints.table_name AND
ta.column_name = ca.column_name AND
ca.table_name = ta.table_name AND
user_constraints.constraint_name = ca.constraint_name AND
user_constraints.r_constraint_name = cb.constraint_name AND
cb.table_name = tb.table_name AND
cb.column_name = tb.column_name AND
ca.position = cb.position""", [table_name])
relations = {}
for row in cursor.fetchall():
relations[row[0]] = (row[2], row[1].lower())
return relations
def get_indexes(self, cursor, table_name):
sql = """
SELECT LOWER(uic1.column_name) AS column_name,
CASE user_constraints.constraint_type
WHEN 'P' THEN 1 ELSE 0
END AS is_primary_key,
CASE user_indexes.uniqueness
WHEN 'UNIQUE' THEN 1 ELSE 0
END AS is_unique
FROM user_constraints, user_indexes, user_ind_columns uic1
WHERE user_constraints.constraint_type (+) = 'P'
AND user_constraints.index_name (+) = uic1.index_name
AND user_indexes.uniqueness (+) = 'UNIQUE'
AND user_indexes.index_name (+) = uic1.index_name
AND uic1.table_name = UPPER(%s)
AND uic1.column_position = 1
AND NOT EXISTS (
SELECT 1
FROM user_ind_columns uic2
WHERE uic2.index_name = uic1.index_name
AND uic2.column_position = 2
)
"""
cursor.execute(sql, [table_name])
indexes = {}
for row in cursor.fetchall():
indexes[row[0]] = {'primary_key': bool(row[1]),
'unique': bool(row[2])}
return indexes
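# --- Illustrative sketch (not part of the original module) ---
# How get_field_type() maps cx_Oracle NUMBER columns, given a DB-API
# description tuple (name, type, display_size, internal_size, precision,
# scale, null_ok):
#   ('ID', cx_Oracle.NUMBER, None, 22, 12, 0, 0)    -> 'BigIntegerField'
#   ('N',  cx_Oracle.NUMBER, None, 22, 5, 0, 0)     -> 'IntegerField'
#   ('PRICE', cx_Oracle.NUMBER, None, 22, 10, 2, 1) -> 'DecimalField'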
|
kmolab/kmolab.github.io
|
refs/heads/master
|
data/Brython-3.3.4/Lib/test/test_docxmlrpc.py
|
23
|
from xmlrpc.server import DocXMLRPCServer
import http.client
import sys
from test import support
threading = support.import_module('threading')
import time
import socket
import unittest
PORT = None
def make_request_and_skipIf(condition, reason):
# If we skip the test, we have to make a request because the
# server created in setUp blocks expecting one to come in.
if not condition:
return lambda func: func
def decorator(func):
def make_request_and_skip(self):
self.client.request("GET", "/")
self.client.getresponse()
raise unittest.SkipTest(reason)
return make_request_and_skip
return decorator
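# --- Illustrative sketch (not part of the original test module) ---
# make_request_and_skipIf behaves like unittest.skipIf, except the skipped
# test still issues one GET so the single-request server started in setUp()
# is released:
# @make_request_and_skipIf(sys.flags.optimize >= 2, "docstrings stripped")
# def test_example(self):
#     ...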
def server(evt, numrequests):
serv = DocXMLRPCServer(("localhost", 0), logRequests=False)
try:
global PORT
PORT = serv.socket.getsockname()[1]
# Add some documentation
serv.set_server_title("DocXMLRPCServer Test Documentation")
serv.set_server_name("DocXMLRPCServer Test Docs")
serv.set_server_documentation(
"This is an XML-RPC server's documentation, but the server "
"can be used by POSTing to /RPC2. Try self.add, too.")
# Create and register classes and functions
class TestClass(object):
def test_method(self, arg):
"""Test method's docs. This method truly does very little."""
self.arg = arg
serv.register_introspection_functions()
serv.register_instance(TestClass())
def add(x, y):
"""Add two instances together. This follows PEP008, but has nothing
to do with RFC1952. Case should matter: pEp008 and rFC1952. Things
that start with http and ftp should be auto-linked, too:
http://google.com.
"""
return x + y
def annotation(x: int):
""" Use function annotations. """
return x
class ClassWithAnnotation:
def method_annotation(self, x: bytes):
return x.decode()
serv.register_function(add)
serv.register_function(lambda x, y: x-y)
serv.register_function(annotation)
serv.register_instance(ClassWithAnnotation())
while numrequests > 0:
serv.handle_request()
numrequests -= 1
except socket.timeout:
pass
finally:
serv.server_close()
PORT = None
evt.set()
class DocXMLRPCHTTPGETServer(unittest.TestCase):
def setUp(self):
self._threads = support.threading_setup()
# Enable server feedback
DocXMLRPCServer._send_traceback_header = True
self.evt = threading.Event()
threading.Thread(target=server, args=(self.evt, 1)).start()
# wait for port to be assigned
n = 1000
while n > 0 and PORT is None:
time.sleep(0.001)
n -= 1
self.client = http.client.HTTPConnection("localhost:%d" % PORT)
def tearDown(self):
self.client.close()
self.evt.wait()
# Disable server feedback
DocXMLRPCServer._send_traceback_header = False
support.threading_cleanup(*self._threads)
def test_valid_get_response(self):
self.client.request("GET", "/")
response = self.client.getresponse()
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader("Content-type"), "text/html")
# Server raises an exception if we don't start to read the data
response.read()
def test_invalid_get_response(self):
self.client.request("GET", "/spam")
response = self.client.getresponse()
self.assertEqual(response.status, 404)
self.assertEqual(response.getheader("Content-type"), "text/plain")
response.read()
def test_lambda(self):
"""Test that lambda functionality stays the same. The output produced
currently is, I suspect invalid because of the unencoded brackets in the
HTML, "<lambda>".
The subtraction lambda method is tested.
"""
self.client.request("GET", "/")
response = self.client.getresponse()
self.assertIn((b'<dl><dt><a name="-<lambda>"><strong>'
b'<lambda></strong></a>(x, y)</dt></dl>'),
response.read())
@make_request_and_skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_autolinking(self):
"""Test that the server correctly automatically wraps references to
PEPS and RFCs with links, and that it linkifies text starting with
http or ftp protocol prefixes.
The documentation for the "add" method contains the test material.
"""
self.client.request("GET", "/")
response = self.client.getresponse().read()
self.assertIn(
(b'<dl><dt><a name="-add"><strong>add</strong></a>(x, y)</dt><dd>'
b'<tt>Add two instances together. This '
b'follows <a href="http://www.python.org/dev/peps/pep-0008/">'
b'PEP008</a>, but has nothing<br>\nto do '
b'with <a href="http://www.rfc-editor.org/rfc/rfc1952.txt">'
b'RFC1952</a>. Case should matter: pEp008 '
b'and rFC1952. Things<br>\nthat start '
b'with http and ftp should be '
b'auto-linked, too:<br>\n<a href="http://google.com">'
b'http://google.com</a>.</tt></dd></dl>'), response)
@make_request_and_skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_system_methods(self):
"""Test the precense of three consecutive system.* methods.
This also tests their use of parameter type recognition and the
systems related to that process.
"""
self.client.request("GET", "/")
response = self.client.getresponse().read()
self.assertIn(
(b'<dl><dt><a name="-system.methodHelp"><strong>system.methodHelp'
b'</strong></a>(method_name)</dt><dd><tt><a href="#-system.method'
b'Help">system.methodHelp</a>(\'add\') => "Adds '
b'two integers together"<br>\n <br>\nReturns a'
b' string containing documentation for '
b'the specified method.</tt></dd></dl>\n<dl><dt><a name'
b'="-system.methodSignature"><strong>system.methodSignature</strong>'
b'</a>(method_name)</dt><dd><tt><a href="#-system.methodSignature">'
b'system.methodSignature</a>(\'add\') => [double, '
b'int, int]<br>\n <br>\nReturns a list '
b'describing the signature of the method.'
b' In the<br>\nabove example, the add '
b'method takes two integers as arguments'
b'<br>\nand returns a double result.<br>\n '
b'<br>\nThis server does NOT support system'
b'.methodSignature.</tt></dd></dl>'), response)
def test_autolink_dotted_methods(self):
"""Test that selfdot values are made strong automatically in the
documentation."""
self.client.request("GET", "/")
response = self.client.getresponse()
self.assertIn(b"""Try self.<strong>add</strong>, too.""",
response.read())
def test_annotations(self):
""" Test that annotations works as expected """
self.client.request("GET", "/")
response = self.client.getresponse()
self.assertIn(
(b'<dl><dt><a name="-annotation"><strong>annotation</strong></a>'
b'(x: int)</dt><dd><tt>Use function annotations.</tt>'
b'</dd></dl>\n<dl><dt><a name="-method_annotation"><strong>'
b'method_annotation</strong></a>(x: bytes)</dt></dl>'),
response.read())
def test_main():
support.run_unittest(DocXMLRPCHTTPGETServer)
if __name__ == '__main__':
test_main()
|
tingtingths/PyPiePlayer
|
refs/heads/master
|
docker/conf/config.py
|
1
|
# configure here
# sha-256, default: password
#users = {"user": "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"}
#cert_file = ""
#pkey_file = ""
#PORT = 4343
#DEBUG = False
# the above config doesn't matter if we run the app with uwsgi
library_path = "/music" # Do no modify this
x_accel_enabled = True # whether to delegate the stream handling to nginx
|
cenobites/flask-jsonrpc
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2020, Cenobit Technologies, Inc. http://cenobit.es/
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the Cenobit Technologies nor the names of
# its contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import setuptools
with open('README.md', 'r') as fh:
long_description = fh.read()
setuptools.setup(
name='Flask-JSONRPC',
version='1.1.0',
url='https://github.com/cenobites/flask-jsonrpc',
license='New BSD License',
author='Nycholas de Oliveira e Oliveira',
author_email='nycholas@gmail.com',
description='Adds JSONRPC support to Flask.',
long_description=long_description,
long_description_content_type='text/markdown',
packages=setuptools.find_packages(),
zip_safe=False,
include_package_data=True,
platforms='any',
python_requires='>= 3.6',
install_requires=[
'Flask>=1.0.0',
'typeguard',
'typing;python_version<"3.5"',
'typing_extensions;python_version<"3.8"',
],
setup_requires=['pytest-runner'],
tests_require=['mock', 'coverage', 'pytest', 'pytest-cov', 'pytest-sugar', 'typeguard'],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
CarlosUrda/Curso-Python-DevcodeLa
|
refs/heads/master
|
3enraya/3enraya.py
|
1
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '3enraya.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import re, random
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_wPrincipal(QtGui.QWidget):
def setupUi(self, wPrincipal):
wPrincipal.setObjectName(_fromUtf8("wPrincipal"))
wPrincipal.setWindowModality(QtCore.Qt.NonModal)
wPrincipal.setEnabled(True)
wPrincipal.resize(680, 418)
wPrincipal.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
wPrincipal.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 255);"))
self.lPuntuacion = QtGui.QLabel(wPrincipal)
self.lPuntuacion.setGeometry(QtCore.QRect(430, 20, 175, 31))
font = QtGui.QFont()
font.setPointSize(22)
font.setBold(True)
font.setUnderline(True)
font.setWeight(75)
self.lPuntuacion.setFont(font)
self.lPuntuacion.setObjectName(_fromUtf8("lPuntuacion"))
self.bReiniciar = QtGui.QPushButton(wPrincipal)
self.bReiniciar.setGeometry(QtCore.QRect(460, 310, 131, 41))
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.bReiniciar.setFont(font)
self.bReiniciar.setStyleSheet(_fromUtf8("background-color: rgb(255, 255, 0);"))
self.bReiniciar.setObjectName(_fromUtf8("bReiniciar"))
self.layoutWidget = QtGui.QWidget(wPrincipal)
self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 361, 301))
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.glTablero = QtGui.QGridLayout(self.layoutWidget)
self.glTablero.setObjectName(_fromUtf8("glTablero"))
self.b1_2 = QtGui.QPushButton(self.layoutWidget)
self.b1_2.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b1_2.sizePolicy().hasHeightForWidth())
self.b1_2.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b1_2.setFont(font)
self.b1_2.setFocusPolicy(QtCore.Qt.NoFocus)
self.b1_2.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b1_2.setText(_fromUtf8(""))
self.b1_2.setObjectName(_fromUtf8("b1_2"))
self.glTablero.addWidget(self.b1_2, 1, 2, 1, 1)
self.b2_2 = QtGui.QPushButton(self.layoutWidget)
self.b2_2.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b2_2.sizePolicy().hasHeightForWidth())
self.b2_2.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b2_2.setFont(font)
self.b2_2.setFocusPolicy(QtCore.Qt.NoFocus)
self.b2_2.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b2_2.setText(_fromUtf8(""))
self.b2_2.setObjectName(_fromUtf8("b2_2"))
self.glTablero.addWidget(self.b2_2, 2, 2, 1, 1)
self.b1_0 = QtGui.QPushButton(self.layoutWidget)
self.b1_0.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b1_0.sizePolicy().hasHeightForWidth())
self.b1_0.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b1_0.setFont(font)
self.b1_0.setFocusPolicy(QtCore.Qt.NoFocus)
self.b1_0.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b1_0.setText(_fromUtf8(""))
self.b1_0.setObjectName(_fromUtf8("b1_0"))
self.glTablero.addWidget(self.b1_0, 1, 0, 1, 1)
self.b0_2 = QtGui.QPushButton(self.layoutWidget)
self.b0_2.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b0_2.sizePolicy().hasHeightForWidth())
self.b0_2.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b0_2.setFont(font)
self.b0_2.setFocusPolicy(QtCore.Qt.NoFocus)
self.b0_2.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b0_2.setText(_fromUtf8(""))
self.b0_2.setObjectName(_fromUtf8("b0_2"))
self.glTablero.addWidget(self.b0_2, 0, 2, 1, 1)
self.b2_1 = QtGui.QPushButton(self.layoutWidget)
self.b2_1.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b2_1.sizePolicy().hasHeightForWidth())
self.b2_1.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b2_1.setFont(font)
self.b2_1.setFocusPolicy(QtCore.Qt.NoFocus)
self.b2_1.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b2_1.setText(_fromUtf8(""))
self.b2_1.setObjectName(_fromUtf8("b2_1"))
self.glTablero.addWidget(self.b2_1, 2, 1, 1, 1)
self.b0_0 = QtGui.QPushButton(self.layoutWidget)
self.b0_0.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b0_0.sizePolicy().hasHeightForWidth())
self.b0_0.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b0_0.setFont(font)
self.b0_0.setFocusPolicy(QtCore.Qt.NoFocus)
self.b0_0.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b0_0.setText(_fromUtf8(""))
self.b0_0.setObjectName(_fromUtf8("b0_0"))
self.glTablero.addWidget(self.b0_0, 0, 0, 1, 1)
self.b2_0 = QtGui.QPushButton(self.layoutWidget)
self.b2_0.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b2_0.sizePolicy().hasHeightForWidth())
self.b2_0.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b2_0.setFont(font)
self.b2_0.setFocusPolicy(QtCore.Qt.NoFocus)
self.b2_0.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b2_0.setText(_fromUtf8(""))
self.b2_0.setObjectName(_fromUtf8("b2_0"))
self.glTablero.addWidget(self.b2_0, 2, 0, 1, 1)
self.b0_1 = QtGui.QPushButton(self.layoutWidget)
self.b0_1.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b0_1.sizePolicy().hasHeightForWidth())
self.b0_1.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b0_1.setFont(font)
self.b0_1.setFocusPolicy(QtCore.Qt.NoFocus)
self.b0_1.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b0_1.setText(_fromUtf8(""))
self.b0_1.setObjectName(_fromUtf8("b0_1"))
self.glTablero.addWidget(self.b0_1, 0, 1, 1, 1)
self.b1_1 = QtGui.QPushButton(self.layoutWidget)
self.b1_1.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.b1_1.sizePolicy().hasHeightForWidth())
self.b1_1.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(60)
font.setBold(True)
font.setWeight(75)
self.b1_1.setFont(font)
self.b1_1.setFocusPolicy(QtCore.Qt.NoFocus)
self.b1_1.setStyleSheet(_fromUtf8("color:rgb(255, 255, 255);\n"
"background-color:rgb(8, 50, 255);\n"
"\n"
""))
self.b1_1.setText(_fromUtf8(""))
self.b1_1.setObjectName(_fromUtf8("b1_1"))
self.glTablero.addWidget(self.b1_1, 1, 1, 1, 1)
self.layoutWidget1 = QtGui.QWidget(wPrincipal)
self.layoutWidget1.setGeometry(QtCore.QRect(70, 370, 271, 41))
self.layoutWidget1.setObjectName(_fromUtf8("layoutWidget1"))
self.lEleccionMaquina = QtGui.QHBoxLayout(self.layoutWidget1)
self.lEleccionMaquina.setObjectName(_fromUtf8("lEleccionMaquina"))
self.cbMaquina2 = QtGui.QCheckBox(self.layoutWidget1)
font = QtGui.QFont()
font.setPointSize(12)
self.cbMaquina2.setFont(font)
self.cbMaquina2.setObjectName(_fromUtf8("cbMaquina2"))
self.lEleccionMaquina.addWidget(self.cbMaquina2)
self.cbMaquina1 = QtGui.QCheckBox(self.layoutWidget1)
font = QtGui.QFont()
font.setPointSize(12)
self.cbMaquina1.setFont(font)
self.cbMaquina1.setTristate(False)
self.cbMaquina1.setObjectName(_fromUtf8("cbMaquina1"))
self.lEleccionMaquina.addWidget(self.cbMaquina1)
self.eTurno = QtGui.QLabel(wPrincipal)
self.eTurno.setGeometry(QtCore.QRect(85, 331, 48, 21))
font = QtGui.QFont()
font.setPointSize(15)
self.eTurno.setFont(font)
self.eTurno.setObjectName(_fromUtf8("eTurno"))
self.layoutWidget2 = QtGui.QWidget(wPrincipal)
self.layoutWidget2.setGeometry(QtCore.QRect(391, 90, 271, 171))
self.layoutWidget2.setObjectName(_fromUtf8("layoutWidget2"))
self.glPuntuacion = QtGui.QGridLayout(self.layoutWidget2)
self.glPuntuacion.setObjectName(_fromUtf8("glPuntuacion"))
self.eJugador1 = QtGui.QLabel(self.layoutWidget2)
font = QtGui.QFont()
font.setPointSize(15)
self.eJugador1.setFont(font)
self.eJugador1.setObjectName(_fromUtf8("eJugador1"))
self.glPuntuacion.addWidget(self.eJugador1, 0, 0, 1, 1)
self.lcdJugador1 = QtGui.QLCDNumber(self.layoutWidget2)
font = QtGui.QFont()
font.setPointSize(11)
self.lcdJugador1.setFont(font)
self.lcdJugador1.setProperty("value", 0.0)
self.lcdJugador1.setObjectName(_fromUtf8("lcdJugador1"))
self.glPuntuacion.addWidget(self.lcdJugador1, 0, 1, 1, 1)
self.eJugador2 = QtGui.QLabel(self.layoutWidget2)
font = QtGui.QFont()
font.setPointSize(15)
self.eJugador2.setFont(font)
self.eJugador2.setObjectName(_fromUtf8("eJugador2"))
self.glPuntuacion.addWidget(self.eJugador2, 1, 0, 1, 1)
self.lcdJugador2 = QtGui.QLCDNumber(self.layoutWidget2)
self.lcdJugador2.setObjectName(_fromUtf8("lcdJugador2"))
self.glPuntuacion.addWidget(self.lcdJugador2, 1, 1, 1, 1)
self.eEmpate = QtGui.QLabel(self.layoutWidget2)
font = QtGui.QFont()
font.setPointSize(15)
self.eEmpate.setFont(font)
self.eEmpate.setObjectName(_fromUtf8("eEmpate"))
self.glPuntuacion.addWidget(self.eEmpate, 2, 0, 1, 1)
self.lcdEmpate = QtGui.QLCDNumber(self.layoutWidget2)
self.lcdEmpate.setObjectName(_fromUtf8("lcdEmpate"))
self.glPuntuacion.addWidget(self.lcdEmpate, 2, 1, 1, 1)
self.leTurno = QtGui.QLineEdit(wPrincipal)
self.leTurno.setGeometry(QtCore.QRect(154, 330, 113, 26))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setItalic(True)
font.setWeight(75)
self.leTurno.setFont(font)
self.leTurno.setText(_fromUtf8(""))
self.leTurno.setObjectName(_fromUtf8("leTurno"))
self.leMensaje = QtGui.QLineEdit(wPrincipal)
self.leMensaje.setGeometry(QtCore.QRect(432, 370, 191, 31))
font = QtGui.QFont()
font.setPointSize(15)
self.leMensaje.setFont(font)
self.leMensaje.setFrame(False)
self.leMensaje.setObjectName(_fromUtf8("leMensaje"))
        self.layoutWidget.raise_()
        self.layoutWidget1.raise_()
        self.layoutWidget2.raise_()
self.lPuntuacion.raise_()
self.bReiniciar.raise_()
self.eTurno.raise_()
self.leTurno.raise_()
self.leMensaje.raise_()
self.retranslateUi(wPrincipal)
QtCore.QMetaObject.connectSlotsByName(wPrincipal)
self.__botonesTablero = [self.b0_0, self.b0_1, self.b0_2,
self.b1_0, self.b1_1, self.b1_2,
self.b2_0, self.b2_1, self.b2_2]
self.__victorias1 = 0
self.__victorias2 = 0
self.__empates = 0
for boton in self.__botonesTablero:
QtCore.QObject.connect( boton, QtCore.SIGNAL(_fromUtf8("clicked()")),
self.pincharBotonTablero)
QtCore.QObject.connect( self.cbMaquina1,
QtCore.SIGNAL(_fromUtf8("stateChanged(int)")),
self.comprobarTurnoMaquina)
QtCore.QObject.connect( self.cbMaquina2,
QtCore.SIGNAL(_fromUtf8("stateChanged(int)")),
self.comprobarTurnoMaquina)
QtCore.QObject.connect( self.bReiniciar, QtCore.SIGNAL(_fromUtf8("clicked()")),
self.pincharBotonReiniciar)
self.iniciarPartida()
def pincharBotonTablero( self):
if not self.cambiarCasilla( self.sender()):
self.leMensaje.setText( _fromUtf8("¡Movimiento incorrecto!"))
self.comprobarTurnoMaquina()
def cambiarCasilla( self, boton):
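        # Attempt to mark the clicked cell for the side to move; returns
        # False for an illegal move, True otherwise, updating the score
        # displays when the move ends the game.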
if self.__movimiento == 9: return False
        posicion = re.search( r"b(\d+)_(\d+)", boton.objectName())
fila = int( posicion.group(1))
columna = int( posicion.group(2))
if self.__tablero[fila][columna] != 0: return False
boton.setText( "X" if self.__turno == 1 else "O")
boton.setEnabled( False)
self.__tablero[fila][columna] = self.__turno
self.__movimiento += 1
ganador = self.comprobarFinal( fila, columna)
if ganador == 3:
self.__turno = 1 if self.__turno == 2 else 2
self.leTurno.setText("Jugador 1" if self.__turno==1 else "Jugador 2")
self.leMensaje.setText( "")
return True
elif ganador == 1 or ganador == 2:
txtGanador = str( ganador)
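            # Name-mangled attribute access: picks __victorias1/__victorias2
            # and the matching lcdJugador widget by the winner's number.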
self.__dict__["_Ui_wPrincipal__victorias" + txtGanador] += 1
self.__dict__["lcdJugador" + txtGanador].display(
self.__dict__["_Ui_wPrincipal__victorias" + txtGanador])
self.leMensaje.setText(_fromUtf8("¡Vence Jugador "+txtGanador+"!"))
elif ganador == 0:
self.__empates += 1
self.lcdEmpate.display( self.__empates)
self.leMensaje.setText( _fromUtf8("¡Empate!"))
self.finalizarPartida()
return True
def finalizarPartida( self):
for boton in self.__botonesTablero:
if boton.isEnabled(): boton.setEnabled( False)
self.__movimiento = 9
self.leTurno.setText( "")
def pincharBotonReiniciar( self):
self.iniciarPartida()
def iniciarPartida( self):
self.__turno = 1
color = _fromUtf8( "color:rgb(255,255,255); background:rgb(8, 50, 255)")
for boton in self.__botonesTablero:
boton.setStyleSheet( color)
boton.setText( " ")
boton.setEnabled( True)
self.__tablero = [[0]*3,[0]*3,[0]*3]
self.__movimiento = 0
self.leTurno.setText( "Jugador 1" if self.__turno == 1 else "Jugador 2")
self.leMensaje.setText( "")
self.comprobarTurnoMaquina()
def comprobarTurnoMaquina( self):
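        # While the side to move is machine-controlled, keep playing random
        # moves; stops once a human player's turn arrives or the board fills.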
while self.__movimiento < 9:
if not self.__dict__["cbMaquina" + str( self.__turno)].isChecked():
break
(fila, columna) = self.generarMovimiento()
self.cambiarCasilla( self.__dict__["b"+str(fila)+"_"+str(columna)])
def generarMovimiento( self):
return (random.randint(0,2), random.randint(0,2))
def comprobarFinal( self, fil, col):
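        # Check the column, row and any diagonal through the last move using
        # mod-3 arithmetic, highlighting a winning line. Returns the winner
        # (1 or 2), 0 for a draw, or 3 if the game continues.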
color = _fromUtf8( "color:rgb(5, 5, 255); background:rgb(228, 0, 5);")
if (self.__tablero[fil][col] == self.__tablero[(fil+1)%3][col] and
self.__tablero[fil][col] == self.__tablero[(fil+2)%3][col]):
self.__dict__["b"+str(fil)+"_"+str(col)].setStyleSheet(color)
self.__dict__["b"+str((fil+1)%3)+"_"+str(col)].setStyleSheet(color)
self.__dict__["b"+str((fil+2)%3)+"_"+str(col)].setStyleSheet(color)
return self.__turno
if (self.__tablero[fil][col] == self.__tablero[fil][(col+1)%3] and
self.__tablero[fil][col] == self.__tablero[fil][(col+2)%3]):
self.__dict__["b"+str(fil)+"_"+str(col)].setStyleSheet(color)
self.__dict__["b"+str(fil)+"_"+str((col+1)%3)].setStyleSheet(color)
self.__dict__["b"+str(fil)+"_"+str((col+2)%3)].setStyleSheet(color)
return self.__turno
if (fil == col and
self.__tablero[fil][col] == self.__tablero[(fil+1)%3][(col+1)%3] and
self.__tablero[fil][col] == self.__tablero[(fil+2)%3][(col+2)%3]):
self.__dict__["b"+str(fil)+"_"+str(col)].setStyleSheet(color)
self.__dict__["b"+str((fil+1)%3)+"_"+str((col+1)%3)].setStyleSheet(color)
self.__dict__["b"+str((fil+2)%3)+"_"+str((col+2)%3)].setStyleSheet(color)
return self.__turno
if (abs(fil-col) == 2 and
self.__tablero[fil][col] == self.__tablero[(fil+1)%3][(col-1)%3] and
self.__tablero[fil][col] == self.__tablero[(fil+2)%3][(col-2)%3]):
self.__dict__["b"+str(fil)+"_"+str(col)].setStyleSheet(color)
self.__dict__["b"+str((fil+1)%3)+"_"+str((col-1)%3)].setStyleSheet(color)
self.__dict__["b"+str((fil+2)%3)+"_"+str((col-2)%3)].setStyleSheet(color)
return self.__turno
if self.__movimiento == 9: return 0
return 3
def retranslateUi(self, wPrincipal):
wPrincipal.setWindowTitle(_translate("wPrincipal", "3 en Raya", None))
self.lPuntuacion.setText(_translate("wPrincipal", "PUNTUACIÓN", None))
self.bReiniciar.setText(_translate("wPrincipal", "Reiniciar", None))
self.cbMaquina2.setText(_translate("wPrincipal", "J2 Máquina", None))
self.cbMaquina1.setText(_translate("wPrincipal", "J1 Máquina", None))
self.eTurno.setText(_translate("wPrincipal", "Turno", None))
self.eJugador1.setText(_translate("wPrincipal", "Jugador 1", None))
self.eJugador2.setText(_translate("wPrincipal", "Jugador 2", None))
self.eEmpate.setText(_translate("wPrincipal", "Empate", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
wPrincipal = QtGui.QWidget()
ui = Ui_wPrincipal()
ui.setupUi(wPrincipal)
wPrincipal.show()
sys.exit(app.exec_())
|
p0cisk/Quantum-GIS
|
refs/heads/master
|
python/plugins/processing/core/ProcessingLog.py
|
2
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ProcessingLog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import range
from builtins import object
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import codecs
import datetime
from processing.tools.system import userFolder
from processing.core.ProcessingConfig import ProcessingConfig
from qgis.core import QgsMessageLog
from qgis.PyQt.QtCore import QCoreApplication
class ProcessingLog(object):
LOG_ERROR = 'ERROR'
LOG_INFO = 'INFO'
LOG_WARNING = 'WARNING'
LOG_ALGORITHM = 'ALGORITHM'
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
recentAlgs = []
@staticmethod
def logFilename():
logFilename = userFolder() + os.sep + 'processing.log'
if not os.path.isfile(logFilename):
logfile = codecs.open(logFilename, 'w', encoding='utf-8')
logfile.write('Started logging at ' +
datetime.datetime.now().strftime(ProcessingLog.DATE_FORMAT) + '\n')
logfile.close()
return logFilename
@staticmethod
def addToLog(msgtype, msg):
try:
# It seems that this fails sometimes depending on the msg
# added. To avoid it stopping the normal functioning of the
# algorithm, we catch all errors, assuming that is better
# to miss some log info than breaking the algorithm.
if msgtype == ProcessingLog.LOG_ALGORITHM:
line = msgtype + '|' + datetime.datetime.now().strftime(
ProcessingLog.DATE_FORMAT) + '|' \
+ msg + '\n'
logfile = codecs.open(ProcessingLog.logFilename(), 'a',
encoding='utf-8')
logfile.write(line)
logfile.close()
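                # Log lines look like 'Processing.runalg("<name>", ...)';
                # extract <name> to maintain the recent-algorithms setting.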
algname = msg[len('Processing.runalg("'):]
algname = algname[:algname.index('"')]
if algname not in ProcessingLog.recentAlgs:
ProcessingLog.recentAlgs.append(algname)
recentAlgsString = ';'.join(ProcessingLog.recentAlgs[-6:])
ProcessingConfig.setSettingValue(
ProcessingConfig.RECENT_ALGORITHMS,
recentAlgsString)
else:
if isinstance(msg, list):
                    msg = '\n'.join(msg)
msgtypes = {ProcessingLog.LOG_ERROR: QgsMessageLog.CRITICAL,
ProcessingLog.LOG_INFO: QgsMessageLog.INFO,
ProcessingLog.LOG_WARNING: QgsMessageLog.WARNING, }
QgsMessageLog.logMessage(msg, ProcessingLog.tr("Processing"), msgtypes[msgtype])
except:
pass
@staticmethod
def getLogEntries():
entries = {}
errors = []
algorithms = []
warnings = []
info = []
with open(ProcessingLog.logFilename()) as f:
lines = f.readlines()
for line in lines:
line = line.strip('\n').strip()
tokens = line.split('|')
text = ''
for i in range(2, len(tokens)):
text += tokens[i] + '|'
if line.startswith(ProcessingLog.LOG_ERROR):
errors.append(LogEntry(tokens[1], text))
elif line.startswith(ProcessingLog.LOG_ALGORITHM):
algorithms.append(LogEntry(tokens[1], tokens[2]))
elif line.startswith(ProcessingLog.LOG_WARNING):
warnings.append(LogEntry(tokens[1], text))
elif line.startswith(ProcessingLog.LOG_INFO):
info.append(LogEntry(tokens[1], text))
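        # Only ALGORITHM entries are returned; the error/warning/info lists
        # are parsed above but never added to the result dict.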
entries[ProcessingLog.LOG_ALGORITHM] = algorithms
return entries
@staticmethod
def getRecentAlgorithms():
recentAlgsSetting = ProcessingConfig.getSetting(
ProcessingConfig.RECENT_ALGORITHMS)
try:
ProcessingLog.recentAlgs = recentAlgsSetting.split(';')
except:
pass
return ProcessingLog.recentAlgs
@staticmethod
def clearLog():
os.unlink(ProcessingLog.logFilename())
@staticmethod
def saveLog(fileName):
entries = ProcessingLog.getLogEntries()
with codecs.open(fileName, 'w', encoding='utf-8') as f:
for k, v in list(entries.items()):
for entry in v:
f.write('%s|%s|%s\n' % (k, entry.date, entry.text))
@staticmethod
def tr(string, context=''):
if context == '':
context = 'ProcessingLog'
return QCoreApplication.translate(context, string)
class LogEntry(object):
def __init__(self, date, text):
self.date = date
self.text = text
|
JingZhou0404/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/jsonchecker_unittest.py
|
124
|
# Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for jsonchecker.py."""
import unittest2 as unittest
import jsonchecker
class MockErrorHandler(object):
def __init__(self, handle_style_error):
self.turned_off_filtering = False
self._handle_style_error = handle_style_error
def turn_off_line_filtering(self):
self.turned_off_filtering = True
def __call__(self, line_number, category, confidence, message):
self._handle_style_error(self, line_number, category, confidence, message)
return True
class JSONCheckerTest(unittest.TestCase):
"""Tests JSONChecker class."""
def test_line_number_from_json_exception(self):
tests = (
(0, 'No JSON object could be decoded'),
(2, 'Expecting property name: line 2 column 1 (char 2)'),
(3, 'Expecting object: line 3 column 1 (char 15)'),
(9, 'Expecting property name: line 9 column 21 (char 478)'),
)
for expected_line, message in tests:
self.assertEqual(expected_line, jsonchecker.JSONChecker.line_number_from_json_exception(ValueError(message)))
def assert_no_error(self, json_data):
def handle_style_error(mock_error_handler, line_number, category, confidence, message):
self.fail('Unexpected error: %d %s %d %s' % (line_number, category, confidence, message))
error_handler = MockErrorHandler(handle_style_error)
checker = jsonchecker.JSONChecker('foo.json', error_handler)
checker.check(json_data.split('\n'))
self.assertTrue(error_handler.turned_off_filtering)
def assert_error(self, expected_line_number, expected_category, json_data):
def handle_style_error(mock_error_handler, line_number, category, confidence, message):
mock_error_handler.had_error = True
self.assertEqual(expected_line_number, line_number)
self.assertEqual(expected_category, category)
self.assertIn(category, jsonchecker.JSONChecker.categories)
error_handler = MockErrorHandler(handle_style_error)
error_handler.had_error = False
checker = jsonchecker.JSONChecker('foo.json', error_handler)
checker.check(json_data.split('\n'))
self.assertTrue(error_handler.had_error)
self.assertTrue(error_handler.turned_off_filtering)
def mock_handle_style_error(self):
pass
def test_conflict_marker(self):
self.assert_error(0, 'json/syntax', '<<<<<<< HEAD\n{\n}\n')
def test_single_quote(self):
self.assert_error(2, 'json/syntax', "{\n'slaves': []\n}\n")
def test_init(self):
error_handler = MockErrorHandler(self.mock_handle_style_error)
checker = jsonchecker.JSONChecker('foo.json', error_handler)
self.assertEqual(checker._handle_style_error, error_handler)
def test_no_error(self):
self.assert_no_error("""{
"slaves": [ { "name": "test-slave", "platform": "*" },
{ "name": "apple-xserve-4", "platform": "mac-snowleopard" }
],
"builders": [ { "name": "SnowLeopard Intel Release (Build)", "type": "Build", "builddir": "snowleopard-intel-release",
"platform": "mac-snowleopard", "configuration": "release", "architectures": ["x86_64"],
"slavenames": ["apple-xserve-4"]
}
],
"schedulers": [ { "type": "PlatformSpecificScheduler", "platform": "mac-snowleopard", "branch": "trunk", "treeStableTimer": 45.0,
"builderNames": ["SnowLeopard Intel Release (Build)", "SnowLeopard Intel Debug (Build)"]
}
]
}
""")
|
EaseCloud/wechatpy
|
refs/heads/master
|
wechatpy/enterprise/client/api/menu.py
|
12
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from wechatpy.client.api.base import BaseWeChatAPI
from wechatpy.exceptions import WeChatClientException
class WeChatMenu(BaseWeChatAPI):
def create(self, agent_id, menu_data):
return self._post(
'menu/create',
params={
'agentid': agent_id
},
data=menu_data
)
def get(self, agent_id):
try:
return self._get(
'menu/get',
params={
'agentid': agent_id
}
)
except WeChatClientException as e:
if e.errcode == 46003:
                # menu does not exist
                return None
            else:
                raise
def delete(self, agent_id):
return self._get(
'menu/delete',
params={
'agentid': agent_id
}
)
def update(self, agent_id, menu_data):
self.delete(agent_id)
return self.create(agent_id, menu_data)
|
ivelum/djangoql
|
refs/heads/master
|
test_project/core/tests/test_parser.py
|
1
|
# -*- coding: utf-8 -*-
import unittest.util
from unittest import TestCase
from djangoql.ast import Comparison, Const, Expression, List, Logical, Name
from djangoql.exceptions import DjangoQLParserError
from djangoql.parser import DjangoQLParser
# Show full contents in assertions when comparing long text strings
unittest.util._MAX_LENGTH = 2000
class DjangoQLParseTest(TestCase):
parser = DjangoQLParser()
def test_comparisons(self):
self.assertEqual(
Expression(Name('age'), Comparison('>='), Const(18)),
self.parser.parse('age >= 18'),
)
self.assertEqual(
Expression(Name('gender'), Comparison('='), Const('female')),
self.parser.parse('gender = "female"'),
)
self.assertEqual(
Expression(Name('name'), Comparison('!='), Const('Gennady')),
self.parser.parse('name != "Gennady"'),
)
self.assertEqual(
Expression(Name('married'), Comparison('in'),
List([Const(True), Const(False)])),
self.parser.parse('married in (True, False)'),
)
self.assertEqual(
Expression(Name('smile'), Comparison('!='), Const(None)),
self.parser.parse('(smile != None)'),
)
self.assertEqual(
Expression(Name(['job', 'best', 'title']), Comparison('>'),
Const('none')),
self.parser.parse('job.best.title > "none"'),
)
def test_escaped_chars(self):
self.assertEqual(
Expression(Name('name'), Comparison('~'),
Const(u'Contains a "quoted" str, 年年有余')),
self.parser.parse(u'name ~ "Contains a \\"quoted\\" str, 年年有余"'),
)
self.assertEqual(
Expression(Name('options'), Comparison('='), Const(u'П и Щ')),
self.parser.parse(u'options = "\\u041f \\u0438 \\u0429"'),
)
def test_numbers(self):
self.assertEqual(
Expression(Name('pk'), Comparison('>'), Const(5)),
self.parser.parse('pk > 5'),
)
self.assertEqual(
Expression(Name('rating'), Comparison('<='), Const(523)),
self.parser.parse('rating <= 5.23e2'),
)
def test_logical(self):
self.assertEqual(
Expression(
Expression(Name('age'), Comparison('>='), Const(18)),
Logical('and'),
Expression(Name('age'), Comparison('<='), Const(45)),
),
self.parser.parse('age >= 18 and age <= 45'),
)
self.assertEqual(
Expression(
Expression(
Expression(Name('city'), Comparison('='), Const('Ivanovo')),
Logical('and'),
Expression(Name('age'), Comparison('<='), Const(35)),
),
Logical('or'),
Expression(
Expression(Name('city'), Comparison('='), Const('Paris')),
Logical('and'),
Expression(Name('age'), Comparison('<='), Const(45)),
),
),
self.parser.parse('(city = "Ivanovo" and age <= 35) or '
'(city = "Paris" and age <= 45)'),
)
def test_invalid_comparison(self):
for expr in ('foo > None', 'b <= True', 'c in False', '1 = 1', 'a > b'):
self.assertRaises(DjangoQLParserError, self.parser.parse, expr)
def test_entity_props(self):
self.assertEqual(
Expression(Name(['user', 'group', 'id']), Comparison('='),
Const(5)),
self.parser.parse('user.group.id = 5'),
)
|
drj11/pdftables
|
refs/heads/dev
|
pdftables/counter.py
|
3
|
"""
Implement collections.Counter for the benefit of Python 2.6
"""
from operator import itemgetter
from heapq import nlargest
from itertools import repeat, ifilter
class Counter(dict):
'''
Dict subclass for counting hashable objects. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> Counter('zyzygy')
Counter({'y': 3, 'z': 2, 'g': 1})
'''
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
self.update(iterable, **kwds)
def __missing__(self, key):
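        # The count of a missing element is zero, matching the semantics of
        # collections.Counter.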
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abracadabra').most_common(3)
[('a', 5), ('r', 2), ('b', 2)]
'''
if n is None:
return sorted(self.iteritems(), key=itemgetter(1), reverse=True)
return nlargest(n, self.iteritems(), key=itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
If an element's count has been set to zero or is a negative number,
elements() will ignore it.
'''
for elem, count in self.iteritems():
for _ in repeat(None, count):
yield elem
# Override dict methods where the meaning changes for Counter objects.
@classmethod
def fromkeys(cls, iterable, v=None):
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
if iterable is not None:
if hasattr(iterable, 'iteritems'):
if self:
self_get = self.get
for elem, count in iterable.iteritems():
self[elem] = self_get(elem, 0) + count
else:
# fast path when counter is empty
dict.update(self, iterable)
else:
self_get = self.get
for elem in iterable:
self[elem] = self_get(elem, 0) + 1
if kwds:
self.update(kwds)
def copy(self):
"""
Like dict.copy() but returns a Counter instance instead of a dict.
"""
return Counter(self)
def __delitem__(self, elem):
"""
Like dict.__delitem__() but does not raise KeyError for missing values.
"""
if elem in self:
dict.__delitem__(self, elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem in set(self) | set(other):
newcount = self[elem] + other[elem]
if newcount > 0:
result[elem] = newcount
return result
def __sub__(self, other):
''' Subtract count, but keep only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem in set(self) | set(other):
newcount = self[elem] - other[elem]
if newcount > 0:
result[elem] = newcount
return result
def __or__(self, other):
'''Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
_max = max
result = Counter()
for elem in set(self) | set(other):
newcount = _max(self[elem], other[elem])
if newcount > 0:
result[elem] = newcount
return result
def __and__(self, other):
''' Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
_min = min
result = Counter()
if len(self) < len(other):
self, other = other, self
for elem in ifilter(self.__contains__, other):
newcount = _min(self[elem], other[elem])
if newcount > 0:
result[elem] = newcount
return result
if __name__ == '__main__':
import doctest
print doctest.testmod()
|
fubaz/djheroku
|
refs/heads/master
|
djheroku/fixture.py
|
1
|
''' Test fixture for minimal Django conf '''
SECRET_KEY = 'a'
DEBUG = True
DATABASES = {'default': {}}
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = ['django.contrib.contenttypes']
|
gabriel-laet/graphql-py
|
refs/heads/master
|
graphql/core/validation/__init__.py
|
2
|
from . import rules as Rules
from ..error import GraphQLError
from ..language.ast import FragmentDefinition, FragmentSpread
from ..language.visitor import Visitor, visit
from ..type import GraphQLSchema
from ..utils import TypeInfo
specified_rules = [
Rules.UniqueOperationNames,
Rules.LoneAnonymousOperation,
Rules.KnownTypeNames,
Rules.FragmentsOnCompositeTypes,
Rules.VariablesAreInputTypes,
Rules.ScalarLeafs,
Rules.FieldsOnCorrectType,
Rules.UniqueFragmentNames,
Rules.KnownFragmentNames,
Rules.NoUnusedFragments,
Rules.PossibleFragmentSpreads,
Rules.NoFragmentCycles,
Rules.NoUndefinedVariables,
Rules.NoUnusedVariables,
Rules.KnownDirectives,
Rules.KnownArgumentNames,
Rules.UniqueArgumentNames,
Rules.ArgumentsOfCorrectType,
Rules.ProvidedNonNullArguments,
Rules.DefaultValuesOfCorrectType,
Rules.VariablesInAllowedPosition,
Rules.OverlappingFieldsCanBeMerged,
Rules.UniqueInputFieldNames
]
def validate(schema, ast, rules=None):
assert schema, 'Must provide schema'
assert ast, 'Must provide document'
assert isinstance(schema, GraphQLSchema)
if rules is None:
rules = specified_rules
return visit_using_rules(schema, ast, rules)
def visit_using_rules(schema, ast, rules):
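    # Run each rule's visitor over the AST, accumulating any GraphQLErrors
    # into a single flat list.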
type_info = TypeInfo(schema)
context = ValidationContext(schema, ast, type_info)
errors = []
for rule in rules:
instance = rule(context)
visit(ast, ValidationVisitor(instance, type_info, errors))
return errors
class ValidationVisitor(Visitor):
def __init__(self, instance, type_info, errors):
self.instance = instance
self.type_info = type_info
self.errors = errors
def enter(self, node, key, parent, path, ancestors):
self.type_info.enter(node)
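        # Rules that define visit_spread_fragments visit fragments at their
        # spread sites instead, so standalone fragment definitions are skipped.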
if isinstance(node, FragmentDefinition) and key and hasattr(self.instance, 'visit_spread_fragments'):
return False
result = self.instance.enter(node, key, parent, path, ancestors)
if result and is_error(result):
append(self.errors, result)
result = False
if result is None and getattr(self.instance, 'visit_spread_fragments', False) and isinstance(node, FragmentSpread):
fragment = self.instance.context.get_fragment(node.name.value)
if fragment:
visit(fragment, self)
if result is False:
self.type_info.leave(node)
return result
def leave(self, node, key, parent, path, ancestors):
result = self.instance.leave(node, key, parent, path, ancestors)
if result and is_error(result):
append(self.errors, result)
result = False
self.type_info.leave(node)
return result
def is_error(value):
if isinstance(value, list):
return all(isinstance(item, GraphQLError) for item in value)
return isinstance(value, GraphQLError)
def append(arr, items):
if isinstance(items, list):
arr.extend(items)
else:
arr.append(items)
class ValidationContext(object):
def __init__(self, schema, ast, type_info):
self._schema = schema
self._ast = ast
self._type_info = type_info
self._fragments = None
def get_schema(self):
return self._schema
def get_ast(self):
return self._ast
def get_fragment(self, name):
fragments = self._fragments
if fragments is None:
self._fragments = fragments = {}
for statement in self.get_ast().definitions:
if isinstance(statement, FragmentDefinition):
fragments[statement.name.value] = statement
return fragments.get(name)
def get_type(self):
return self._type_info.get_type()
def get_parent_type(self):
return self._type_info.get_parent_type()
def get_input_type(self):
return self._type_info.get_input_type()
def get_field_def(self):
return self._type_info.get_field_def()
def get_directive(self):
return self._type_info.get_directive()
def get_argument(self):
return self._type_info.get_argument()
|
ntuecon/server
|
refs/heads/master
|
pyenv/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py
|
355
|
from __future__ import absolute_import, division, unicode_literals
from xml.dom import Node
from ..constants import namespaces, voidElements, spaceCharacters
__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
"TreeWalker", "NonRecursiveTreeWalker"]
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"
spaceCharacters = "".join(spaceCharacters)
class TreeWalker(object):
def __init__(self, tree):
self.tree = tree
def __iter__(self):
raise NotImplementedError
def error(self, msg):
return {"type": "SerializeError", "data": msg}
def emptyTag(self, namespace, name, attrs, hasChildren=False):
yield {"type": "EmptyTag", "name": name,
"namespace": namespace,
"data": attrs}
if hasChildren:
yield self.error("Void element has children")
def startTag(self, namespace, name, attrs):
return {"type": "StartTag",
"name": name,
"namespace": namespace,
"data": attrs}
def endTag(self, namespace, name):
return {"type": "EndTag",
"name": name,
"namespace": namespace}
def text(self, data):
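        # Split a text node into leading-space, character and trailing-space
        # tokens so serializers can treat inter-element whitespace specially.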
middle = data.lstrip(spaceCharacters)
left = data[:len(data) - len(middle)]
if left:
yield {"type": "SpaceCharacters", "data": left}
data = middle
middle = data.rstrip(spaceCharacters)
right = data[len(middle):]
if middle:
yield {"type": "Characters", "data": middle}
if right:
yield {"type": "SpaceCharacters", "data": right}
def comment(self, data):
return {"type": "Comment", "data": data}
def doctype(self, name, publicId=None, systemId=None):
return {"type": "Doctype",
"name": name,
"publicId": publicId,
"systemId": systemId}
def entity(self, name):
return {"type": "Entity", "name": name}
def unknown(self, nodeType):
return self.error("Unknown node type: " + nodeType)
class NonRecursiveTreeWalker(TreeWalker):
def getNodeDetails(self, node):
raise NotImplementedError
def getFirstChild(self, node):
raise NotImplementedError
def getNextSibling(self, node):
raise NotImplementedError
def getParentNode(self, node):
raise NotImplementedError
def __iter__(self):
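        # Iterative depth-first traversal: descend via first children, then
        # emit EndTag tokens while climbing back up through siblings/parents.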
currentNode = self.tree
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
hasChildren = False
if type == DOCTYPE:
yield self.doctype(*details)
elif type == TEXT:
for token in self.text(*details):
yield token
elif type == ELEMENT:
namespace, name, attributes, hasChildren = details
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
for token in self.emptyTag(namespace, name, attributes,
hasChildren):
yield token
hasChildren = False
else:
yield self.startTag(namespace, name, attributes)
elif type == COMMENT:
yield self.comment(details[0])
elif type == ENTITY:
yield self.entity(details[0])
elif type == DOCUMENT:
hasChildren = True
else:
yield self.unknown(details[0])
if hasChildren:
firstChild = self.getFirstChild(currentNode)
else:
firstChild = None
if firstChild is not None:
currentNode = firstChild
else:
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
if type == ELEMENT:
namespace, name, attributes, hasChildren = details
if (namespace and namespace != namespaces["html"]) or name not in voidElements:
yield self.endTag(namespace, name)
if self.tree is currentNode:
currentNode = None
break
nextSibling = self.getNextSibling(currentNode)
if nextSibling is not None:
currentNode = nextSibling
break
else:
currentNode = self.getParentNode(currentNode)
|
mihaip/NewsBlur
|
refs/heads/master
|
vendor/seacucumber/management/commands/ses_usage.py
|
20
|
"""
Shows some usage levels and limits for the last and previous 24 hours.
"""
import datetime
from django.core.management.base import BaseCommand
from seacucumber.util import get_boto_ses_connection
class Command(BaseCommand):
"""
This command shows some really vague usage and quota stats from SES.
"""
help = "Shows SES usage and quota limits."
def handle(self, *args, **options):
"""
Renders the output by piecing together a few methods that do the
dirty work.
"""
# AWS SES connection, which can be re-used for each query needed.
conn = get_boto_ses_connection()
self._print_quota(conn)
self._print_daily_stats(conn)
def _print_quota(self, conn):
"""
Prints some basic quota statistics.
"""
quota = conn.get_send_quota()
quota = quota['GetSendQuotaResponse']['GetSendQuotaResult']
print "--- SES Quota ---"
print " 24 Hour Quota: %s" % quota['Max24HourSend']
print " Sent (Last 24 hours): %s" % quota['SentLast24Hours']
print " Max sending rate: %s/sec" % quota['MaxSendRate']
def _print_daily_stats(self, conn):
"""
Prints a Today/Last 24 hour stats section.
"""
stats = conn.get_send_statistics()
stats = stats['GetSendStatisticsResponse']['GetSendStatisticsResult']
stats = stats['SendDataPoints']
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
current_day = {'HeaderName': 'Current Day: %s/%s' % (today.month,
today.day)}
prev_day = {'HeaderName': 'Yesterday: %s/%s' % (yesterday.month,
yesterday.day)}
for data_point in stats:
if self._is_data_from_today(data_point):
day_dict = current_day
else:
day_dict = prev_day
self._update_day_dict(data_point, day_dict)
for day in [current_day, prev_day]:
print "--- %s ---" % day.get('HeaderName', 0)
print " Delivery attempts: %s" % day.get('DeliveryAttempts', 0)
print " Bounces: %s" % day.get('Bounces', 0)
print " Rejects: %s" % day.get('Rejects', 0)
print " Complaints: %s" % day.get('Complaints', 0)
def _is_data_from_today(self, data_point):
"""
Takes a DataPoint from SESConnection.get_send_statistics() and returns
True if it is talking about the current date, False if not.
:param dict data_point: The data point to consider.
:rtype: bool
:returns: True if this data_point is for today, False if not (probably
yesterday).
"""
today = datetime.date.today()
raw_timestr = data_point['Timestamp']
dtime = datetime.datetime.strptime(raw_timestr, '%Y-%m-%dT%H:%M:%SZ')
return today.day == dtime.day
def _update_day_dict(self, data_point, day_dict):
"""
Helper method for :meth:`_print_daily_stats`. Given a data point and
the correct day dict, update attribs on the dict with the contents
of the data point.
:param dict data_point: The data point to add to the day's stats dict.
:param dict day_dict: A stats-tracking dict for a 24 hour period.
"""
for topic in ['Bounces', 'Complaints', 'DeliveryAttempts', 'Rejects']:
day_dict[topic] = day_dict.get(topic, 0) + int(data_point[topic])
|
jianlirong/incubator-hawq
|
refs/heads/master
|
tools/bin/gppylib/utils.py
|
12
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import shutil, filecmp,re
import os, fcntl, select, getpass, socket
import popen2 # used by getCommandOutput2 below
import stat
from subprocess import *
from sys import *
from xml.dom import minidom
from xml.dom import Node
from gppylib.gplog import *
logger = get_default_logger()
_debug=0
#############
class ParseError(Exception):
def __init__(self,parseType):
self.msg = ('%s parsing error'%(parseType))
def __str__(self):
return self.msg
#############
class RangeError(Exception):
def __init__(self, value1, value2):
        self.msg = ('%s must be less than %s' % (value1, value2))
def __str__(self):
return self.msg
#############
def createFromSingleHostFile(inputFile):
"""TODO: """
rows=[]
f = open(inputFile, 'r')
for line in f:
rows.append(parseSingleFile(line))
return rows
#############
def toNonNoneString(value) :
if value is None:
return ""
return str(value)
#
# if value is None then an exception is raised
#
# otherwise value is returned
#
def checkNotNone(label, value):
if value is None:
raise Exception( label + " is None")
return value
#
# value should be non-None
#
def checkIsInt(label, value):
if type(value) != type(0):
raise Exception( label + " is not an integer type" )
def isNone( value):
isN=False
if value is None:
isN=True
elif value =="":
isN= True
return isN
def readAllLinesFromFile(fileName, stripLines=False, skipEmptyLines=False):
"""
@param stripLines if true then line.strip() is called on each line read
@param skipEmptyLines if true then empty lines are not returned. Beware! This will throw off your line counts
if you are relying on line counts
"""
res = []
f = open(fileName)
try:
for line in f:
if stripLines:
line = line.strip()
if skipEmptyLines and len(line) == 0:
# skip it!
pass
else:
res.append(line)
finally:
f.close()
return res
def writeLinesToFile(fileName, lines):
f = open(fileName, 'w')
try:
for line in lines:
f.write(line)
f.write('\n')
finally:
f.close()
#############
def parseSingleFile(line):
ph=None
if re.search(r"^#", line):
#skip it, it's a comment
pass
else:
ph=line.rstrip("\n").rstrip()
return ph
def openAnything(source):
"""URI, filename, or string --> stream
This function lets you define parsers that take any input source
(URL, pathname to local or network file, or actual data as a string)
and deal with it in a uniform manner. Returned object is guaranteed
to have all the basic stdio read methods (read, readline, readlines).
Just .close() the object when you're done with it.
Examples:
>>> from xml.dom import minidom
>>> sock = openAnything("http://localhost/kant.xml")
>>> doc = minidom.parse(sock)
>>> sock.close()
>>> sock = openAnything("c:\\inetpub\\wwwroot\\kant.xml")
>>> doc = minidom.parse(sock)
>>> sock.close()
>>> sock = openAnything("<ref id='conjunction'><text>and</text><text>or</text></ref>")
>>> doc = minidom.parse(sock)
>>> sock.close()
"""
if hasattr(source, "read"):
return source
if source == '-':
import sys
return sys.stdin
# try to open with urllib (if source is http, ftp, or file URL)
import urllib
try:
return urllib.urlopen(source)
except (IOError, OSError):
pass
# try to open with native open function (if source is pathname)
try:
return open(source)
except Exception, e:
print ("Exception occurred opening file %s Error: %s" % (source, str(e)))
# treat source as string
import StringIO
return StringIO.StringIO(str(source))
def getOs():
dist=None
fdesc = None
RHId = "/etc/redhat-release"
SuSEId = "/etc/SuSE-release"
try:
fdesc = open(RHId)
for line in fdesc:
line = line.rstrip()
if re.match('CentOS', line):
dist = 'CentOS'
if re.match('Red Hat', line):
dist = 'CentOS' # Red Hat releases are also reported as 'CentOS' by this check
except IOError:
pass
finally:
if fdesc :
fdesc.close()
try:
fdesc = open(SuSEId)
for line in fdesc:
line = line.rstrip()
if re.match('SUSE', line):
dist = 'SuSE'
except IOError:
pass
finally:
if fdesc :
fdesc.close()
return dist
def factory(aClass, *args):
return apply(aClass,args)
def addDicts(a,b):
c = dict(a)
c.update(b)
return c
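# Example: addDicts({'a': 1}, {'b': 2}) returns {'a': 1, 'b': 2}; on key
# collisions the value from b wins, since update() is applied last.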
def joinPath(a,b,parm=""):
c=a+parm+b
return c
def debug(varname, o):
if _debug == 1:
print "Debug: %s -> %s" %(varname, o)
def loadXmlElement(config,elementName):
fdesc = openAnything(config)
xmldoc = minidom.parse(fdesc).documentElement
fdesc.close()
elements=xmldoc.getElementsByTagName(elementName)
return elements
def docIter(node):
"""
Iterates over each node in document order, returning each in turn
"""
#Document order returns the current node,
#then each of its children in turn
yield node
if node.nodeType == Node.ELEMENT_NODE:
#Attributes are stored in a dictionary and
#have no set order. The values() call
#gets a list of actual attribute node objects
#from the dictionary
for attr in node.attributes.values():
yield attr
for child in node.childNodes:
#Create a generator for each child,
#Over which to iterate
for cn in docIter(child):
yield cn
return
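# Example: for an element <a x="1"><b/></a>, docIter yields the 'a' element,
# then its 'x' attribute node, then the 'b' element.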
def makeNonBlocking(fd):
"""Put the file descriptor into non-blocking mode."""
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NDELAY)
def getCommandOutput2(command):
child = popen2.Popen3(command, 1) # Capture stdout and stderr from command
child.tochild.close( ) # don't need to write to child's stdin
outfile = child.fromchild
outfd = outfile.fileno( )
errfile = child.childerr
errfd = errfile.fileno( )
makeNonBlocking(outfd) # Don't deadlock! Make fd's nonblocking.
makeNonBlocking(errfd)
outdata, errdata = [ ], [ ]
outeof = erreof = False
while True:
to_check = [outfd]*(not outeof) + [errfd]*(not erreof)
ready = select.select(to_check, [ ], [ ]) # Wait for input
if outfd in ready[0]:
outchunk = outfile.read( )
if outchunk == '':
outeof = True
else:
outdata.append(outchunk)
if errfd in ready[0]:
errchunk = errfile.read( )
if errchunk == '':
erreof = True
else:
errdata.append(errchunk)
if outeof and erreof:
break
select.select([ ],[ ],[ ],.1) # Allow a little time for buffers to fill
err = child.wait( )
if err != 0:
raise RuntimeError, '%r failed with exit code %d\n%s' % (
command, err, ''.join(errdata))
return ''.join(outdata)
def getCommandOutput(command):
child = os.popen(command)
data = child.read( )
err = child.close( )
#if err :
# raise RuntimeError, '%r failed with exit code %d' % (command, err)
return ''.join(data)
def touchFile(fileName):
if os.path.exists(fileName):
os.remove(fileName)
fi=open(fileName,'w')
fi.close()
def deleteBlock(fileName,beginPattern, endPattern):
#httpdConfFile="/etc/httpd/conf/httpd.conf"
fileNameTmp= fileName +".tmp"
if beginPattern is None :
beginPattern = '#gp begin'
if endPattern is None :
endPattern = '#gp end'
beginLineNo = 0
endLineNo = 0
lineNo =1
#remove existing gp existing entry
if os.path.isfile(fileName):
try:
fdesc = open(fileName)
lines = fdesc.readlines()
fdesc.close()
for line in lines:
line = line.rstrip()
if re.match(beginPattern, line):
beginLineNo = lineNo
#print line
#print beginLineNo
if re.match(endPattern, line) and (beginLineNo != 0):
endLineNo = lineNo
#print endLineNo
lineNo += 1
#print lines[beginLineNo-1:endLineNo]
del lines[beginLineNo-1:endLineNo]
fdesc = open(fileNameTmp,"w")
fdesc.writelines(lines)
fdesc.close()
os.rename(fileNameTmp,fileName)
except IOError, e:
print("IOERROR", e)
sys.exit()
else:
print "***********%s file does not exist"%(fileName)
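# Illustrative usage: deleteBlock('/etc/httpd/conf/httpd.conf', None, None)
# deletes everything between the default '#gp begin' and '#gp end' marker
# lines, inclusive, rewriting the file in place.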
def make_inf_hosts(hp, hstart, hend, istart, iend, hf=None):
hfArr = []
inf_hosts=[]
if hf is not None:
hfArr = hf.split('-') # note: parsed but not used below
print hfArr
for h in range(int(hstart), int(hend)+1):
host = '%s%d' % (hp, h)
for i in range(int(istart), int(iend)+1):
if i != 0 :
inf_hosts.append('%s-%s' % (host, i))
else:
inf_hosts.append('%s' % (host))
return inf_hosts
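# Example: make_inf_hosts('sdw', 1, 2, 0, 1) returns
# ['sdw1', 'sdw1-1', 'sdw2', 'sdw2-1'] (interface 0 maps to the bare host name).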
def copyFile(srcDir,srcFile, destDir, destFile):
result=""
filePath=os.path.join(srcDir, srcFile)
destPath=os.path.join(destDir,destFile)
if not os.path.exists(destDir):
os.makedirs(destDir)
try:
if os.path.isfile(filePath):
#debug("filePath" , filePath)
#debug("destPath" , destPath)
pipe=os.popen("/bin/cp -avf " +filePath +" "+destPath)
result=pipe.read().strip()
#debug ("result",result)
else:
print "no such file or directory " + filePath
except OSError:
print ("OS Error occurred")
return result
def parseKeyColonValueLines(str):
"""
Given a string contain key:value lines, parse the lines and return a map of key->value
Returns None if there was a problem parsing
"""
res = {}
for line in str.split("\n"):
line = line.strip()
if line == "":
continue
colon = line.find(":")
if colon == -1:
logger.warn("Error parsing data, no colon on line %s" % line)
return None
key = line[:colon]
value = line[colon+1:]
res[key] = value
return res
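# Example: parseKeyColonValueLines("a:1\nb: 2\n") returns {'a': '1', 'b': ' 2'};
# values are not stripped, so ' 2' keeps its leading space.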
def sortedDictByKey(di):
return [ (k,di[k]) for k in sorted(di.keys())]
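# Example: sortedDictByKey({'b': 2, 'a': 1}) returns [('a', 1), ('b', 2)].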
def appendNewEntriesToHbaFile(fileName, segments):
"""
Will raise Exception if there is a problem updating the hba file
"""
try:
#
# Get the list of lines that already exist...we won't write those again
#
# Replace runs of whitespace with single space to improve deduping
#
def lineToCanonical(s):
s = re.sub("\s", " ", s) # first reduce whitespace runs to single space
s = re.sub(" $", "", s) # remove trailing space
s = re.sub("^ ", "", s) # remove leading space
return s
existingLineMap = {}
for line in readAllLinesFromFile(fileName):
existingLineMap[lineToCanonical(line)] = True
fp = open(fileName, 'a')
try:
for newSeg in segments:
address = newSeg.getSegmentAddress()
addrinfo = socket.getaddrinfo(address, None)
ipaddrlist = list(set([ (ai[0], ai[4][0]) for ai in addrinfo]))
haveWrittenCommentHeader = False
for addr in ipaddrlist:
newLine = 'host\tall\tall\t%s/%s\ttrust' % (addr[1], '32' if addr[0] == socket.AF_INET else '128')
newLineCanonical = lineToCanonical(newLine)
if newLineCanonical not in existingLineMap:
if not haveWrittenCommentHeader:
fp.write('# %s\n' % address)
haveWrittenCommentHeader = True
fp.write(newLine)
fp.write('\n')
existingLineMap[newLineCanonical] = True
finally:
fp.close()
except IOError, msg:
raise Exception('Failed to open %s' % fileName)
except Exception, msg:
raise Exception('Failed to add new segments to template %s' % fileName)
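# Illustrative result: for a segment whose address resolves to 192.0.2.10 (a
# documentation IP used here as a stand-in), this appends a '# <address>'
# comment header followed by a line of the form
#   host  all  all  192.0.2.10/32  trust
# using /128 instead of /32 for IPv6 addresses, and skips any line already
# present in the file (after whitespace canonicalization).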
class TableLogger:
"""
Use this by constructing it, then calling warn, info, and infoOrWarn with arrays of columns, then outputTable
"""
def __init__(self):
self.__lines = []
self.__warningLines = {}
#
# If True, then warn calls will produce arrows as well at the end of the lines
# Note that this affects subsequent calls to warn and infoOrWarn
#
self.__warnWithArrows = False
def setWarnWithArrows(self, warnWithArrows):
"""
Change the "warn with arrows" behavior for subsequent calls to warn and infoOrWarn
If warnWithArrows is True then warning lines are printed with arrows at the end
returns self
"""
self.__warnWithArrows = warnWithArrows
return self
def warn(self, line):
"""
return self
"""
self.__warningLines[len(self.__lines)] = True
line = [s for s in line]
if self.__warnWithArrows:
line.append( "<<<<<<<<")
self.__lines.append(line)
return self
def info(self, line):
"""
return self
"""
self.__lines.append([s for s in line])
return self
def infoOrWarn(self, warnIfTrue, line):
"""
return self
"""
if warnIfTrue:
self.warn(line)
else: self.info(line)
return self
def outputTable(self):
"""
return self
"""
lines = self.__lines
warningLineNumbers = self.__warningLines
lineWidth = []
for line in lines:
if line is not None:
while len(lineWidth) < len(line):
lineWidth.append(0)
for i, field in enumerate(line):
lineWidth[i] = max(len(field), lineWidth[i])
# now print it all!
for lineNumber, line in enumerate(lines):
doWarn = warningLineNumbers.get(lineNumber)
if line is None:
#
# separator
#
logger.info("----------------------------------------------------")
else:
outLine = []
for i, field in enumerate(line):
if i == len(line) - 1:
# don't pad the last one since it's not strictly needed,
# and we could have a really long last column for some lines
outLine.append(field)
else:
outLine.append(field.ljust(lineWidth[i] + 3))
msg = "".join(outLine)
if doWarn:
logger.warn(msg)
else:
logger.info(" " + msg) # add 3 so that lines will line up even with the INFO and WARNING stuff on front
return self
def addSeparator(self):
self.__lines.append(None)
def getNumLines(self):
return len(self.__lines)
def getNumWarnings(self):
return len(self.__warningLines)
def hasWarnings(self):
return self.getNumWarnings() > 0
class ParsedConfigFile:
"""
returned by call to parseMirroringConfigFile
"""
def __init__( self, flexibleHeaders, rows):
self.__flexibleHeaders = flexibleHeaders
self.__rows = rows
def getRows(self):
"""
@return a non-None list of ParsedConfigFileRow
"""
return self.__rows
def getFlexibleHeaders(self):
"""
@return a non-None list of strings
"""
return self.__flexibleHeaders
class ParsedConfigFileRow:
"""
used as part of ParsedConfigFile, returned by call to parseMirroringConfigFile
"""
def __init__(self, fixedValuesMap, flexibleValuesMap, line):
self.__fixedValuesMap = fixedValuesMap
self.__flexibleValuesMap = flexibleValuesMap
self.__line = line
def getFixedValuesMap(self):
"""
@return non-None dictionary
"""
return self.__fixedValuesMap
def getFlexibleValuesMap(self):
"""
@return non-None dictionary
"""
return self.__flexibleValuesMap
def getLine(self):
"""
@return the actual line that produced this config row; can be used for error reporting
"""
return self.__line
def parseMirroringConfigFile( lines, fileLabelForExceptions, fixedHeaderNames, keyForFlexibleHeaders,
linesWillHaveLineHeader, numberRequiredHeadersForRecoversegFormat=None):
"""
Read a config file that is in the mirroring or recoverseg config format
@param lines the list of Strings to parse
@param fixedHeaderNames a list of Strings, listing what should appear as the first len(fixedHeaderNames)
values in each row
@param keyForFlexibleHeaders if None then no extra values are read, otherwise it's the key for flexible
headers that should be passed. If this is passed then the first line of the file
should look like keyValue=a1:a2:...a3
@param numberRequiredHeadersForRecoversegFormat if not None then the line can be either this
many elements from fixedHeaderNames, or that many elements then a space separator and
then the remaining required ones. If we consolidate formats then we could remove
this hacky option
@return a list of values
todo: should allow escaping of colon values, or switch to CSV and support CSV escaping
"""
lines = [s.strip() for s in lines if len(s.strip()) > 0]
# see if there is the flexible header
rows = []
flexibleHeaders = []
if keyForFlexibleHeaders is not None:
if len(lines) == 0:
raise Exception("Missing header line with %s= values specified" % keyForFlexibleHeaders )
flexHeaderLineSplit = lines[0].split("=")
if len(flexHeaderLineSplit) != 2 or flexHeaderLineSplit[0] != keyForFlexibleHeaders:
raise Exception('%s format error for first line %s' % (fileLabelForExceptions, lines[0]))
str = flexHeaderLineSplit[1].strip()
if len(str) > 0:
flexibleHeaders = str.split(":")
lines = lines[1:]
# now read the real lines
numExpectedValuesPerLine = len(fixedHeaderNames) + len(flexibleHeaders)
for line in lines:
origLine = line
if linesWillHaveLineHeader:
arr = line.split("=")
if len(arr) != 2:
raise Exception('%s format error for line %s' % (fileLabelForExceptions, line))
line = arr[1]
numExpectedThisLine = numExpectedValuesPerLine
fixedToRead = fixedHeaderNames
flexibleToRead = flexibleHeaders
if numberRequiredHeadersForRecoversegFormat is not None:
arr = line.split()
if len(arr) == 1:
numExpectedThisLine = numberRequiredHeadersForRecoversegFormat
fixedToRead = fixedHeaderNames[0:numberRequiredHeadersForRecoversegFormat]
flexibleToRead = []
elif len(arr) == 2:
# read the full ones, treat it like one big line
line = arr[0] + ":" + arr[1]
else: raise Exception('config file format error. %s' % line)
arr = line.split(":")
if len(arr) != numExpectedThisLine:
raise Exception('%s format error for line (wrong number of values. '
'Found %d but expected %d) : %s' %
(fileLabelForExceptions, len(arr), numExpectedThisLine, line))
fixedValuesMap = {}
flexibleValuesMap = {}
index = 0
for name in fixedToRead:
fixedValuesMap[name] = arr[index]
index += 1
for name in flexibleToRead:
flexibleValuesMap[name] = arr[index]
index += 1
rows.append(ParsedConfigFileRow(fixedValuesMap, flexibleValuesMap, origLine))
return ParsedConfigFile( flexibleHeaders, rows)
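# Illustrative call (hypothetical values): with fixedHeaderNames=['address', 'port'],
# keyForFlexibleHeaders=None and linesWillHaveLineHeader=False, the input line
# 'sdw1:50001' parses into a row whose fixed-values map is
# {'address': 'sdw1', 'port': '50001'}.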
def createSegmentSpecificPath(path, gpPrefix, segment):
"""
Create a segment specific path for the given gpPrefix and segment
@param gpPrefix a string used to prefix directory names
@param segment a GpDB value
"""
return os.path.join(path, '%s%d' % (gpPrefix, segment.getSegmentContentId()))
class PathNormalizationException(Exception):
pass
def normalizeAndValidateInputPath(path, errorMessagePathSource=None, errorMessagePathFullInput=None):
"""
Raises a PathNormalizationException if the path is not an absolute path or an url. The exception msg will use
errorMessagePathSource and errorMessagePathFullInput to build the error message.
Does not check that the path exists
@param errorMessagePathSource from where the path was read such as "by user", "in file"
@param errorMessagePathFullInput the full input (line, for example) from which the path was read; for example,
if the path is part of a larger line of input read then you can pass the full line here
"""
path = path.strip()
url_pattern = "^[a-z][-a-z0-9\+\.]*://"
if re.match(url_pattern, path, flags=re.IGNORECASE) != None:
return path
if not os.path.isabs(path):
firstPart = " " if errorMessagePathSource is None else " " + errorMessagePathSource + " "
secondPart = "" if errorMessagePathFullInput is None else " from: %s" % errorMessagePathFullInput
raise PathNormalizationException("Path entered%sis invalid; it must be a full path or url. Path: '%s'%s" %
( firstPart, path, secondPart ))
return os.path.normpath(path)
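# Example: normalizeAndValidateInputPath('/data//primary/') returns '/data/primary',
# 'nfs://host/vol' is returned unchanged (matches the url pattern), and a relative
# path such as 'data/primary' raises PathNormalizationException.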
def canStringBeParsedAsInt(str):
"""
return True if int(str) would produce a value rather than throwing an error,
else return False
"""
try:
int(str)
return True
except ValueError:
return False
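# Example: canStringBeParsedAsInt('42') returns True; canStringBeParsedAsInt('4.2')
# returns False, since int('4.2') raises ValueError.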
|
erwilan/ansible
|
refs/heads/devel
|
test/units/modules/network/vyos/test_vyos_config.py
|
77
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_config
from .vyos_module import TestVyosModule, load_fixture, set_module_args
class TestVyosConfigModule(TestVyosModule):
module = vyos_config
def setUp(self):
self.mock_get_config = patch('ansible.modules.network.vyos.vyos_config.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.vyos.vyos_config.load_config')
self.load_config = self.mock_load_config.start()
self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_config.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
self.mock_get_config.stop()
self.mock_load_config.stop()
self.mock_run_commands.stop()
def load_fixtures(self, commands=None):
config_file = 'vyos_config_config.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_vyos_config_unchanged(self):
src = load_fixture('vyos_config_config.cfg')
set_module_args(dict(src=src))
self.execute_module()
def test_vyos_config_src(self):
src = load_fixture('vyos_config_src.cfg')
set_module_args(dict(src=src))
commands = ['set system host-name foo', 'delete interfaces ethernet eth0 address']
self.execute_module(changed=True, commands=commands)
def test_vyos_config_src_brackets(self):
src = load_fixture('vyos_config_src_brackets.cfg')
set_module_args(dict(src=src))
commands = ['set interfaces ethernet eth0 address 10.10.10.10/24', 'set system host-name foo']
self.execute_module(changed=True, commands=commands)
def test_vyos_config_backup(self):
set_module_args(dict(backup=True))
result = self.execute_module()
self.assertIn('__backup__', result)
def test_vyos_config_lines(self):
commands = ['set system host-name foo']
set_module_args(dict(lines=commands))
self.execute_module(changed=True, commands=commands)
def test_vyos_config_config(self):
config = 'set system host-name localhost'
new_config = ['set system host-name router']
set_module_args(dict(lines=new_config, config=config))
self.execute_module(changed=True, commands=new_config)
def test_vyos_config_match_none(self):
lines = ['set system interfaces ethernet eth0 address 1.2.3.4/24',
'set system interfaces ethernet eth0 description test string']
set_module_args(dict(lines=lines, match='none'))
self.execute_module(changed=True, commands=lines, sort=False)
|
bentwire/mbed
|
refs/heads/master
|
workspace_tools/host_tests/udp_link_layer_auto.py
|
124
|
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
How to use:
make.py -m LPC1768 -t ARM -d E:\ -n NET_14
udp_link_layer_auto.py -p COM20 -d E:\ -t 10
"""
import re
import uuid
import socket
import thread
from sys import stdout
from time import time, sleep
from host_test import DefaultTest
from SocketServer import BaseRequestHandler, UDPServer
# Received datagrams (with time)
dict_udp_recv_datagrams = dict()
# Sent datagrams (with time)
dict_udp_sent_datagrams = dict()
class UDPEchoClient_Handler(BaseRequestHandler):
def handle(self):
""" One handle per connection
"""
_data, _socket = self.request
# Process received datagram
data_str = repr(_data)[1:-1]
dict_udp_recv_datagrams[data_str] = time()
def udp_packet_recv(threadName, server_ip, server_port):
""" This function will receive packet stream from mbed device
"""
server = UDPServer((server_ip, server_port), UDPEchoClient_Handler)
print "[UDP_COUNTER] Listening for connections... %s:%d"% (server_ip, server_port)
server.serve_forever()
class UDPEchoServerTest(DefaultTest):
ECHO_SERVER_ADDRESS = "" # UDP IP of datagram bursts
ECHO_PORT = 0 # UDP port for datagram bursts
CONTROL_PORT = 23 # TCP port used to get stats from mbed device, e.g. counters
s = None # Socket
TEST_PACKET_COUNT = 1000 # how many packets should be send
TEST_STRESS_FACTOR = 0.001 # stress factor: 10 ms
PACKET_SATURATION_RATIO = 29.9 # Acceptable packet transmission in %
PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
def get_control_data(self, command="stat\n"):
BUFFER_SIZE = 256
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ECHO_SERVER_ADDRESS, self.CONTROL_PORT))
except Exception, e:
return None # connection to the control port failed; no data available
s.send(command)
data = s.recv(BUFFER_SIZE)
s.close()
return data
def test(self):
serial_ip_msg = self.mbed.serial_readline()
if serial_ip_msg is None:
return self.RESULT_IO_SERIAL
stdout.write(serial_ip_msg)
stdout.flush()
# Searching for IP address and port prompted by server
m = self.re_detect_server_ip.search(serial_ip_msg)
if m and len(m.groups()):
self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
self.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
# Open client socket to burst datagrams to UDP server in mbed
try:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except Exception, e:
self.s = None
self.notify("HOST: Error: %s"% e)
return self.RESULT_ERROR
# UDP replied receiver works in background to get echoed datagrams
SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
SERVER_PORT = self.ECHO_PORT + 1
thread.start_new_thread(udp_packet_recv, ("Thread-udp-recv", SERVER_IP, SERVER_PORT))
sleep(0.5)
# Burst part
for no in range(self.TEST_PACKET_COUNT):
TEST_STRING = str(uuid.uuid4())
payload = str(no) + "__" + TEST_STRING
self.s.sendto(payload, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
dict_udp_sent_datagrams[payload] = time()
sleep(self.TEST_STRESS_FACTOR)
if self.s is not None:
self.s.close()
# Wait 5 seconds for packets to come
result = True
self.notify("HOST: Test Summary:")
for d in range(5):
sleep(1.0)
summary_datagram_success = (float(len(dict_udp_recv_datagrams)) / float(self.TEST_PACKET_COUNT)) * 100.0
self.notify("HOST: Datagrams received after +%d sec: %.3f%% (%d / %d), stress=%.3f ms"% (d,
summary_datagram_success,
len(dict_udp_recv_datagrams),
self.TEST_PACKET_COUNT,
self.TEST_STRESS_FACTOR))
result = result and (summary_datagram_success >= self.PACKET_SATURATION_RATIO)
stdout.flush()
# Getting control data from test
self.notify("...")
self.notify("HOST: Mbed Summary:")
mbed_stats = self.get_control_data()
self.notify(mbed_stats)
return self.RESULT_SUCCESS if result else self.RESULT_FAILURE
if __name__ == '__main__':
UDPEchoServerTest().run()
|
pepeantena4040/MiSitioWeb
|
refs/heads/master
|
scripts/lib/wic/3rdparty/pykickstart/base.py
|
14
|
#
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2006, 2007, 2008 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
"""
Base classes for creating commands and syntax version objects.
This module exports several important base classes:
BaseData - The base abstract class for all data objects. Data objects
are contained within a BaseHandler object.
BaseHandler - The base abstract class from which versioned kickstart
handlers are derived. Subclasses of BaseHandler hold
BaseData and KickstartCommand objects.
DeprecatedCommand - An abstract subclass of KickstartCommand that should
be further subclassed by users of this module. When
a subclass is used, a warning message will be
printed.
KickstartCommand - The base abstract class for all kickstart commands.
Command objects are contained within a BaseHandler
object.
"""
import gettext
gettext.textdomain("pykickstart")
_ = lambda x: gettext.ldgettext("pykickstart", x)
import types
import warnings
from pykickstart.errors import *
from pykickstart.ko import *
from pykickstart.parser import Packages
from pykickstart.version import versionToString
###
### COMMANDS
###
class KickstartCommand(KickstartObject):
"""The base class for all kickstart commands. This is an abstract class."""
removedKeywords = []
removedAttrs = []
def __init__(self, writePriority=0, *args, **kwargs):
"""Create a new KickstartCommand instance. This method must be
provided by all subclasses, but subclasses must call
KickstartCommand.__init__ first. Instance attributes:
currentCmd -- The name of the command in the input file that
caused this handler to be run.
currentLine -- The current unprocessed line from the input file
that caused this handler to be run.
handler -- A reference to the BaseHandler subclass this
command is contained within. This is needed to
allow referencing of Data objects.
lineno -- The current line number in the input file.
writePriority -- An integer specifying when this command should be
printed when iterating over all commands' __str__
methods. The higher the number, the later this
command will be written. All commands with the
same priority will be written alphabetically.
"""
# We don't want people using this class by itself.
if self.__class__ is KickstartCommand:
raise TypeError, "KickstartCommand is an abstract class."
KickstartObject.__init__(self, *args, **kwargs)
self.writePriority = writePriority
# These will be set by the dispatcher.
self.currentCmd = ""
self.currentLine = ""
self.handler = None
self.lineno = 0
# If a subclass provides a removedKeywords list, remove all the
# members from the kwargs list before we start processing it. This
# ensures that subclasses don't continue to recognize arguments that
# were removed.
for arg in filter(kwargs.has_key, self.removedKeywords):
kwargs.pop(arg)
def __call__(self, *args, **kwargs):
"""Set multiple attributes on a subclass of KickstartCommand at once
via keyword arguments. Valid attributes are anything specified in
a subclass, but unknown attributes will be ignored.
"""
for (key, val) in kwargs.items():
# Ignore setting attributes that were removed in a subclass, as
# if they were unknown attributes.
if key in self.removedAttrs:
continue
if hasattr(self, key):
setattr(self, key, val)
def __str__(self):
"""Return a string formatted for output to a kickstart file. This
method must be provided by all subclasses.
"""
return KickstartObject.__str__(self)
def parse(self, args):
"""Parse the list of args and set data on the KickstartCommand object.
This method must be provided by all subclasses.
"""
raise TypeError, "parse() not implemented for KickstartCommand"
def apply(self, instroot="/"):
"""Write out the configuration related to the KickstartCommand object.
Subclasses which do not provide this method will not have their
configuration written out.
"""
return
def dataList(self):
"""For commands that can occur multiple times in a single kickstart
file (like network, part, etc.), return the list that we should
append more data objects to.
"""
return None
def deleteRemovedAttrs(self):
"""Remove all attributes from self that are given in the removedAttrs
list. This method should be called from __init__ in a subclass,
but only after the superclass's __init__ method has been called.
"""
for attr in filter(lambda k: hasattr(self, k), self.removedAttrs):
delattr(self, attr)
# Set the contents of the opts object (an instance of optparse.Values
# returned by parse_args) as attributes on the KickstartCommand object.
# It's useful to call this from KickstartCommand subclasses after parsing
# the arguments.
def _setToSelf(self, optParser, opts):
self._setToObj(optParser, opts, self)
# Sets the contents of the opts object (an instance of optparse.Values
# returned by parse_args) as attributes on the provided object obj. It's
# useful to call this from KickstartCommand subclasses that handle lists
# of objects (like partitions, network devices, etc.) and need to populate
# a Data object.
def _setToObj(self, optParser, opts, obj):
for key in filter (lambda k: getattr(opts, k) != None, optParser.keys()):
setattr(obj, key, getattr(opts, key))
class DeprecatedCommand(KickstartCommand):
"""Specify that a command is deprecated and no longer has any function.
Any command that is deprecated should be subclassed from this class,
only specifying an __init__ method that calls the superclass's __init__.
This is an abstract class.
"""
def __init__(self, writePriority=None, *args, **kwargs):
# We don't want people using this class by itself.
if self.__class__ is DeprecatedCommand:
raise TypeError, "DeprecatedCommand is an abstract class."
# Create a new DeprecatedCommand instance.
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
def __str__(self):
"""Placeholder since DeprecatedCommands don't work anymore."""
return ""
def parse(self, args):
"""Print a warning message if the command is seen in the input file."""
mapping = {"lineno": self.lineno, "cmd": self.currentCmd}
warnings.warn(_("Ignoring deprecated command on line %(lineno)s: The %(cmd)s command has been deprecated and no longer has any effect. It may be removed from future releases, which will result in a fatal error from kickstart. Please modify your kickstart file to remove this command.") % mapping, DeprecationWarning)
###
### HANDLERS
###
class BaseHandler(KickstartObject):
"""Each version of kickstart syntax is provided by a subclass of this
class. These subclasses are what users will interact with for parsing,
extracting data, and writing out kickstart files. This is an abstract
class.
version -- The version this syntax handler supports. This is set by
a class attribute of a BaseHandler subclass and is used to
set up the command dict. It is for read-only use.
"""
version = None
def __init__(self, mapping=None, dataMapping=None, commandUpdates=None,
dataUpdates=None, *args, **kwargs):
"""Create a new BaseHandler instance. This method must be provided by
all subclasses, but subclasses must call BaseHandler.__init__ first.
mapping -- A custom map from command strings to classes,
useful when creating your own handler with
special command objects. It is otherwise unused
and rarely needed. If you give this argument,
the mapping takes the place of the default one
and so must include all commands you want
recognized.
dataMapping -- This is the same as mapping, but for data
objects. All the same comments apply.
commandUpdates -- This is similar to mapping, but does not take
the place of the defaults entirely. Instead,
this mapping is applied after the defaults and
updates it with just the commands you want to
modify.
dataUpdates -- This is the same as commandUpdates, but for
data objects.
Instance attributes:
commands -- A mapping from a string command to a KickstartCommand
subclass object that handles it. Multiple strings can
map to the same object, but only one instance of the
command object should ever exist. Most users should
never have to deal with this directly, as it is
manipulated internally and called through dispatcher.
currentLine -- The current unprocessed line from the input file
that caused this handler to be run.
packages -- An instance of pykickstart.parser.Packages which
describes the packages section of the input file.
platform -- A string describing the hardware platform, which is
needed only by system-config-kickstart.
scripts -- A list of pykickstart.parser.Script instances, which is
populated by KickstartParser.addScript and describes the
%pre/%post/%traceback script section of the input file.
"""
# We don't want people using this class by itself.
if self.__class__ is BaseHandler:
raise TypeError, "BaseHandler is an abstract class."
KickstartObject.__init__(self, *args, **kwargs)
# This isn't really a good place for these, but it's better than
# everything else I can think of.
self.scripts = []
self.packages = Packages()
self.platform = ""
# These will be set by the dispatcher.
self.commands = {}
self.currentLine = 0
# A dict keyed by an integer priority number, with each value being a
# list of KickstartCommand subclasses. This dict is maintained by
# registerCommand and used in __str__. No one else should be touching
# it.
self._writeOrder = {}
self._registerCommands(mapping, dataMapping, commandUpdates, dataUpdates)
def __str__(self):
"""Return a string formatted for output to a kickstart file."""
retval = ""
if self.platform != "":
retval += "#platform=%s\n" % self.platform
retval += "#version=%s\n" % versionToString(self.version)
lst = self._writeOrder.keys()
lst.sort()
for prio in lst:
for obj in self._writeOrder[prio]:
retval += obj.__str__()
for script in self.scripts:
retval += script.__str__()
retval += self.packages.__str__()
return retval
def _insertSorted(self, lst, obj):
length = len(lst)
i = 0
while i < length:
# If the two classes have the same name, it's because we are
# overriding an existing class with one from a later kickstart
# version, so remove the old one in favor of the new one.
if obj.__class__.__name__ > lst[i].__class__.__name__:
i += 1
elif obj.__class__.__name__ == lst[i].__class__.__name__:
lst[i] = obj
return
elif obj.__class__.__name__ < lst[i].__class__.__name__:
break
if i >= length:
lst.append(obj)
else:
lst.insert(i, obj)
def _setCommand(self, cmdObj):
# Add an attribute on this version object. We need this to provide a
# way for clients to access the command objects. We also need to strip
# off the version part from the front of the name.
if cmdObj.__class__.__name__.find("_") != -1:
name = unicode(cmdObj.__class__.__name__.split("_", 1)[1])
else:
name = unicode(cmdObj.__class__.__name__).lower()
setattr(self, name.lower(), cmdObj)
# Also, add the object into the _writeOrder dict in the right place.
if cmdObj.writePriority is not None:
if self._writeOrder.has_key(cmdObj.writePriority):
self._insertSorted(self._writeOrder[cmdObj.writePriority], cmdObj)
else:
self._writeOrder[cmdObj.writePriority] = [cmdObj]
def _registerCommands(self, mapping=None, dataMapping=None, commandUpdates=None,
dataUpdates=None):
if mapping == {} or mapping == None:
from pykickstart.handlers.control import commandMap
cMap = commandMap[self.version]
else:
cMap = mapping
if dataMapping == {} or dataMapping == None:
from pykickstart.handlers.control import dataMap
dMap = dataMap[self.version]
else:
dMap = dataMapping
if type(commandUpdates) == types.DictType:
cMap.update(commandUpdates)
if type(dataUpdates) == types.DictType:
dMap.update(dataUpdates)
for (cmdName, cmdClass) in cMap.iteritems():
# First make sure we haven't instantiated this command handler
# already. If we have, we just need to make another mapping to
# it in self.commands.
cmdObj = None
for (key, val) in self.commands.iteritems():
if val.__class__.__name__ == cmdClass.__name__:
cmdObj = val
break
# If we didn't find an instance in self.commands, create one now.
if cmdObj == None:
cmdObj = cmdClass()
self._setCommand(cmdObj)
# Finally, add the mapping to the commands dict.
self.commands[cmdName] = cmdObj
self.commands[cmdName].handler = self
# We also need to create attributes for the various data objects.
# No checks here because dMap is a bijection. At least, that's what
# the comment says. Hope no one screws that up.
for (dataName, dataClass) in dMap.iteritems():
setattr(self, dataName, dataClass)
def dispatcher(self, args, lineno):
"""Call the appropriate KickstartCommand handler for the current line
in the kickstart file. A handler for the current command should
be registered, though a handler of None is not an error. Returns
the data object returned by KickstartCommand.parse.
args -- A list of arguments to the current command
lineno -- The line number in the file, for error reporting
"""
cmd = args[0]
if not self.commands.has_key(cmd):
raise KickstartParseError, formatErrorMsg(lineno, msg=_("Unknown command: %s" % cmd))
elif self.commands[cmd] != None:
self.commands[cmd].currentCmd = cmd
self.commands[cmd].currentLine = self.currentLine
self.commands[cmd].lineno = lineno
# The parser returns the data object that was modified. This could
# be a BaseData subclass that should be put into a list, or it
# could be the command handler object itself.
obj = self.commands[cmd].parse(args[1:])
lst = self.commands[cmd].dataList()
if lst is not None:
lst.append(obj)
return obj
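# Illustrative flow: for the kickstart line "network --bootproto=dhcp", the
# parser calls dispatcher(["network", "--bootproto=dhcp"], lineno), which looks
# up the registered "network" command object and invokes its parse() with
# ["--bootproto=dhcp"].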
def maskAllExcept(self, lst):
"""Set all entries in the commands dict to None, except the ones in
the lst. All other commands will not be processed.
"""
self._writeOrder = {}
for (key, val) in self.commands.iteritems():
if not key in lst:
self.commands[key] = None
def hasCommand(self, cmd):
"""Return true if there is a handler for the string cmd."""
return hasattr(self, cmd)
###
### DATA
###
class BaseData(KickstartObject):
"""The base class for all data objects. This is an abstract class."""
removedKeywords = []
removedAttrs = []
def __init__(self, *args, **kwargs):
"""Create a new BaseData instance.
lineno -- Line number in the ks-file where this object was defined
"""
# We don't want people using this class by itself.
if self.__class__ is BaseData:
raise TypeError, "BaseData is an abstract class."
KickstartObject.__init__(self, *args, **kwargs)
self.lineno = 0
def __str__(self):
"""Return a string formatted for output to a kickstart file."""
return ""
def __call__(self, *args, **kwargs):
"""Set multiple attributes on a subclass of BaseData at once via
keyword arguments. Valid attributes are anything specified in a
subclass, but unknown attributes will be ignored.
"""
for (key, val) in kwargs.items():
# Ignore setting attributes that were removed in a subclass, as
# if they were unknown attributes.
if key in self.removedAttrs:
continue
if hasattr(self, key):
setattr(self, key, val)
def deleteRemovedAttrs(self):
"""Remove all attributes from self that are given in the removedAttrs
list. This method should be called from __init__ in a subclass,
but only after the superclass's __init__ method has been called.
"""
for attr in filter(lambda k: hasattr(self, k), self.removedAttrs):
delattr(self, attr)
|
supersven/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/smartEnter/docTypeRType_after.py
|
83
|
def foo(a):
"""
<caret>
@param a:
@type a:
@return:
@rtype:
"""
pass
def foo1():
"""
:return :
"""
|
xinjiguaike/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/assetstore/__init__.py
|
124
|
"""
Classes representing asset metadata.
"""
from datetime import datetime
import dateutil.parser
import pytz
import json
from contracts import contract, new_contract
from opaque_keys.edx.keys import CourseKey, AssetKey
from lxml import etree
new_contract('AssetKey', AssetKey)
new_contract('CourseKey', CourseKey)
new_contract('datetime', datetime)
new_contract('basestring', basestring)
new_contract('long', long)
new_contract('AssetElement', lambda x: isinstance(x, etree._Element) and x.tag == "asset") # pylint: disable=protected-access
new_contract('AssetsElement', lambda x: isinstance(x, etree._Element) and x.tag == "assets") # pylint: disable=protected-access
class AssetMetadata(object):
"""
Stores the metadata associated with a particular course asset. The asset metadata gets stored
in the modulestore.
"""
TOP_LEVEL_ATTRS = ['pathname', 'internal_name', 'locked', 'contenttype', 'thumbnail', 'fields']
EDIT_INFO_ATTRS = ['curr_version', 'prev_version', 'edited_by', 'edited_by_email', 'edited_on']
CREATE_INFO_ATTRS = ['created_by', 'created_by_email', 'created_on']
ATTRS_ALLOWED_TO_UPDATE = TOP_LEVEL_ATTRS + EDIT_INFO_ATTRS
ASSET_TYPE_ATTR = 'type'
ASSET_BASENAME_ATTR = 'filename'
XML_ONLY_ATTRS = [ASSET_TYPE_ATTR, ASSET_BASENAME_ATTR]
XML_ATTRS = XML_ONLY_ATTRS + ATTRS_ALLOWED_TO_UPDATE + CREATE_INFO_ATTRS
# Type for assets uploaded by a course author in Studio.
GENERAL_ASSET_TYPE = 'asset'
# Asset section XML tag for asset metadata as XML.
ALL_ASSETS_XML_TAG = 'assets'
# Individual asset XML tag for asset metadata as XML.
ASSET_XML_TAG = 'asset'
# Top-level directory name in exported course XML which holds asset metadata.
EXPORTED_ASSET_DIR = 'assets'
# Filename of all asset metadata exported as XML.
EXPORTED_ASSET_FILENAME = 'assets.xml'
@contract(asset_id='AssetKey',
pathname='basestring|None', internal_name='basestring|None',
locked='bool|None', contenttype='basestring|None',
thumbnail='basestring|None', fields='dict|None',
curr_version='basestring|None', prev_version='basestring|None',
created_by='int|long|None', created_by_email='basestring|None', created_on='datetime|None',
edited_by='int|long|None', edited_by_email='basestring|None', edited_on='datetime|None')
def __init__(self, asset_id,
pathname=None, internal_name=None,
locked=None, contenttype=None,
thumbnail=None, fields=None,
curr_version=None, prev_version=None,
created_by=None, created_by_email=None, created_on=None,
edited_by=None, edited_by_email=None, edited_on=None,
field_decorator=None,):
"""
Construct an AssetMetadata object.
Arguments:
asset_id (AssetKey): Key identifying this particular asset.
pathname (str): Original path to file at asset upload time.
internal_name (str): Name, url, or handle for the storage system to access the file.
locked (bool): If True, only course participants can access the asset.
contenttype (str): MIME type of the asset.
thumbnail (str): the internal_name for the thumbnail if one exists
fields (dict): fields to save w/ the metadata
curr_version (str): Current version of the asset.
prev_version (str): Previous version of the asset.
created_by (int): User ID of initial user to upload this asset.
created_by_email (str): Email address of initial user to upload this asset.
created_on (datetime): Datetime of initial upload of this asset.
edited_by (int): User ID of last user to upload this asset.
edited_by_email (str): Email address of last user to upload this asset.
edited_on (datetime): Datetime of last upload of this asset.
field_decorator (function): used by strip_key to convert OpaqueKeys to the app's understanding.
Not saved.
"""
self.asset_id = asset_id if field_decorator is None else field_decorator(asset_id)
self.pathname = pathname # Path w/o filename.
self.internal_name = internal_name
self.locked = locked
self.contenttype = contenttype
self.thumbnail = thumbnail
self.curr_version = curr_version
self.prev_version = prev_version
now = datetime.now(pytz.utc)
self.edited_by = edited_by
self.edited_by_email = edited_by_email
self.edited_on = edited_on or now
# created_by, created_by_email, and created_on should only be set here.
self.created_by = created_by
self.created_by_email = created_by_email
self.created_on = created_on or now
self.fields = fields or {}
def __repr__(self):
return """AssetMetadata{!r}""".format((
self.asset_id,
self.pathname, self.internal_name,
self.locked, self.contenttype, self.fields,
self.curr_version, self.prev_version,
self.created_by, self.created_by_email, self.created_on,
self.edited_by, self.edited_by_email, self.edited_on,
))
def update(self, attr_dict):
"""
Set the attributes on the metadata. Any which are not in ATTRS_ALLOWED_TO_UPDATE get put into
fields.
Arguments:
attr_dict: Prop, val dictionary of all attributes to set.
"""
for attr, val in attr_dict.iteritems():
if attr in self.ATTRS_ALLOWED_TO_UPDATE:
setattr(self, attr, val)
else:
self.fields[attr] = val
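# Illustrative (hypothetical values): update({'locked': True, 'color': 'red'})
# sets self.locked = True, while 'color' lands in self.fields because it is
# not listed in ATTRS_ALLOWED_TO_UPDATE.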
def to_storable(self):
"""
Converts metadata properties into a MongoDB-storable dict.
"""
return {
'filename': self.asset_id.path,
'asset_type': self.asset_id.asset_type,
'pathname': self.pathname,
'internal_name': self.internal_name,
'locked': self.locked,
'contenttype': self.contenttype,
'thumbnail': self.thumbnail,
'fields': self.fields,
'edit_info': {
'curr_version': self.curr_version,
'prev_version': self.prev_version,
'created_by': self.created_by,
'created_by_email': self.created_by_email,
'created_on': self.created_on,
'edited_by': self.edited_by,
'edited_by_email': self.edited_by_email,
'edited_on': self.edited_on
}
}
@contract(asset_doc='dict|None')
def from_storable(self, asset_doc):
"""
Fill in all metadata fields from a MongoDB document.
The asset_id prop is initialized upon construction only.
"""
if asset_doc is None:
return
self.pathname = asset_doc['pathname']
self.internal_name = asset_doc['internal_name']
self.locked = asset_doc['locked']
self.contenttype = asset_doc['contenttype']
self.thumbnail = asset_doc['thumbnail']
self.fields = asset_doc['fields']
self.curr_version = asset_doc['edit_info']['curr_version']
self.prev_version = asset_doc['edit_info']['prev_version']
self.created_by = asset_doc['edit_info']['created_by']
self.created_by_email = asset_doc['edit_info']['created_by_email']
self.created_on = asset_doc['edit_info']['created_on']
self.edited_by = asset_doc['edit_info']['edited_by']
self.edited_by_email = asset_doc['edit_info']['edited_by_email']
self.edited_on = asset_doc['edit_info']['edited_on']
@contract(node='AssetElement')
def from_xml(self, node):
"""
Walk the etree XML node and fill in the asset metadata.
The node should be a top-level "asset" element.
"""
for child in node:
qname = etree.QName(child)
tag = qname.localname
if tag in self.XML_ATTRS:
value = child.text
if tag in self.XML_ONLY_ATTRS:
# An AssetLocator is constructed separately from these parts.
continue
elif tag == 'locked':
# Boolean.
value = True if value == "true" else False
elif value == 'None':
# None.
value = None
elif tag in ('created_on', 'edited_on'):
# ISO datetime.
value = dateutil.parser.parse(value)
elif tag in ('created_by', 'edited_by'):
# Integer representing user id.
value = int(value)
elif tag == 'fields':
# Dictionary.
value = json.loads(value)
setattr(self, tag, value)
@contract(node='AssetElement')
def to_xml(self, node):
"""
Add the asset data as XML to the passed-in node.
The node should already be created as a top-level "asset" element.
"""
for attr in self.XML_ATTRS:
child = etree.SubElement(node, attr)
# Get the value.
if attr == self.ASSET_TYPE_ATTR:
value = self.asset_id.asset_type
elif attr == self.ASSET_BASENAME_ATTR:
value = self.asset_id.path
else:
value = getattr(self, attr)
# Format the value.
if isinstance(value, bool):
value = "true" if value else "false"
elif isinstance(value, datetime):
value = value.isoformat()
elif isinstance(value, dict):
value = json.dumps(value)
else:
value = unicode(value)
child.text = value
@staticmethod
@contract(node='AssetsElement', assets=list)
def add_all_assets_as_xml(node, assets):
"""
Take a list of AssetMetadata objects. Add them all to the node.
The node should already be created as a top-level "assets" element.
"""
for asset in assets:
asset_node = etree.SubElement(node, "asset")
asset.to_xml(asset_node)
class CourseAssetsFromStorage(object):
"""
Wrapper class for asset metadata lists returned from modulestore storage.
"""
@contract(course_id='CourseKey', asset_md=dict)
def __init__(self, course_id, doc_id, asset_md):
"""
Params:
course_id: Course ID for which the asset metadata is stored.
doc_id: ObjectId of MongoDB document
asset_md: Dict with asset types as keys and lists of storable asset metadata as values.
"""
self.course_id = course_id
self._doc_id = doc_id
self.asset_md = asset_md
@property
def doc_id(self):
"""
Returns the ID associated with the MongoDB document which stores these course assets.
"""
return self._doc_id
def setdefault(self, item, default=None):
"""
Provides dict-equivalent setdefault functionality.
"""
return self.asset_md.setdefault(item, default)
def __getitem__(self, item):
return self.asset_md[item]
def __delitem__(self, item):
del self.asset_md[item]
def __len__(self):
return len(self.asset_md)
def __setitem__(self, key, value):
self.asset_md[key] = value
def get(self, item, default=None):
"""
Provides dict-equivalent get functionality.
"""
return self.asset_md.get(item, default)
def iteritems(self):
"""
Iterates over the items of the asset dict.
"""
return self.asset_md.iteritems()
|
felipebetancur/scipy
|
refs/heads/master
|
scipy/weave/tests/test_blitz_tools.py
|
91
|
from __future__ import absolute_import, print_function
import time
import parser
import warnings
from numpy import (float32, float64, complex64, complex128,
zeros, random, array)
from numpy.testing import (TestCase, assert_equal,
assert_allclose, run_module_suite)
from scipy.weave import blitz_tools, blitz, BlitzWarning
from scipy.weave.ast_tools import harvest_variables
from weave_test_utils import remove_whitespace, debug_print, TempdirBlitz, dec
class TestAstToBlitzExpr(TestCase):
def generic_check(self,expr,desired):
ast = parser.suite(expr)
ast_list = ast.tolist()
actual = blitz_tools.ast_to_blitz_expr(ast_list)
actual = remove_whitespace(actual)
desired = remove_whitespace(desired)
assert_equal(actual,desired,expr)
def test_simple_expr(self):
# convert simple expr to blitz
expr = "a[:1:2] = b[:1+i+2:]"
desired = "a(blitz::Range(_beg,1-1,2))="\
"b(blitz::Range(_beg,1+i+2-1));"
self.generic_check(expr,desired)
def test_fdtd_expr(self):
# Convert fdtd equation to blitz.
# Note: This really should have "\" at the end of each line to
# indicate continuation.
expr = "ex[:,1:,1:] = ca_x[:,1:,1:] * ex[:,1:,1:]" \
"+ cb_y_x[:,1:,1:] * (hz[:,1:,1:] - hz[:,:-1,:])"\
"- cb_z_x[:,1:,1:] * (hy[:,1:,1:] - hy[:,1:,:-1])"
desired = 'ex(_all,blitz::Range(1,_end),blitz::Range(1,_end))='\
' ca_x(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
' *ex(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
'+cb_y_x(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
'*(hz(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
' -hz(_all,blitz::Range(_beg,Nhz(1)-1-1),_all))'\
' -cb_z_x(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
'*(hy(_all,blitz::Range(1,_end),blitz::Range(1,_end))'\
'-hy(_all,blitz::Range(1,_end),blitz::Range(_beg,Nhy(2)-1-1)));'
self.generic_check(expr,desired)
class TestBlitz(TestCase):
"""These are long running tests...
Would be useful to benchmark these things somehow.
"""
def generic_check(self, expr, arg_dict, type, size):
clean_result = array(arg_dict['result'],copy=1)
t1 = time.time()
exec(expr, globals(),arg_dict)
t2 = time.time()
standard = t2 - t1
desired = arg_dict['result']
arg_dict['result'] = clean_result
t1 = time.time()
blitz_tools.blitz(expr,arg_dict,{},verbose=0)
t2 = time.time()
compiled = t2 - t1
actual = arg_dict['result']
# TODO: this isn't very stringent. Need to tighten this up and
# learn where failures are occurring.
assert_allclose(abs(actual.ravel()), abs(desired.ravel()),
rtol=1e-4, atol=1e-6)
return standard, compiled
def generic_2d(self,expr,typ):
# The complex testing is pretty lame...
ast = parser.suite(expr)
arg_list = harvest_variables(ast.tolist())
all_sizes = [(10,10), (50,50), (100,100), (500,500), (1000,1000)]
debug_print('\nExpression:', expr)
with TempdirBlitz():
for size in all_sizes:
arg_dict = {}
for arg in arg_list:
arg_dict[arg] = random.normal(0,1,size).astype(typ)
# set imag part of complex values to non-zero value
try:
arg_dict[arg].imag = arg_dict[arg].real
except:
pass
debug_print('Run:', size,typ)
standard,compiled = self.generic_check(expr,arg_dict,type,size)
try:
speed_up = standard/compiled
except:
speed_up = -1.
debug_print("1st run(numpy,compiled,speed up): %3.4f, %3.4f, "
"%3.4f" % (standard,compiled,speed_up))
standard,compiled = self.generic_check(expr,arg_dict,type,size)
try:
speed_up = standard/compiled
except:
speed_up = -1.
debug_print("2nd run(numpy,compiled,speed up): %3.4f, %3.4f, "
"%3.4f" % (standard,compiled,speed_up))
@dec.slow
def test_5point_avg_2d_float(self):
expr = "result[1:-1,1:-1] = (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]" \
"+ b[1:-1,2:] + b[1:-1,:-2]) / 5."
self.generic_2d(expr,float32)
@dec.slow
def test_5point_avg_2d_double(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=BlitzWarning)
expr = "result[1:-1,1:-1] = (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]" \
"+ b[1:-1,2:] + b[1:-1,:-2]) / 5."
self.generic_2d(expr,float64)
@dec.slow
def _check_5point_avg_2d_complex_float(self):
""" Note: THIS TEST is KNOWN TO FAIL ON GCC 3.x.
It will not adversely affect 99.99 percent of weave
result[1:-1,1:-1] = (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]
+ b[1:-1,2:] + b[1:-1,:-2]) / 5.
Note: THIS TEST is KNOWN TO FAIL ON GCC 3.x. The reason is that
5. is a double and b is a complex32. blitz doesn't know
how to handle complex32/double. See:
http://www.oonumerics.org/MailArchives/blitz-support/msg00541.php
Unfortunately, the fix isn't trivial. Instead of fixing it, I
prefer to wait until we replace blitz++ with Pat Miller's code
that doesn't rely on blitz..
"""
expr = "result[1:-1,1:-1] = (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]" \
"+ b[1:-1,2:] + b[1:-1,:-2]) / 5."
self.generic_2d(expr,complex64)
@dec.slow
def test_5point_avg_2d_complex_double(self):
expr = "result[1:-1,1:-1] = (b[1:-1,1:-1] + b[2:,1:-1] + b[:-2,1:-1]" \
"+ b[1:-1,2:] + b[1:-1,:-2]) / 5."
self.generic_2d(expr,complex128)
@dec.slow
def test_blitz_bug():
# Assignment to arr[i:] used to fail inside blitz expressions.
with TempdirBlitz():
N = 4
expr_buggy = 'arr_blitz_buggy[{0}:] = arr[{0}:]'
expr_not_buggy = 'arr_blitz_not_buggy[{0}:{1}] = arr[{0}:]'
random.seed(7)
arr = random.randn(N)
sh = arr.shape[0]
for lim in [0, 1, 2]:
arr_blitz_buggy = zeros(N)
arr_blitz_not_buggy = zeros(N)
arr_np = zeros(N)
blitz(expr_buggy.format(lim))
blitz(expr_not_buggy.format(lim, 'sh'))
arr_np[lim:] = arr[lim:]
assert_allclose(arr_blitz_buggy, arr_np)
assert_allclose(arr_blitz_not_buggy, arr_np)
if __name__ == "__main__":
run_module_suite()
|
faun/django_test
|
refs/heads/master
|
build/lib/django/test/testcases.py
|
3
|
import re
import sys
from urlparse import urlsplit, urlunsplit
from xml.dom.minidom import parseString, Node
from django.conf import settings
from django.core import mail
from django.core.management import call_command
from django.core.urlresolvers import clear_url_caches
from django.db import transaction, connection, connections, DEFAULT_DB_ALIAS
from django.http import QueryDict
from django.test import _doctest as doctest
from django.test.client import Client
from django.utils import simplejson, unittest as ut2
from django.utils.encoding import smart_str
from django.utils.functional import wraps
__all__ = ('DocTestRunner', 'OutputChecker', 'TestCase', 'TransactionTestCase',
'skipIfDBFeature', 'skipUnlessDBFeature')
try:
all
except NameError:
from django.utils.itercompat import all
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
normalize_decimals = lambda s: re.sub(r"Decimal\('(\d+(\.\d*)?)'\)", lambda m: "Decimal(\"%s\")" % m.groups()[0], s)
def to_list(value):
"""
Puts value into a list if it's not already one.
Returns an empty list if value is None.
"""
if value is None:
value = []
elif not isinstance(value, list):
value = [value]
return value
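# Example: to_list(None) returns [], to_list('a') returns ['a'], and
# to_list([1, 2]) is returned unchanged.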
real_commit = transaction.commit
real_rollback = transaction.rollback
real_enter_transaction_management = transaction.enter_transaction_management
real_leave_transaction_management = transaction.leave_transaction_management
real_managed = transaction.managed
def nop(*args, **kwargs):
return
def disable_transaction_methods():
transaction.commit = nop
transaction.rollback = nop
transaction.enter_transaction_management = nop
transaction.leave_transaction_management = nop
transaction.managed = nop
def restore_transaction_methods():
transaction.commit = real_commit
transaction.rollback = real_rollback
transaction.enter_transaction_management = real_enter_transaction_management
transaction.leave_transaction_management = real_leave_transaction_management
transaction.managed = real_managed
class OutputChecker(doctest.OutputChecker):
def check_output(self, want, got, optionflags):
"The entry method for doctest output checking. Defers to a sequence of child checkers"
checks = (self.check_output_default,
self.check_output_numeric,
self.check_output_xml,
self.check_output_json)
for check in checks:
if check(want, got, optionflags):
return True
return False
def check_output_default(self, want, got, optionflags):
"The default comparator provided by doctest - not perfect, but good for most purposes"
return doctest.OutputChecker.check_output(self, want, got, optionflags)
def check_output_numeric(self, want, got, optionflags):
"""Doctest does an exact string comparison of output, which means that
some numerically equivalent values aren't equal. This check normalizes
* long integers (22L) so that they equal normal integers. (22)
* Decimals so that they are comparable, regardless of the change
made to __repr__ in Python 2.6.
"""
return doctest.OutputChecker.check_output(self,
normalize_decimals(normalize_long_ints(want)),
normalize_decimals(normalize_long_ints(got)),
optionflags)
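# Example: normalize_long_ints turns "22L" into "22", so doctest output
# written as 22L compares equal to a plain 22 after normalization.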
def check_output_xml(self, want, got, optionsflags):
"""Tries to do a 'xml-comparision' of want and got. Plain string
comparision doesn't always work because, for example, attribute
ordering should not be important.
Based on http://codespeak.net/svn/lxml/trunk/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')
def norm_whitespace(v):
return _norm_whitespace_re.sub(' ', v)
def child_text(element):
return ''.join([c.data for c in element.childNodes
if c.nodeType == Node.TEXT_NODE])
def children(element):
return [c for c in element.childNodes
if c.nodeType == Node.ELEMENT_NODE]
def norm_child_text(element):
return norm_whitespace(child_text(element))
def attrs_dict(element):
return dict(element.attributes.items())
def check_element(want_element, got_element):
if want_element.tagName != got_element.tagName:
return False
if norm_child_text(want_element) != norm_child_text(got_element):
return False
if attrs_dict(want_element) != attrs_dict(got_element):
return False
want_children = children(want_element)
got_children = children(got_element)
if len(want_children) != len(got_children):
return False
for want, got in zip(want_children, got_children):
if not check_element(want, got):
return False
return True
want, got = self._strip_quotes(want, got)
want = want.replace('\\n','\n')
got = got.replace('\\n','\n')
# If the string is not a complete xml document, we may need to add a
        # root element. This allows us to compare fragments, like "<foo/><bar/>"
if not want.startswith('<?xml'):
wrapper = '<root>%s</root>'
want = wrapper % want
got = wrapper % got
# Parse the want and got strings, and compare the parsings.
try:
want_root = parseString(want).firstChild
got_root = parseString(got).firstChild
except:
return False
return check_element(want_root, got_root)
    def check_output_json(self, want, got, optionflags):
"Tries to compare want and got as if they were JSON-encoded data"
want, got = self._strip_quotes(want, got)
try:
want_json = simplejson.loads(want)
got_json = simplejson.loads(got)
except:
return False
return want_json == got_json
def _strip_quotes(self, want, got):
"""
        Strip quotes from doctest output values:
        >>> o = OutputChecker()
        >>> o._strip_quotes("'foo'", "'foo'")
        ('foo', 'foo')
        >>> o._strip_quotes('"foo"', '"foo"')
        ('foo', 'foo')
        >>> o._strip_quotes("u'foo'", "u'foo'")
        ('foo', 'foo')
        >>> o._strip_quotes('u"foo"', 'u"foo"')
        ('foo', 'foo')
"""
def is_quoted_string(s):
s = s.strip()
return (len(s) >= 2
and s[0] == s[-1]
and s[0] in ('"', "'"))
def is_quoted_unicode(s):
s = s.strip()
return (len(s) >= 3
and s[0] == 'u'
and s[1] == s[-1]
and s[1] in ('"', "'"))
if is_quoted_string(want) and is_quoted_string(got):
want = want.strip()[1:-1]
got = got.strip()[1:-1]
elif is_quoted_unicode(want) and is_quoted_unicode(got):
want = want.strip()[2:-1]
got = got.strip()[2:-1]
return want, got
class DocTestRunner(doctest.DocTestRunner):
def __init__(self, *args, **kwargs):
doctest.DocTestRunner.__init__(self, *args, **kwargs)
self.optionflags = doctest.ELLIPSIS
def report_unexpected_exception(self, out, test, example, exc_info):
doctest.DocTestRunner.report_unexpected_exception(self, out, test,
example, exc_info)
# Rollback, in case of database errors. Otherwise they'd have
# side effects on other tests.
for conn in connections:
transaction.rollback_unless_managed(using=conn)
class _AssertNumQueriesContext(object):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
self.connection = connection
def __enter__(self):
self.old_debug_cursor = self.connection.use_debug_cursor
self.connection.use_debug_cursor = True
self.starting_queries = len(self.connection.queries)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.connection.use_debug_cursor = self.old_debug_cursor
if exc_type is not None:
return
final_queries = len(self.connection.queries)
executed = final_queries - self.starting_queries
self.test_case.assertEqual(
executed, self.num, "%d queries executed, %d expected" % (
executed, self.num
)
)
class TransactionTestCase(ut2.TestCase):
    # The class we'll use for the test client, self.client.
    # Can be overridden in derived classes.
client_class = Client
def _pre_setup(self):
"""Performs any pre-test setup. This includes:
* Flushing the database.
* If the Test Case class has a 'fixtures' member, installing the
named fixtures.
* If the Test Case class has a 'urls' member, replace the
ROOT_URLCONF with it.
* Clearing the mail test outbox.
"""
self._fixture_setup()
self._urlconf_setup()
mail.outbox = []
def _fixture_setup(self):
# If the test case has a multi_db=True flag, flush all databases.
# Otherwise, just flush default.
if getattr(self, 'multi_db', False):
databases = connections
else:
databases = [DEFAULT_DB_ALIAS]
for db in databases:
call_command('flush', verbosity=0, interactive=False, database=db)
if hasattr(self, 'fixtures'):
# We have to use this slightly awkward syntax due to the fact
# that we're using *args and **kwargs together.
call_command('loaddata', *self.fixtures, **{'verbosity': 0, 'database': db})
def _urlconf_setup(self):
if hasattr(self, 'urls'):
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = self.urls
clear_url_caches()
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
self.client = self.client_class()
try:
self._pre_setup()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
super(TransactionTestCase, self).__call__(result)
try:
self._post_teardown()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
def _post_teardown(self):
""" Performs any post-test things. This includes:
* Putting back the original ROOT_URLCONF if it was changed.
* Force closing the connection, so that the next test gets
a clean cursor.
"""
self._fixture_teardown()
self._urlconf_teardown()
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does rollback, the effect
        # of these statements is lost, which can affect the operation
# of tests (e.g., losing a timezone setting causing objects to
# be created with the wrong time).
# To make sure this doesn't happen, get a clean connection at the
# start of every test.
for connection in connections.all():
connection.close()
def _fixture_teardown(self):
pass
def _urlconf_teardown(self):
if hasattr(self, '_old_root_urlconf'):
settings.ROOT_URLCONF = self._old_root_urlconf
clear_url_caches()
def assertRedirects(self, response, expected_url, status_code=302,
target_status_code=200, host=None, msg_prefix=''):
"""Asserts that a response redirected to a specific URL, and that the
redirect URL can be loaded.
        Note that assertRedirects won't work for external links since it uses
        the test client to perform the request.
"""
if msg_prefix:
msg_prefix += ": "
if hasattr(response, 'redirect_chain'):
# The request was a followed redirect
self.failUnless(len(response.redirect_chain) > 0,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
self.assertEqual(response.redirect_chain[0][1], status_code,
msg_prefix + "Initial response didn't redirect as expected:"
" Response code was %d (expected %d)" %
(response.redirect_chain[0][1], status_code))
url, status_code = response.redirect_chain[-1]
self.assertEqual(response.status_code, target_status_code,
msg_prefix + "Response didn't redirect as expected: Final"
" Response code was %d (expected %d)" %
(response.status_code, target_status_code))
else:
# Not a followed redirect
self.assertEqual(response.status_code, status_code,
msg_prefix + "Response didn't redirect as expected: Response"
" code was %d (expected %d)" %
(response.status_code, status_code))
url = response['Location']
scheme, netloc, path, query, fragment = urlsplit(url)
        # Get the redirection page, using the same client that was used
        # to obtain the original response.
        redirect_response = response.client.get(path, QueryDict(query))
self.assertEqual(redirect_response.status_code, target_status_code,
msg_prefix + "Couldn't retrieve redirection page '%s':"
" response code was %d (expected %d)" %
(path, redirect_response.status_code, target_status_code))
e_scheme, e_netloc, e_path, e_query, e_fragment = urlsplit(expected_url)
if not (e_scheme or e_netloc):
expected_url = urlunsplit(('http', host or 'testserver', e_path,
e_query, e_fragment))
self.assertEqual(url, expected_url,
msg_prefix + "Response redirected to '%s', expected '%s'" %
(url, expected_url))
def assertContains(self, response, text, count=None, status_code=200,
msg_prefix=''):
"""
        Asserts that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
        ``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
if msg_prefix:
msg_prefix += ": "
self.assertEqual(response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code))
text = smart_str(text, response._charset)
real_count = response.content.count(text)
if count is not None:
self.assertEqual(real_count, count,
msg_prefix + "Found %d instances of '%s' in response"
" (expected %d)" % (real_count, text, count))
else:
self.failUnless(real_count != 0,
msg_prefix + "Couldn't find '%s' in response" % text)
def assertNotContains(self, response, text, status_code=200,
msg_prefix=''):
"""
        Asserts that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
        ``text`` doesn't occur in the content of the response.
"""
if msg_prefix:
msg_prefix += ": "
self.assertEqual(response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code))
text = smart_str(text, response._charset)
self.assertEqual(response.content.count(text), 0,
msg_prefix + "Response should not contain '%s'" % text)
def assertFormError(self, response, form, field, errors, msg_prefix=''):
"""
Asserts that a form used to render the response has a specific field
error.
"""
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + "Response did not use any contexts to "
"render the response")
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_form = False
for i,context in enumerate(contexts):
if form not in context:
continue
found_form = True
for err in errors:
if field:
if field in context[form].errors:
field_errors = context[form].errors[field]
self.failUnless(err in field_errors,
msg_prefix + "The field '%s' on form '%s' in"
" context %d does not contain the error '%s'"
" (actual errors: %s)" %
(field, form, i, err, repr(field_errors)))
elif field in context[form].fields:
self.fail(msg_prefix + "The field '%s' on form '%s'"
" in context %d contains no errors" %
(field, form, i))
else:
self.fail(msg_prefix + "The form '%s' in context %d"
" does not contain the field '%s'" %
(form, i, field))
else:
non_field_errors = context[form].non_field_errors()
self.failUnless(err in non_field_errors,
msg_prefix + "The form '%s' in context %d does not"
" contain the non-field error '%s'"
" (actual errors: %s)" %
(form, i, err, non_field_errors))
if not found_form:
self.fail(msg_prefix + "The form '%s' was not used to render the"
" response" % form)
def assertTemplateUsed(self, response, template_name, msg_prefix=''):
"""
Asserts that the template with the provided name was used in rendering
the response.
"""
if msg_prefix:
msg_prefix += ": "
template_names = [t.name for t in response.templates]
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.failUnless(template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s" %
(template_name, u', '.join(template_names)))
def assertTemplateNotUsed(self, response, template_name, msg_prefix=''):
"""
Asserts that the template with the provided name was NOT used in
rendering the response.
"""
if msg_prefix:
msg_prefix += ": "
template_names = [t.name for t in response.templates]
self.failIf(template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering"
" the response" % template_name)
def assertQuerysetEqual(self, qs, values, transform=repr):
return self.assertEqual(map(transform, qs), values)
def assertNumQueries(self, num, func=None, *args, **kwargs):
using = kwargs.pop("using", DEFAULT_DB_ALIAS)
connection = connections[using]
context = _AssertNumQueriesContext(self, num, connection)
if func is None:
return context
# Basically emulate the `with` statement here.
context.__enter__()
try:
func(*args, **kwargs)
except:
context.__exit__(*sys.exc_info())
raise
else:
context.__exit__(*sys.exc_info())
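    # A usage sketch (the Person model is hypothetical, not part of this
    # module):
    #     with self.assertNumQueries(2):
    #         Person.objects.create(name="a")
    #         Person.objects.create(name="b")
    # or, passing a callable instead of using the context manager:
    #     self.assertNumQueries(1, lambda: list(Person.objects.all()))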
def connections_support_transactions():
"""
Returns True if all connections support transactions. This is messy
    because Python 2.4 doesn't provide the ``any``/``all`` builtins.
"""
return all(conn.features.supports_transactions
for conn in connections.all())
class TestCase(TransactionTestCase):
"""
    Does basically the same as TransactionTestCase, but surrounds every test
    with a transaction, monkey-patches the real transaction management
    routines to do nothing, and rolls back the test transaction at the end of
    the test. You have to use TransactionTestCase if you need transaction
    management inside a test.
"""
def _fixture_setup(self):
if not connections_support_transactions():
return super(TestCase, self)._fixture_setup()
# If the test case has a multi_db=True flag, setup all databases.
# Otherwise, just use default.
if getattr(self, 'multi_db', False):
databases = connections
else:
databases = [DEFAULT_DB_ALIAS]
for db in databases:
transaction.enter_transaction_management(using=db)
transaction.managed(True, using=db)
disable_transaction_methods()
from django.contrib.sites.models import Site
Site.objects.clear_cache()
for db in databases:
if hasattr(self, 'fixtures'):
call_command('loaddata', *self.fixtures, **{
'verbosity': 0,
'commit': False,
'database': db
})
def _fixture_teardown(self):
if not connections_support_transactions():
return super(TestCase, self)._fixture_teardown()
# If the test case has a multi_db=True flag, teardown all databases.
# Otherwise, just teardown default.
if getattr(self, 'multi_db', False):
databases = connections
else:
databases = [DEFAULT_DB_ALIAS]
restore_transaction_methods()
for db in databases:
transaction.rollback(using=db)
transaction.leave_transaction_management(using=db)
def _deferredSkip(condition, reason):
def decorator(test_func):
if not (isinstance(test_func, type) and issubclass(test_func, TestCase)):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if condition():
raise ut2.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
test_item = test_func
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIfDBFeature(feature):
"Skip a test if a database has the named feature"
return _deferredSkip(lambda: getattr(connection.features, feature),
"Database has feature %s" % feature)
def skipUnlessDBFeature(feature):
"Skip a test unless a database has the named feature"
return _deferredSkip(lambda: not getattr(connection.features, feature),
"Database doesn't support feature %s" % feature)
|
acil-bwh/SpearmintServer
|
refs/heads/master
|
SpearmintServer/wsgi.py
|
1
|
"""
WSGI config for SpearmintServer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "SpearmintServer.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
jpopelka/atomic-reactor
|
refs/heads/master
|
atomic_reactor/plugins/post_pulp_sync.py
|
1
|
"""Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
Sync built image to pulp registry using Docker Registry HTTP API V2
Pulp authentication is via a key and certificate. Docker V2 registry
authentication is via a dockercfg file. Both of these sets of
credentials are stored in secrets which the builder service account is
allowed to mount:
$ oc secrets new pulp pulp.key=./pulp.key pulp.cer=./pulp.cer
secret/pulp
$ oc secrets add serviceaccount/builder secret/pulp --for=mount
$ oc secrets new-dockercfg registry-dockercfg [...]
secret/registry-dockercfg
$ oc secrets add serviceaccount/builder secret/registry-dockercfg --for=mount
In the BuildConfig for atomic-reactor, specify the secrets in the
strategy's 'secrets' array, specifying a mount path:
"secrets": [
{
"secretSource": {
"name": "pulp"
},
"mountPath": "/var/run/secrets/pulp"
},
{
"secretSource": {
"name": "registry-dockercfg"
},
"mountPath": "/var/run/secrets/registry-dockercfg"
}
]
In the configuration for this plugin, specify the same path for
pulp_secret_path:
"pulp_sync": {
"pulp_registry_name": ...,
...
"pulp_secret_path": "/var/run/secrets/pulp",
"registry_secret_path": "/var/run/secrets/registry-dockercfg"
}
"""
from __future__ import print_function, unicode_literals
from atomic_reactor.plugin import PostBuildPlugin
from atomic_reactor.util import ImageName, Dockercfg
import dockpulp
import os
import re
# Silence warnings from dockpulp: it emits one warning for every request,
# which can result in tens of messages and is very annoying. With the
# "module" action it prints just one warning per module, which should
# balance security and UX.
from warnings import filterwarnings
filterwarnings("module")
class PulpSyncPlugin(PostBuildPlugin):
key = 'pulp_sync'
is_allowed_to_fail = False
CER = 'pulp.cer'
KEY = 'pulp.key'
def __init__(self, tasker, workflow,
pulp_registry_name,
docker_registry,
delete_from_registry=False,
pulp_secret_path=None,
registry_secret_path=None,
insecure_registry=None,
dockpulp_loglevel=None,
pulp_repo_prefix='redhat-'):
"""
constructor
:param tasker: DockerTasker instance
:param workflow: DockerBuildWorkflow instance
:param pulp_registry_name: str, name of pulp registry to use,
specified in /etc/dockpulp.conf
:param docker_registry: str, URL of docker registry to sync from
including scheme e.g. https://registry.example.com
:param delete_from_registry: bool, whether to delete the image
from the docker v2 registry after sync
:param pulp_secret_path: path to pulp.cer and pulp.key
:param registry_secret_path: path to .dockercfg for the V2 registry
:param insecure_registry: True if SSL validation should be skipped
:param dockpulp_loglevel: int, logging level for dockpulp
:param pulp_repo_prefix: str, prefix for pulp repo IDs
"""
# call parent constructor
super(PulpSyncPlugin, self).__init__(tasker, workflow)
self.pulp_registry_name = pulp_registry_name
self.docker_registry = docker_registry
self.pulp_secret_path = pulp_secret_path
self.registry_secret_path = registry_secret_path
self.insecure_registry = insecure_registry
self.pulp_repo_prefix = pulp_repo_prefix
if dockpulp_loglevel is not None:
logger = dockpulp.setup_logger(dockpulp.log)
try:
logger.setLevel(dockpulp_loglevel)
except (ValueError, TypeError) as ex:
self.log.error("Can't set provided log level %r: %r",
dockpulp_loglevel, ex)
if delete_from_registry:
self.log.error("will not delete from registry as instructed: "
"not implemented")
def set_auth(self, pulp):
path = self.pulp_secret_path
if path is not None:
self.log.info("using configured path %s for secrets", path)
# Work out the pathnames for the certificate/key pair
cer = os.path.join(path, self.CER)
key = os.path.join(path, self.KEY)
if not os.path.exists(cer):
raise RuntimeError("Certificate does not exist")
if not os.path.exists(key):
raise RuntimeError("Key does not exist")
# Tell dockpulp
pulp.set_certs(cer, key)
def get_dockercfg_credentials(self, docker_registry):
"""
Read the .dockercfg file and return an empty dict, or else a dict
with keys 'basic_auth_username' and 'basic_auth_password'.
"""
if not self.registry_secret_path:
return {}
dockercfg = Dockercfg(self.registry_secret_path)
registry_creds = dockercfg.get_credentials(docker_registry)
if 'username' not in registry_creds:
return {}
return {
'basic_auth_username': registry_creds['username'],
'basic_auth_password': registry_creds['password'],
}
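    # In run() the returned dict is passed via **kwargs to pulp.syncRepo();
    # e.g. {'basic_auth_username': 'builder', 'basic_auth_password': '...'}
    # (the values shown are illustrative).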
def create_repo_if_missing(self, pulp, repo_id, registry_id):
prefixed_repo_id = "{prefix}{id}".format(prefix=self.pulp_repo_prefix,
id=repo_id)
found_repos = pulp.getRepos([prefixed_repo_id], fields=['id'])
found_repo_ids = [repo['id'] for repo in found_repos]
missing_repos = set([prefixed_repo_id]) - set(found_repo_ids)
try:
repo = missing_repos.pop()
except KeyError:
# Already exists
pass
else:
self.log.info("creating repo %s", repo)
pulp.createRepo(prefixed_repo_id, None, registry_id=registry_id,
prefix_with=self.pulp_repo_prefix)
return prefixed_repo_id
def run(self):
pulp = dockpulp.Pulp(env=self.pulp_registry_name)
self.set_auth(pulp)
# We only want the hostname[:port]
hostname_and_port = re.compile(r'^https?://([^/]*)/?.*')
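        # e.g. 'https://registry.example.com/v2/' -> 'registry.example.com'
        # (the URL is illustrative)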
pulp_registry = hostname_and_port.sub(lambda m: m.groups()[0],
pulp.registry)
# Store the registry URI in the push configuration
self.workflow.push_conf.add_pulp_registry(self.pulp_registry_name,
pulp_registry)
self.log.info("syncing from docker V2 registry %s",
self.docker_registry)
docker_registry = hostname_and_port.sub(lambda m: m.groups()[0],
self.docker_registry)
kwargs = self.get_dockercfg_credentials(docker_registry)
if self.insecure_registry is not None:
kwargs['ssl_validation'] = not self.insecure_registry
images = []
repos = {} # pulp repo -> repo id
for image in self.workflow.tag_conf.primary_images:
if image.pulp_repo not in repos:
repo_id = self.create_repo_if_missing(pulp,
image.pulp_repo,
image.repo)
self.log.info("syncing %s", repo_id)
pulp.syncRepo(repo=repo_id,
feed=self.docker_registry,
**kwargs)
repos[image.pulp_repo] = repo_id
images.append(ImageName(registry=pulp_registry,
repo=image.repo))
self.log.info("publishing to crane")
pulp.crane(list(repos.values()), wait=True)
# Return the set of qualified repo names for this image
return images
|
asherbender/mcl
|
refs/heads/master
|
mcl/event/test/test_event.py
|
1
|
import unittest
from mcl.event.event import Event
# -----------------------------------------------------------------------------
# Event()
# -----------------------------------------------------------------------------
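# Note: Event delivers data to subscribers via the (non-standard) dunder
# method __trigger__(), which these tests invoke directly to simulate an
# event being raised.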
class EventTests(unittest.TestCase):
def test_subscribe(self):
"""Test Event() can subscribe callback functions."""
# Create function for capturing event data.
event_data = list()
def callback(data): event_data.append(data)
# Create Event().
event = Event()
# Validate Event() can detect when callbacks have NOT been
# subscribed.
self.assertFalse(event.is_subscribed(callback))
# Validate Event() can detect when callbacks HAVE been subscribed.
self.assertTrue(event.subscribe(callback))
self.assertTrue(event.is_subscribed(callback))
self.assertEqual(event.num_subscriptions(), 1)
# Validate Event() will not re-subscribe callbacks.
self.assertFalse(event.subscribe(callback))
        # Test subscribe catches callbacks which do not contain a __call__
# method.
with self.assertRaises(TypeError):
event.subscribe(int())
def test_unsubscribe(self):
"""Test Event() can unsubscribe callback functions."""
# Create function for capturing event data.
event_data = list()
def callback(data): event_data.append(data)
# Create Event().
event = Event()
# Validate Event() can detect when callbacks have been UNsubscribed.
event.subscribe(callback)
self.assertTrue(event.is_subscribed(callback))
self.assertTrue(event.unsubscribe(callback))
self.assertFalse(event.is_subscribed(callback))
self.assertEqual(event.num_subscriptions(), 0)
# Validate Event() will not unsubscribe a callback which does not
# exist.
self.assertFalse(event.unsubscribe(callback))
def test_trigger(self):
"""Test Event() can trigger a callback function."""
# Create function for capturing event data.
event_data = list()
def callback(data): event_data.append(data)
# Trigger an event and send data to callback functions.
test_data = 'test data'
event = Event()
event.subscribe(callback)
event.__trigger__(test_data)
# Ensure data was issued to callback function.
if len(event_data) == 1:
self.assertEqual(event_data[0], test_data)
else:
raise ValueError('Expected one callback event.')
def test_multiple_triggers(self):
"""Test Event() can trigger a callback function multiple times."""
# Create function for capturing event data.
event_data = list()
def callback(data): event_data.append(data)
# Create Event().
event = Event()
event.subscribe(callback)
# Trigger events and send data to callback functions.
num_triggers = 5
for i in range(num_triggers):
event.__trigger__(i)
# Ensure data was issued to callback function.
if len(event_data) == num_triggers:
self.assertEqual(sorted(event_data), range(num_triggers))
else:
            raise ValueError('Expected %i callback events.' % num_triggers)
def test_multiple_subscribers(self):
"""Test Event() can trigger multiple callback functions."""
# Create function for capturing event data.
event_data_A = list()
event_data_B = list()
def callback_A(data): event_data_A.append(data)
def callback_B(data): event_data_B.append(data)
# Trigger an event and send data to multiple callback functions.
event = Event()
test_data = 'test data'
event.subscribe(callback_A)
event.subscribe(callback_B)
# Ensure multiple callbacks have been added to event.
self.assertEqual(event.num_subscriptions(), 2)
# Trigger event.
event.__trigger__(test_data)
# Ensure data was issued to all callback functions.
if (len(event_data_A) == 1) and (len(event_data_B) == 1):
self.assertEqual(event_data_A[0], test_data)
self.assertEqual(event_data_B[0], test_data)
else:
msg = 'Expected all callback functions to receive data.'
raise ValueError(msg)
def test_subscribe_from_callback(self):
"""Test Event() callback functions can be subscribed from callbacks."""
# Create Event().
event = Event()
# Create testing function.
def noop(): pass
# Create function which will subscribe the testing function.
def subscriber():
event.subscribe(noop)
# Subscribe the function which will subscribe another function when
# called.
event.subscribe(subscriber)
self.assertTrue(event.is_subscribed(subscriber))
# Trigger event and ensure testing function was subscribed.
event.__trigger__()
self.assertTrue(event.is_subscribed(noop))
def test_unsubscribe_from_callback(self):
"""Test Event() callback functions can unsubscribe themselves."""
# Create Event().
event = Event()
# Create function which will unsubscribe itself when called.
def unsubscriber():
event.unsubscribe(unsubscriber)
# Subscribe the function which will unsubscribe itself when called.
event.subscribe(unsubscriber)
self.assertTrue(event.is_subscribed(unsubscriber))
# Trigger event and ensure function unsubscribed itself.
event.__trigger__()
self.assertFalse(event.is_subscribed(unsubscriber))
|
gorlemik/selenium
|
refs/heads/master
|
py/test/selenium/webdriver/support/color_tests.py
|
68
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from selenium.webdriver.support.color import Color
class ColorTests(unittest.TestCase):
def test_rgb_to_rgb(self):
rgb = "rgb(1, 2, 3)"
assert Color.from_string(rgb).rgb == rgb
def test_rgb_to_rgba(self):
rgb = "rgb(1, 2, 3)"
assert Color.from_string(rgb).rgba == "rgba(1, 2, 3, 1)"
def test_rgb_pct_to_rgba(self):
rgb = "rgb(10%, 20%, 30%)"
assert Color.from_string(rgb).rgba == "rgba(25, 51, 76, 1)"
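        # 10%, 20% and 30% of 255 are 25.5, 51 and 76.5; the expected values
        # suggest the fractional part is truncated rather than rounded.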
def test_rgb_allows_whitespace(self):
rgb = "rgb(\t1, 2 , 3)"
assert Color.from_string(rgb).rgb == "rgb(1, 2, 3)"
def test_rgba_to_rgba(self):
rgba = "rgba(1, 2, 3, 0.5)"
assert Color.from_string(rgba).rgba == rgba
def test_rgba_pct_to_rgba(self):
rgba = "rgba(10%, 20%, 30%, 0.5)"
assert Color.from_string(rgba).rgba == "rgba(25, 51, 76, 0.5)"
def test_hex_to_hex(self):
hex_ = "#ff00a0"
assert Color.from_string(hex_).hex == hex_
def test_hex_to_rgb(self):
hex_ = "#01Ff03"
rgb = "rgb(1, 255, 3)"
assert Color.from_string(hex_).rgb == rgb
def test_hex_to_rgba(self):
hex_ = "#01Ff03"
rgba = "rgba(1, 255, 3, 1)"
assert Color.from_string(hex_).rgba == rgba
hex_ = "#00ff33"
rgba = "rgba(0, 255, 51, 1)"
assert Color.from_string(hex_).rgba == rgba
def test_rgb_to_hex(self):
assert Color.from_string("rgb(1, 255, 3)").hex == "#01ff03"
def test_hex3_to_rgba(self):
assert Color.from_string("#0f3").rgba == "rgba(0, 255, 51, 1)"
def test_hsl_to_rgba(self):
hsl = "hsl(120, 100%, 25%)"
rgba = "rgba(0, 128, 0, 1)"
assert Color.from_string(hsl).rgba == rgba
hsl = "hsl(100, 0%, 50%)"
rgba = "rgba(128, 128, 128, 1)"
assert Color.from_string(hsl).rgba == rgba
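        # hsl(120, 100%, 25%) is pure green at quarter lightness, i.e.
        # rgb(0, 128, 0); at 0% saturation hue is irrelevant and each channel
        # equals the lightness scaled to 255 (50% -> 128).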
def test_hsla_to_rgba(self):
hsla = "hsla(120, 100%, 25%, 1)"
rgba = "rgba(0, 128, 0, 1)"
assert Color.from_string(hsla).rgba == rgba
hsla = "hsla(100, 0%, 50%, 0.5)"
rgba = "rgba(128, 128, 128, 0.5)"
assert Color.from_string(hsla).rgba == rgba
def test_named_color(self):
assert Color.from_string("green").rgba == "rgba(0, 128, 0, 1)"
assert Color.from_string("gray").rgba == "rgba(128, 128, 128, 1)"
assert Color.from_string("aqua").hex == "#00ffff"
assert Color.from_string("transparent").rgba == "rgba(0, 0, 0, 0)"
def test_equals(self):
assert Color.from_string("#f00") == Color.from_string("rgb(255, 0, 0)")
assert Color.from_string("rgba(30, 30, 30, 0.2)") != Color.from_string("rgba(30, 30, 30, 1)")
def test_hash(self):
hash1 = hash(Color.from_string("#f00"))
hash2 = hash(Color.from_string("rgb(255, 0, 0)"))
assert hash1 == hash2
def test_string_representations(self):
hex_ = "#01Ff03"
assert str(Color.from_string(hex_)) == "Color: rgba(1, 255, 3, 1)"
assert repr(Color.from_string(hex_)) == "Color(red=1, green=255, blue=3, alpha=1)"
|