repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
sriprasanna/django-1.3.1
|
refs/heads/master
|
django/conf/locale/te/__init__.py
|
12133432
| |
jnayak1/osf.io
|
refs/heads/develop
|
admin/metrics/migrations/__init__.py
|
12133432
| |
dwightgunning/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex_multi_apps/app2/__init__.py
|
12133432
| |
vahtras/amy
|
refs/heads/master
|
api/__init__.py
|
12133432
| |
Zeken/audacity
|
refs/heads/master
|
lib-src/lv2/sord/waflib/Utils.py
|
181
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,errno,traceback,inspect,re,shutil,datetime,gc
import subprocess
try:
from collections import deque
except ImportError:
class deque(list):
def popleft(self):
return self.pop(0)
try:
import _winreg as winreg
except ImportError:
try:
import winreg
except ImportError:
winreg=None
from waflib import Errors
try:
from collections import UserDict
except ImportError:
from UserDict import UserDict
try:
from hashlib import md5
except ImportError:
try:
from md5 import md5
except ImportError:
pass
# Provide a minimal threading stub on interpreters that lack the threading
# module, so the rest of waf can reference threading.Lock/Thread unconditionally.
try:
    import threading
except ImportError:
    class threading(object):
        # placeholder namespace standing in for the real module
        pass
    class Lock(object):
        # no-op lock: a single-threaded interpreter needs no synchronization
        def acquire(self):
            pass
        def release(self):
            pass
    threading.Lock=threading.Thread=Lock
else:
    run_old=threading.Thread.run
    def run(*args,**kwargs):
        # wrap Thread.run so exceptions escaping worker threads are reported
        # through sys.excepthook instead of being silently dropped
        try:
            run_old(*args,**kwargs)
        except(KeyboardInterrupt,SystemExit):
            raise
        except Exception:
            sys.excepthook(*sys.exc_info())
    threading.Thread.run=run
SIG_NIL='iluvcuteoverload'
O644=420
O755=493
rot_chr=['\\','|','/','-']
rot_idx=0
# Fallback implementation of collections.defaultdict for very old Pythons.
try:
    from collections import defaultdict
except ImportError:
    class defaultdict(dict):
        def __init__(self,default_factory):
            super(defaultdict,self).__init__()
            # zero-argument callable producing values for missing keys
            self.default_factory=default_factory
        def __getitem__(self,key):
            try:
                return super(defaultdict,self).__getitem__(key)
            except KeyError:
                # missing key: create, store and return a fresh default value
                value=self.default_factory()
                self[key]=value
                return value
is_win32=sys.platform in('win32','cli')
indicator='\x1b[K%s%s%s\r'
if is_win32 and'NOCOLOR'in os.environ:
indicator='%s%s%s\r'
def readf(fname,m='r',encoding='ISO8859-1'):
    """Read the file *fname* and return its contents.

    On Python 3 a text-mode read is performed in binary and then decoded
    with *encoding* (latin-1 by default) so the result mirrors the byte
    semantics of Python 2.
    """
    decode_after=sys.hexversion>0x3000000 and'b'not in m
    if decode_after:
        m+='b'
    handle=open(fname,m)
    try:
        txt=handle.read()
    finally:
        handle.close()
    if decode_after:
        txt=txt.decode(encoding)
    return txt
def writef(fname,data,m='w',encoding='ISO8859-1'):
    """Write *data* to the file *fname*.

    On Python 3, text data is first encoded with *encoding* and written in
    binary mode so the bytes on disk match Python 2 behaviour.
    """
    if sys.hexversion>0x3000000 and'b'not in m:
        m+='b'
        data=data.encode(encoding)
    handle=open(fname,m)
    try:
        handle.write(data)
    finally:
        handle.close()
def h_file(fname):
    """Return the md5 digest of the file *fname*, reading in 200kB chunks."""
    stream=open(fname,'rb')
    digest=md5()
    try:
        while True:
            chunk=stream.read(200000)
            if not chunk:
                break
            digest.update(chunk)
    finally:
        stream.close()
    return digest.digest()
# On Windows, file handles are inherited by child processes by default;
# these replacements open files with O_NOINHERIT so spawned compilers do not
# keep build files locked.  Python 3.4+ makes descriptors non-inheritable
# natively, hence the version guard.
if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000:
    def readf_win32(f,m='r',encoding='ISO8859-1'):
        # same contract as readf(), but via a non-inheritable descriptor
        flags=os.O_NOINHERIT|os.O_RDONLY
        if'b'in m:
            flags|=os.O_BINARY
        if'+'in m:
            flags|=os.O_RDWR
        try:
            fd=os.open(f,flags)
        except OSError:
            raise IOError('Cannot read from %r'%f)
        if sys.hexversion>0x3000000 and not'b'in m:
            m+='b'
            f=os.fdopen(fd,m)
            try:
                txt=f.read()
            finally:
                f.close()
            txt=txt.decode(encoding)
        else:
            f=os.fdopen(fd,m)
            try:
                txt=f.read()
            finally:
                f.close()
        return txt
    def writef_win32(f,data,m='w',encoding='ISO8859-1'):
        # same contract as writef(), but via a non-inheritable descriptor
        if sys.hexversion>0x3000000 and not'b'in m:
            data=data.encode(encoding)
            m+='b'
        flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
        if'b'in m:
            flags|=os.O_BINARY
        if'+'in m:
            flags|=os.O_RDWR
        try:
            fd=os.open(f,flags)
        except OSError:
            raise IOError('Cannot write to %r'%f)
        f=os.fdopen(fd,m)
        try:
            f.write(data)
        finally:
            f.close()
    def h_file_win32(fname):
        # same contract as h_file(): md5 of the file contents, chunked reads
        try:
            fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
        except OSError:
            raise IOError('Cannot read from %r'%fname)
        f=os.fdopen(fd,'rb')
        m=md5()
        try:
            while fname:
                fname=f.read(200000)
                m.update(fname)
        finally:
            f.close()
        return m.digest()
    # swap in the Windows implementations; keep the portable originals around
    readf_old=readf
    writef_old=writef
    h_file_old=h_file
    readf=readf_win32
    writef=writef_win32
    h_file=h_file_win32
# Select a bytes-to-hex helper: Python 2 supports str.encode('hex'), while
# Python 3 raises LookupError for that codec and must use binascii instead.
try:
    x=''.encode('hex')
except LookupError:
    import binascii
    def to_hex(s):
        ret=binascii.hexlify(s)
        if not isinstance(ret,str):
            # hexlify returns bytes on Python 3; normalize to str
            ret=ret.decode('utf-8')
        return ret
else:
    def to_hex(s):
        return s.encode('hex')
to_hex.__doc__="""
Return the hexadecimal representation of a string
:param s: string to convert
:type s: string
"""
listdir=os.listdir
if is_win32:
    def listdir_win32(s):
        # Windows-specific listdir: an empty path means "enumerate drive roots"
        if not s:
            try:
                import ctypes
            except ImportError:
                # old interpreters without ctypes: assume all drive letters exist
                return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
            else:
                dlen=4
                maxdrives=26
                buf=ctypes.create_string_buffer(maxdrives*dlen)
                ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
                return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
        if len(s)==2 and s[1]==":":
            # a bare drive letter ("C:") needs a separator to list its root
            s+=os.sep
        if not os.path.isdir(s):
            e=OSError('%s is not a directory'%s)
            e.errno=errno.ENOENT
            raise e
        return os.listdir(s)
    listdir=listdir_win32
def num2ver(ver):
    """Convert a version given as a string ('1.2.3') or tuple into a single
    comparable integer, one byte per component (at most four components).
    Values that are neither str nor tuple are returned unchanged."""
    if isinstance(ver,str):
        ver=tuple(ver.split('.'))
    if not isinstance(ver,tuple):
        return ver
    total=0
    for idx,part in enumerate(ver[:4]):
        total+=256**(3-idx)*int(part)
    return total
def ex_stack():
    """Return the currently-handled exception's formatted traceback as a
    single string (for logging)."""
    exc_type,exc_value,tb=sys.exc_info()
    return''.join(traceback.format_exception(exc_type,exc_value,tb))
def to_list(sth):
    """Return *sth* split on whitespace if it is a string, otherwise return
    it unchanged (assumed to already be a sequence)."""
    return sth.split()if isinstance(sth,str)else sth
re_nl=re.compile('\r*\n',re.M)
def str_to_dict(txt):
    """Parse 'key=value' lines from *txt* into a dict.

    Blank lines, '#' comments and lines without '=' are skipped; a value
    may itself contain further '=' characters.
    """
    table={}
    for raw in re_nl.split(txt):
        line=raw.strip()
        if not line or line.startswith('#')or'='not in line:
            continue
        key,_,value=line.partition('=')
        table[key.strip()]=value.strip()
    return table
def split_path(path):
    """Split a slash-separated path into its components."""
    return path.split('/')
def split_path_cygwin(path):
    """Split a cygwin path, keeping the leading '//' of UNC paths attached
    to the first component."""
    if not path.startswith('//'):
        return path.split('/')
    parts=path.split('/')[2:]
    parts[0]='/'+parts[0]
    return parts
re_sp=re.compile('[/\\\\]')
def split_path_win32(path):
    """Split a Windows path on '/' or '\\', keeping the leading '\\\\' of
    UNC paths attached to the first component."""
    if not path.startswith('\\\\'):
        return re.split(re_sp,path)
    parts=re.split(re_sp,path)[2:]
    parts[0]='\\'+parts[0]
    return parts
# Pick the platform-appropriate splitter and attach the documented
# contract to whichever function was chosen.
if sys.platform=='cygwin':
    split_path=split_path_cygwin
elif is_win32:
    split_path=split_path_win32
split_path.__doc__="""
Split a path by / or \\. This function is not like os.path.split
:type path: string
:param path: path to split
:return: list of strings
"""
def check_dir(path):
    """Ensure the directory *path* exists, creating it and any missing
    parents if necessary.

    :param path: directory to create
    :raises Errors.WafError: if the directory cannot be created
    """
    if not os.path.isdir(path):
        try:
            os.makedirs(path)
        # the original 'except OSError ,e:' is Python-2-only syntax and is a
        # SyntaxError on Python 3; 'as e' works on both 2.6+ and 3.x
        except OSError as e:
            # re-check: a concurrent process may have created it meanwhile
            if not os.path.isdir(path):
                raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
def def_attrs(cls,**kw):
    """Set each keyword argument as an attribute on the class *cls*, but
    only when the attribute does not already exist (existing values win)."""
    for name,value in kw.items():
        if not hasattr(cls,name):
            setattr(cls,name,value)
def quote_define_name(s):
    """Convert *s* into an uppercase identifier suitable for a C
    preprocessor define by replacing every non-alphanumeric character
    with an underscore."""
    sanitized=re.compile("[^a-zA-Z0-9]").sub("_",s)
    return sanitized.upper()
def h_list(lst):
    """Return the md5 digest of ``str(lst)``, used as a stable hash for
    lists of configuration values.

    :param lst: object to hash (usually a list)
    :returns: binary md5 digest
    """
    m=md5()
    data=str(lst)
    if not isinstance(data,bytes):
        # md5.update() requires bytes on Python 3; on Python 2 str already
        # is bytes and passes through unchanged
        data=data.encode('latin-1')
    m.update(data)
    return m.digest()
def h_fun(fun):
    """Return a hashable signature for *fun*: its cached ``code`` attribute
    when present, otherwise its source text (cached on the function when
    possible; 'nocode' when the source is unavailable)."""
    try:
        return fun.code
    except AttributeError:
        pass
    try:
        h=inspect.getsource(fun)
    except IOError:
        h="nocode"
    try:
        fun.code=h
    except AttributeError:
        # builtins and some callables do not accept new attributes
        pass
    return h
reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr,params):
    """Expand ``${VAR}`` references in *expr* using *params*, which may be
    a plain mapping or a ConfigSet-like object exposing ``get_flat``.
    ``$$`` expands to a single dollar sign and a double backslash to one
    backslash."""
    def replace(match):
        if match.group(1):
            return'\\'
        if match.group(2):
            return'$'
        name=match.group(3)
        try:
            return params.get_flat(name)
        except AttributeError:
            return params[name]
    return reg_subst.sub(replace,expr)
def destos_to_binfmt(key):
    """Map a destination OS name to its binary format identifier:
    'mac-o' for darwin, 'pe' for Windows-like targets, 'elf' otherwise."""
    if key=='darwin':
        return'mac-o'
    if key in('win32','cygwin','uwin','msys'):
        return'pe'
    return'elf'
def unversioned_sys_platform():
    """Return sys.platform without its trailing version digits, e.g.
    'linux2' -> 'linux', 'freebsd9' -> 'freebsd'.

    On Jython ('java'), the JVM os.name property is mapped onto CPython
    platform identifiers.  'win32' and genuine '*os2' names are returned
    unchanged.
    """
    s=sys.platform
    if s=='java':
        from java.lang import System
        s=System.getProperty('os.name')
        # map common JVM names onto CPython platform identifiers
        if s=='Mac OS X':
            return'darwin'
        elif s.startswith('Windows '):
            return'win32'
        elif s=='OS/2':
            return'os2'
        elif s=='HP-UX':
            return'hpux'
        elif s in('SunOS','Solaris'):
            return'sunos'
        else:s=s.lower()
    if s=='powerpc':
        return'darwin'
    if s=='win32'or s.endswith('os2')and s!='sunos2':return s
    # raw string: the original '\d+$' is an invalid escape sequence in a
    # plain literal (DeprecationWarning/SyntaxWarning on modern Python)
    return re.split(r'\d+$',s)[0]
def nada(*k,**kw):
    """Do nothing; placeholder callback accepting any arguments."""
    pass
class Timer(object):
    """Elapsed-time tracker: records its creation instant and renders the
    time elapsed since then as e.g. '1d2h3m4.500s' via str()."""
    def __init__(self):
        # record the creation instant in UTC
        self.start_time=datetime.datetime.utcnow()
    def __str__(self):
        delta=datetime.datetime.utcnow()-self.start_time
        days=int(delta.days)
        hours=delta.seconds//3600
        minutes=(delta.seconds-hours*3600)//60
        seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
        parts=[]
        # larger units are only shown when a larger-or-equal unit is non-zero
        if days:
            parts.append('%dd'%days)
        if days or hours:
            parts.append('%dh'%hours)
        if days or hours or minutes:
            parts.append('%dm'%minutes)
        return'%s%.3fs'%(''.join(parts),seconds)
# On Windows, re-apply copystat after shutil.copy2 so file timestamps are
# copied reliably.
if is_win32:
    old=shutil.copy2
    def copy2(src,dst):
        old(src,dst)
        shutil.copystat(src,dst)
    setattr(shutil,'copy2',copy2)
# Jython does not implement gc.disable; alias it to gc.enable so callers
# (e.g. the nogc decorator) can invoke both unconditionally.
if os.name=='java':
    try:
        gc.disable()
        gc.enable()
    except NotImplementedError:
        gc.disable=gc.enable
def read_la_file(path):
    """Parse a libtool .la file at *path* into a dict of its KEY='value'
    entries; lines that do not match the pattern are ignored."""
    pattern=re.compile(r'^([^=]+)=\'(.*)\'$')
    result={}
    for line in readf(path).splitlines():
        try:
            _,key,value,_=pattern.split(line.strip())
        except ValueError:
            # line does not look like KEY='value'; skip it
            continue
        result[key]=value
    return result
def nogc(fun):
    """Decorator that runs *fun* with the garbage collector disabled; the
    collector is re-enabled even when the call raises."""
    def wrapper(*k,**kw):
        try:
            gc.disable()
            ret=fun(*k,**kw)
        finally:
            gc.enable()
        return ret
    wrapper.__doc__=fun.__doc__
    return wrapper
def run_once(fun):
    """Memoizing decorator for a single-argument function: each distinct
    argument value is computed once; the cache is exposed as
    ``wrap.__cache__`` so callers can inspect or clear it."""
    cache={}
    def wrap(k):
        if k in cache:
            return cache[k]
        ret=fun(k)
        cache[k]=ret
        return ret
    wrap.__cache__=cache
    return wrap
def get_registry_app_path(key,filename):
    # Look up an executable's install path in the Windows "App Paths"
    # registry key.  ``filename`` is indexed, so it is expected to be a
    # list/sequence whose first element is the program name — TODO confirm
    # against callers.  Returns the path when it exists on disk, otherwise
    # None (including on non-Windows platforms where winreg is None).
    if not winreg:
        return None
    try:
        result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0])
    except WindowsError:
        # key not present; fall through and return None
        pass
    else:
        if os.path.isfile(result):
            return result
|
dhruvsrivastava/OJ
|
refs/heads/master
|
python/lib/python2.7/_abcoll.py
|
4
|
/usr/lib/python2.7/_abcoll.py
|
starlightme/python
|
refs/heads/master
|
renzongxian/0023/mysite/guestbook/migrations/__init__.py
|
12133432
| |
GoogleCloudPlatform/professional-services
|
refs/heads/main
|
examples/cloudml-energy-price-forecasting/constants/__init__.py
|
12133432
| |
ambitioninc/django-dynamic-initial-data
|
refs/heads/develop
|
dynamic_initial_data/management/__init__.py
|
12133432
| |
Jgarcia-IAS/SAT
|
refs/heads/master
|
openerp/addons-extra/odoo-pruebas/odoo-server/addons/web_api/__init__.py
|
12133432
| |
cloudwatt/contrail-controller
|
refs/heads/master
|
src/config/utils/del_route_target.py
|
13
|
#!/usr/bin/python
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import argparse
import ConfigParser
from provision_bgp import BgpProvisioner
class MxProvisioner(object):
    """Remove a route target from a routing instance via the API server.

    Parses command-line style arguments (optionally seeded from an ini
    file given with -c/--conf_file), then delegates the deletion to
    BgpProvisioner.del_route_target().
    """
    def __init__(self, args_str=None):
        self._args = None
        if not args_str:
            args_str = ' '.join(sys.argv[1:])
        self._parse_args(args_str)

        bp_obj = BgpProvisioner(
            self._args.admin_user, self._args.admin_password,
            self._args.admin_tenant_name,
            self._args.api_server_ip, self._args.api_server_port)
        bp_obj.del_route_target(self._args.routing_instance_name.split(':'),
                                self._args.router_asn,
                                self._args.route_target_number)
    # end __init__

    def _parse_args(self, args_str):
        '''
        Eg. python del_route_target.py --routing_instance_name mx1
                                       --router_asn 64512
                                       --api_server_ip 127.0.0.1
                                       --api_server_port 8082
        '''
        # Source any specified config/ini file
        # Turn off help, so we print all options in response to -h
        conf_parser = argparse.ArgumentParser(add_help=False)

        conf_parser.add_argument("-c", "--conf_file",
                                 help="Specify config file", metavar="FILE")
        args, remaining_argv = conf_parser.parse_known_args(args_str.split())

        defaults = {
            'routing_instance_name': 'default-domain:'
            'default-project:ip-fabric:__default__',
            'route_target_number': '45',
            'router_asn': '64513',
            'api_server_ip': '127.0.0.1',
            'api_server_port': '8082',
            'admin_user': None,
            'admin_password': None,
            'admin_tenant_name': None
        }

        if args.conf_file:
            # values from the [DEFAULTS] section override the built-ins above
            config = ConfigParser.SafeConfigParser()
            config.read([args.conf_file])
            defaults.update(dict(config.items("DEFAULTS")))

        # Override with CLI options
        # Don't suppress add_help here so it will handle -h
        parser = argparse.ArgumentParser(
            # Inherit options from config_parser
            parents=[conf_parser],
            # print script description with -h/--help
            description=__doc__,
            # Don't mess with format of description
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        parser.set_defaults(**defaults)

        parser.add_argument(
            "--routing_instance_name",
            help="Colon separated fully qualified name", required=True)
        parser.add_argument(
            "--route_target_number", help="Route Target for MX interaction", required=True)
        parser.add_argument("--router_asn", help="AS Number the MX is in", required=True)
        parser.add_argument(
            "--api_server_ip", help="IP address of api server", required=True)
        parser.add_argument("--api_server_port", help="Port of api server", required=True)
        parser.add_argument(
            "--admin_user", help="Name of keystone admin user", required=True)
        parser.add_argument(
            "--admin_password", help="Password of keystone admin user", required=True)
        parser.add_argument(
            # fix user-facing help typo: "Tenamt" -> "Tenant"
            "--admin_tenant_name", help="Tenant name for keystone admin user", required=True)

        self._args = parser.parse_args(remaining_argv)
    # end _parse_args
# end class MxProvisioner
def main(args_str=None):
    """Script entry point: run the provisioner with *args_str* (falls back
    to the process command line when None)."""
    MxProvisioner(args_str)
# end main


if __name__ == "__main__":
    main()
|
donckers/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/urls.py
|
6
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com>, 2015
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The match_hostname function and supporting code is under the terms and
# conditions of the Python Software Foundation License. They were taken from
# the Python3 standard library and adapted for use in Python2. See comments in the
# source for which code precisely is under this License. PSF License text
# follows:
#
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
# --------------------------------------------
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
# retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
import netrc
import os
import re
import sys
import socket
import platform
import tempfile
import base64
from ansible.module_utils.basic import get_distribution, get_exception
try:
import httplib
except ImportError:
# Python 3
import http.client as httplib
try:
import urllib2
HAS_URLLIB2 = True
except:
HAS_URLLIB2 = False
try:
import urlparse
HAS_URLPARSE = True
except:
HAS_URLPARSE = False
try:
import ssl
HAS_SSL = True
except:
HAS_SSL = False
try:
# SNI Handling needs python2.7.9's SSLContext
from ssl import create_default_context, SSLContext
HAS_SSLCONTEXT = True
except ImportError:
HAS_SSLCONTEXT = False
try:
try:
from urllib3.contrib.pyopenssl import ssl_wrap_socket
except ImportError:
from requests.packages.urllib3.contrib.pyopenssl import ssl_wrap_socket
HAS_URLLIB3_SNI_SUPPORT = True
except ImportError:
HAS_URLLIB3_SNI_SUPPORT = False
# Select a protocol that includes all secure tls protocols
# Exclude insecure ssl protocols if possible
if HAS_SSL:
    # If we can't find extra tls methods, ssl.PROTOCOL_TLSv1 is sufficient
    PROTOCOL = ssl.PROTOCOL_TLSv1

if not HAS_SSLCONTEXT and HAS_SSL:
    try:
        import ctypes, ctypes.util
    except ImportError:
        # python 2.4 (likely rhel5 which doesn't have tls1.1 support in its openssl)
        pass
    else:
        # Probe the system libssl for TLS 1.1/1.2 entry points; if either is
        # present, SSLv23 lets OpenSSL negotiate the best available protocol.
        libssl_name = ctypes.util.find_library('ssl')
        libssl = ctypes.CDLL(libssl_name)
        for method in ('TLSv1_1_method', 'TLSv1_2_method'):
            try:
                libssl[method]
                # Found something - we'll let openssl autonegotiate and hope
                # the server has disabled sslv2 and 3. best we can do.
                PROTOCOL = ssl.PROTOCOL_SSLv23
                break
            except AttributeError:
                pass
        del libssl
HAS_MATCH_HOSTNAME = True
try:
from ssl import match_hostname, CertificateError
except ImportError:
try:
from backports.ssl_match_hostname import match_hostname, CertificateError
except ImportError:
HAS_MATCH_HOSTNAME = False
if not HAS_MATCH_HOSTNAME:
    ###
    ### The following block of code is under the terms and conditions of the
    ### Python Software Foundation License
    ###

    """The match_hostname() function from Python 3.4, essential when using SSL."""

    # PSF-licensed fallback used when neither ssl.match_hostname nor the
    # backports package is available on this interpreter.
    class CertificateError(ValueError):
        pass

    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False

        # Ported from python3-syntax:
        # leftmost, *remainder = dn.split(r'.')
        parts = dn.split(r'.')
        leftmost = parts[0]
        remainder = parts[1:]

        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))

        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()

        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in which
        # the wildcard character comprises a label other than the left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty dotless
            # fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))

        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)

    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate")
        dnsnames = []
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific Common Name
                    # must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                                   "doesn't match either of %s"
                                   % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                                   "doesn't match %r"
                                   % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                                   "subjectAltName fields were found")
    ###
    ### End of Python Software Foundation Licensed code
    ###

    HAS_MATCH_HOSTNAME = True
# This is a dummy cacert provided for Mac OS since you need at least 1
# ca cert, regardless of validity, for Python on Mac OS to use the
# keychain functionality in OpenSSL for validating SSL certificates.
# See: http://mercurial.selenic.com/wiki/CACertificates#Mac_OS_X_10.6_and_higher
DUMMY_CA_CERT = """-----BEGIN CERTIFICATE-----
MIICvDCCAiWgAwIBAgIJAO8E12S7/qEpMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV
BAYTAlVTMRcwFQYDVQQIEw5Ob3J0aCBDYXJvbGluYTEPMA0GA1UEBxMGRHVyaGFt
MRAwDgYDVQQKEwdBbnNpYmxlMB4XDTE0MDMxODIyMDAyMloXDTI0MDMxNTIyMDAy
MlowSTELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYD
VQQHEwZEdXJoYW0xEDAOBgNVBAoTB0Fuc2libGUwgZ8wDQYJKoZIhvcNAQEBBQAD
gY0AMIGJAoGBANtvpPq3IlNlRbCHhZAcP6WCzhc5RbsDqyh1zrkmLi0GwcQ3z/r9
gaWfQBYhHpobK2Tiq11TfraHeNB3/VfNImjZcGpN8Fl3MWwu7LfVkJy3gNNnxkA1
4Go0/LmIvRFHhbzgfuo9NFgjPmmab9eqXJceqZIlz2C8xA7EeG7ku0+vAgMBAAGj
gaswgagwHQYDVR0OBBYEFPnN1nPRqNDXGlCqCvdZchRNi/FaMHkGA1UdIwRyMHCA
FPnN1nPRqNDXGlCqCvdZchRNi/FaoU2kSzBJMQswCQYDVQQGEwJVUzEXMBUGA1UE
CBMOTm9ydGggQ2Fyb2xpbmExDzANBgNVBAcTBkR1cmhhbTEQMA4GA1UEChMHQW5z
aWJsZYIJAO8E12S7/qEpMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA
MUB80IR6knq9K/tY+hvPsZer6eFMzO3JGkRFBh2kn6JdMDnhYGX7AXVHGflrwNQH
qFy+aenWXsC0ZvrikFxbQnX8GVtDADtVznxOi7XzFw7JOxdsVrpXgSN0eh0aMzvV
zKPZsZ2miVGclicJHzm5q080b1p/sZtuKIEZk6vZqEg=
-----END CERTIFICATE-----
"""
#
# Exceptions
#
# Hierarchy: ConnectionError is the base; ProxyError and SSLValidationError
# specialize it, and NoSSLError specializes SSLValidationError.
class ConnectionError(Exception):
    """Failed to connect to the server"""
    pass


class ProxyError(ConnectionError):
    """Failure to connect because of a proxy"""
    pass


class SSLValidationError(ConnectionError):
    """Failure to connect due to SSL validation failing"""
    pass


class NoSSLError(SSLValidationError):
    """Needed to connect to an HTTPS url but no ssl library available to verify the certificate"""
    pass
# Some environments (Google Compute Engine's CoreOS deploys) do not compile
# against openssl and thus do not have any HTTPS support.
CustomHTTPSConnection = CustomHTTPSHandler = None
if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib2, 'HTTPSHandler'):
    class CustomHTTPSConnection(httplib.HTTPSConnection):
        # HTTPSConnection variant that validates via an SSLContext when the
        # interpreter provides one (python >= 2.7.9) and falls back to
        # urllib3/pyopenssl SNI support or plain ssl.wrap_socket otherwise.
        def __init__(self, *args, **kwargs):
            httplib.HTTPSConnection.__init__(self, *args, **kwargs)
            if HAS_SSLCONTEXT:
                self.context = create_default_context()
                if self.cert_file:
                    self.context.load_cert_chain(self.cert_file, self.key_file)

        def connect(self):
            "Connect to a host on a given (SSL) port."

            if hasattr(self, 'source_address'):
                sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address)
            else:
                sock = socket.create_connection((self.host, self.port), self.timeout)

            server_hostname = self.host
            # Note: self._tunnel_host is not available on py < 2.6 but this code
            # isn't used on py < 2.6 (lack of create_connection)
            if self._tunnel_host:
                self.sock = sock
                self._tunnel()
                # SNI must name the tunnelled host, not the proxy
                server_hostname = self._tunnel_host

            if HAS_SSLCONTEXT:
                self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname)
            elif HAS_URLLIB3_SNI_SUPPORT:
                self.sock = ssl_wrap_socket(sock, keyfile=self.key_file, cert_reqs=ssl.CERT_NONE, certfile=self.cert_file, ssl_version=PROTOCOL, server_hostname=server_hostname)
            else:
                self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)

    class CustomHTTPSHandler(urllib2.HTTPSHandler):
        # route https opens through CustomHTTPSConnection above
        def https_open(self, req):
            return self.do_open(CustomHTTPSConnection, req)

        https_request = urllib2.AbstractHTTPHandler.do_request_
def generic_urlparse(parts):
    '''
    Returns a dictionary of url parts as parsed by urlparse,
    but accounts for the fact that older versions of that
    library do not support named attributes (ie. .netloc)

    :param parts: result of urlparse.urlparse() (named tuple or plain tuple)
    :returns: dict with scheme/netloc/path/params/query/fragment keys and,
              when determinable, username/password/hostname/port
    '''
    generic_parts = dict()
    if hasattr(parts, 'netloc'):
        # urlparse is newer, just read the fields straight
        # from the parts object
        generic_parts['scheme'] = parts.scheme
        generic_parts['netloc'] = parts.netloc
        generic_parts['path'] = parts.path
        generic_parts['params'] = parts.params
        generic_parts['query'] = parts.query
        generic_parts['fragment'] = parts.fragment
        generic_parts['username'] = parts.username
        generic_parts['password'] = parts.password
        generic_parts['hostname'] = parts.hostname
        generic_parts['port'] = parts.port
    else:
        # we have to use indexes, and then parse out
        # the other parts not supported by indexing
        generic_parts['scheme'] = parts[0]
        generic_parts['netloc'] = parts[1]
        generic_parts['path'] = parts[2]
        generic_parts['params'] = parts[3]
        generic_parts['query'] = parts[4]
        generic_parts['fragment'] = parts[5]
        # get the username, password, etc.
        try:
            netloc_re = re.compile(r'^((?:\w)+(?::(?:\w)+)?@)?([A-Za-z0-9.-]+)(:\d+)?$')
            match = netloc_re.match(parts[1])
            auth = match.group(1)
            hostname = match.group(2)
            port = match.group(3)
            if port:
                # the capture group for the port will include the ':',
                # so remove it and convert the port to an integer
                port = int(port[1:])
            if auth:
                # the capture group above includes the @, so remove it
                # and then split it up based on the first ':' found
                auth = auth[:-1]
                username, password = auth.split(':', 1)
            else:
                username = password = None
            generic_parts['username'] = username
            generic_parts['password'] = password
            generic_parts['hostname'] = hostname
            generic_parts['port'] = port
        except Exception:
            # narrowed from a bare ``except:`` which would also swallow
            # KeyboardInterrupt/SystemExit; any parse failure falls back to
            # treating the whole netloc as the hostname
            generic_parts['username'] = None
            generic_parts['password'] = None
            generic_parts['hostname'] = parts[1]
            generic_parts['port'] = None
    return generic_parts
class RequestWithMethod(urllib2.Request):
    '''
    Workaround for using DELETE/PUT/etc with urllib2
    Originally contained in library/net_infrastructure/dnsmadeeasy
    '''

    def __init__(self, url, method, data=None, headers=None):
        # store the HTTP verb uppercased; urllib2 only knows GET/POST natively
        self._method = method.upper()
        urllib2.Request.__init__(self, url, data, {} if headers is None else headers)

    def get_method(self):
        # fall back to urllib2's GET/POST inference when no explicit
        # method was stored
        return self._method or urllib2.Request.get_method(self)
def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
    """This is a class factory that closes over the value of
    ``follow_redirects`` so that the RedirectHandler class has access to
    that value without having to use globals, and potentially cause problems
    where ``open_url`` or ``fetch_url`` are used multiple times in a module.
    """

    class RedirectHandler(urllib2.HTTPRedirectHandler):
        """This is an implementation of a RedirectHandler to match the
        functionality provided by httplib2. It will utilize the value of
        ``follow_redirects`` that is passed into ``RedirectHandlerFactory``
        to determine how redirects should be handled in urllib2.
        """

        def redirect_request(self, req, fp, code, msg, hdrs, newurl):
            # the redirect target may be https even when the original URL was
            # not; attach certificate validation for the new host if needed
            handler = maybe_add_ssl_handler(newurl, validate_certs)
            if handler:
                urllib2._opener.add_handler(handler)

            if follow_redirects == 'urllib2':
                return urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, hdrs, newurl)
            elif follow_redirects in ['no', 'none', False]:
                raise urllib2.HTTPError(newurl, code, msg, hdrs, fp)

            do_redirect = False
            if follow_redirects in ['all', 'yes', True]:
                do_redirect = (code >= 300 and code < 400)

            elif follow_redirects == 'safe':
                # 'safe' only follows redirects for idempotent methods
                m = req.get_method()
                do_redirect = (code >= 300 and code < 400 and m in ('GET', 'HEAD'))

            if do_redirect:
                # be conciliant with URIs containing a space
                newurl = newurl.replace(' ', '%20')
                # drop body-describing headers: the redirected request is a GET
                newheaders = dict((k, v) for k, v in req.headers.items()
                                  if k.lower() not in ("content-length", "content-type")
                                  )
                return urllib2.Request(newurl,
                                       headers=newheaders,
                                       origin_req_host=req.get_origin_req_host(),
                                       unverifiable=True)
            else:
                raise urllib2.HTTPError(req.get_full_url(), code, msg, hdrs,
                                        fp)

    return RedirectHandler
def build_ssl_validation_error(hostname, port, paths):
    '''Intelligently build out the SSLValidationError based on what support
    you have installed
    '''

    pieces = [
        ('Failed to validate the SSL certificate for %s:%s.'
         ' Make sure your managed systems have a valid CA'
         ' certificate installed.')
    ]
    if not HAS_SSLCONTEXT:
        pieces.append('If the website serving the url uses SNI you need'
                      ' python >= 2.7.9 on your managed machine')
        if not HAS_URLLIB3_SNI_SUPPORT:
            pieces.append('or you can install the `urllib3`, `pyopenssl`,'
                          ' `ndg-httpsclient`, and `pyasn1` python modules')

        pieces.append('to perform SNI verification in python >= 2.6.')

    pieces.append('You can use validate_certs=False if you do'
                  ' not need to confirm the servers identity but this is'
                  ' unsafe and not recommended.'
                  ' Paths checked for this platform: %s')

    raise SSLValidationError(' '.join(pieces) % (hostname, port, ", ".join(paths)))
class SSLValidationHandler(urllib2.BaseHandler):
'''
A custom handler class for SSL validation.
Based on:
http://stackoverflow.com/questions/1087227/validate-ssl-certificates-with-python
http://techknack.net/python-urllib2-handlers/
'''
CONNECT_COMMAND = "CONNECT %s:%s HTTP/1.0\r\nConnection: close\r\n"
def __init__(self, hostname, port):
self.hostname = hostname
self.port = port
def get_ca_certs(self):
# tries to find a valid CA cert in one of the
# standard locations for the current distribution
ca_certs = []
paths_checked = []
system = platform.system()
# build a list of paths to check for .crt/.pem files
# based on the platform type
paths_checked.append('/etc/ssl/certs')
if system == 'Linux':
paths_checked.append('/etc/pki/ca-trust/extracted/pem')
paths_checked.append('/etc/pki/tls/certs')
paths_checked.append('/usr/share/ca-certificates/cacert.org')
elif system == 'FreeBSD':
paths_checked.append('/usr/local/share/certs')
elif system == 'OpenBSD':
paths_checked.append('/etc/ssl')
elif system == 'NetBSD':
ca_certs.append('/etc/openssl/certs')
elif system == 'SunOS':
paths_checked.append('/opt/local/etc/openssl/certs')
# fall back to a user-deployed cert in a standard
# location if the OS platform one is not available
paths_checked.append('/etc/ansible')
tmp_fd, tmp_path = tempfile.mkstemp()
# Write the dummy ca cert if we are running on Mac OS X
if system == 'Darwin':
os.write(tmp_fd, DUMMY_CA_CERT)
# Default Homebrew path for OpenSSL certs
paths_checked.append('/usr/local/etc/openssl')
# for all of the paths, find any .crt or .pem files
# and compile them into single temp file for use
# in the ssl check to speed up the test
for path in paths_checked:
if os.path.exists(path) and os.path.isdir(path):
dir_contents = os.listdir(path)
for f in dir_contents:
full_path = os.path.join(path, f)
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
try:
cert_file = open(full_path, 'r')
os.write(tmp_fd, cert_file.read())
os.write(tmp_fd, '\n')
cert_file.close()
except:
pass
return (tmp_path, paths_checked)
def validate_proxy_response(self, response, valid_codes=[200]):
    '''
    make sure we get back a valid code from the proxy
    '''
    try:
        http_version, resp_code, msg = re.match(r'(HTTP/\d\.\d) (\d\d\d) (.*)', response).groups()
        if int(resp_code) in valid_codes:
            return
    except:
        # fall through: any parse failure is treated as a proxy failure
        pass
    raise ProxyError('Connection to proxy failed')
def detect_no_proxy(self, url):
    '''
    Detect if the 'no_proxy' environment variable is set and honor those locations.

    Returns False when *url* matches an entry in ``no_proxy`` (the proxy
    must be bypassed), True otherwise.
    '''
    no_proxy_env = os.environ.get('no_proxy')
    if not no_proxy_env:
        return True
    netloc = urlparse.urlparse(url).netloc
    bare_host = netloc.split(':')[0]
    for entry in no_proxy_env.split(','):
        if netloc.endswith(entry) or bare_host.endswith(entry):
            # Our requested URL matches something in no_proxy, so don't
            # use the proxy for this
            return False
    return True
def _make_context(self, tmp_ca_cert_path):
    """Build an SSLContext that trusts the CA bundle at *tmp_ca_cert_path*."""
    ctx = create_default_context()
    ctx.load_verify_locations(tmp_ca_cert_path)
    return ctx
def http_request(self, req):
    """urllib2 handler hook: validate the target server's SSL certificate.

    Opens a throwaway TLS connection (optionally tunnelled through an
    ``https_proxy`` HTTP CONNECT proxy) purely to verify the certificate
    chain against the bundled CA file and to match the hostname, then
    returns the request unchanged so the normal opener proceeds.
    """
    tmp_ca_cert_path, paths_checked = self.get_ca_certs()
    https_proxy = os.environ.get('https_proxy')
    context = None
    if HAS_SSLCONTEXT:
        context = self._make_context(tmp_ca_cert_path)

    # Detect if 'no_proxy' environment variable is set and if our URL is included
    use_proxy = self.detect_no_proxy(req.get_full_url())

    if not use_proxy:
        # ignore proxy settings for this host request
        return req

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if https_proxy:
            proxy_parts = generic_urlparse(urlparse.urlparse(https_proxy))
            port = proxy_parts.get('port') or 443
            s.connect((proxy_parts.get('hostname'), port))
            if proxy_parts.get('scheme') == 'http':
                # tunnel to the real host with an HTTP CONNECT request
                s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
                if proxy_parts.get('username'):
                    credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
                    s.sendall('Proxy-Authorization: Basic %s\r\n' % credentials.encode('base64').strip())
                s.sendall('\r\n')
                connect_result = s.recv(4096)
                self.validate_proxy_response(connect_result)
                if context:
                    # SSLContext path: the default context performs
                    # hostname checking itself (ssl.create_default_context)
                    ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
                elif HAS_URLLIB3_SNI_SUPPORT:
                    ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
                else:
                    # legacy path: no SNI, so match the hostname manually
                    ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                    match_hostname(ssl_s.getpeercert(), self.hostname)
            else:
                raise ProxyError('Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
        else:
            # direct connection, no proxy involved
            s.connect((self.hostname, self.port))
            if context:
                ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
            elif HAS_URLLIB3_SNI_SUPPORT:
                ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
            else:
                ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                match_hostname(ssl_s.getpeercert(), self.hostname)
        # close the ssl connection
        #ssl_s.unwrap()
        s.close()
    except (ssl.SSLError, socket.error):
        e = get_exception()
        # fail if we tried all of the certs but none worked
        if 'connection refused' in str(e).lower():
            raise ConnectionError('Failed to connect to %s:%s.' % (self.hostname, self.port))
        else:
            build_ssl_validation_error(self.hostname, self.port, paths_checked)
    except CertificateError:
        build_ssl_validation_error(self.hostname, self.port, paths_checked)

    try:
        # cleanup the temp file created, don't worry
        # if it fails for some reason
        os.remove(tmp_ca_cert_path)
    except:
        pass

    return req

# https requests are validated exactly the same way
https_request = http_request
def maybe_add_ssl_handler(url, validate_certs):
    """Return an SSLValidationHandler for https URLs when certificate
    validation was requested, otherwise None.
    """
    # FIXME: change the following to use the generic_urlparse function
    # to remove the indexed references for 'parsed'
    parsed = urlparse.urlparse(url)
    if parsed[0] != 'https' or not validate_certs:
        return None

    if not HAS_SSL:
        raise NoSSLError('SSL validation is not available in your version of python. You can use validate_certs=False, however this is unsafe and not recommended')

    # do the cert validation
    netloc = parsed[1]
    if '@' in netloc:
        netloc = netloc.split('@', 1)[1]
    hostname, sep, port_text = netloc.partition(':')
    port = int(port_text) if sep else 443

    # create the SSL validation handler and
    # add it to the list of handlers
    return SSLValidationHandler(hostname, port)
# Rewrite of fetch_url to not require the module environment
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
        force=False, last_mod_time=None, timeout=10, validate_certs=True,
        url_username=None, url_password=None, http_agent=None,
        force_basic_auth=False, follow_redirects='urllib2'):
    '''
    Fetches a file from an HTTP/FTP server using urllib2

    Returns the urllib2 response object.  SSL validation, basic-auth /
    netrc credentials, proxy bypass and redirect behaviour are driven by
    the keyword arguments.  Raises ConnectionError for an unsupported
    HTTP method and ValueError when *headers* is not a dict.
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    # FIXME: change the following to use the generic_urlparse function
    # to remove the indexed references for 'parsed'
    parsed = urlparse.urlparse(url)
    if parsed[0] != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed[1]
        elif '@' in parsed[1]:
            # credentials embedded in the URL (user:pass@host)
            credentials, netloc = parsed[1].split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed = list(parsed)
            parsed[1] = netloc

            # reconstruct url without credentials
            url = urlparse.urlunparse(parsed)

        if username and not force_basic_auth:
            passman = urllib2.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for urls
            # for which `theurl` is a super-url
            authhandler = urllib2.HTTPBasicAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)

        elif username and force_basic_auth:
            # send the Authorization header unconditionally, without
            # waiting for a 401 challenge
            headers["Authorization"] = basic_auth_header(username, password)

        else:
            # no explicit credentials: fall back to ~/.netrc (or $NETRC)
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed[1])
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(username, password)

    if not use_proxy:
        # an empty mapping disables all proxy usage for this opener
        proxyhandler = urllib2.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(urllib2.HTTPSHandler(context=context))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)

    if method:
        if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT','PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' % method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib2.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    request.add_header('User-agent', http_agent)

    # if we're ok with getting a 304, set the timestamp in the
    # header, otherwise make sure we don't get a cached copy
    if last_mod_time and not force:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)
    else:
        request.add_header('cache-control', 'no-cache')

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2,6,0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib2.urlopen(*urlopen_args)
    return r
#
# Module-related functions
#
def basic_auth_header(username, password):
    """Return the value of an HTTP Basic ``Authorization`` header.

    The credential pair is explicitly encoded to bytes before
    base64-encoding and the digest decoded back to text.  This keeps the
    python2 behaviour and fixes a TypeError under python3, where
    ``base64.b64encode`` rejects ``str`` input.
    """
    credentials = ("%s:%s" % (username, password)).encode('utf-8')
    return "Basic %s" % base64.b64encode(credentials).decode('ascii')
def url_argument_spec():
    '''
    Creates an argument spec that can be used with any module
    that will be requesting content via urllib/urllib2
    '''
    spec = {
        'url': dict(),
        'force': dict(default='no', aliases=['thirsty'], type='bool'),
        'http_agent': dict(default='ansible-httpget'),
        'use_proxy': dict(default='yes', type='bool'),
        'validate_certs': dict(default='yes', type='bool'),
        'url_username': dict(required=False),
        'url_password': dict(required=False),
        'force_basic_auth': dict(required=False, type='bool', default='no'),
    }
    return spec
def fetch_url(module, url, data=None, headers=None, method=None,
              use_proxy=True, force=False, last_mod_time=None, timeout=10):
    '''
    Fetches a file from an HTTP/FTP server using urllib2. Requires the module environment.

    Certificate validation, credentials, user agent and redirect behaviour
    are read from ``module.params``.  Returns ``(response, info)`` where
    ``response`` is the urllib2 response (None on failure) and ``info`` is
    a dict carrying at least ``url``, ``msg`` and ``status``.  Fatal
    configuration or SSL errors are reported via ``module.fail_json`` and
    do not return.
    '''
    if not HAS_URLLIB2:
        module.fail_json(msg='urllib2 is not installed')
    elif not HAS_URLPARSE:
        module.fail_json(msg='urlparse is not installed')

    # Get validate_certs from the module params
    validate_certs = module.params.get('validate_certs', True)

    username = module.params.get('url_username', '')
    password = module.params.get('url_password', '')
    http_agent = module.params.get('http_agent', None)
    force_basic_auth = module.params.get('force_basic_auth', '')

    follow_redirects = module.params.get('follow_redirects', 'urllib2')

    r = None
    info = dict(url=url)
    try:
        r = open_url(url, data=data, headers=headers, method=method,
                use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout,
                validate_certs=validate_certs, url_username=username,
                url_password=password, http_agent=http_agent, force_basic_auth=force_basic_auth,
                follow_redirects=follow_redirects)
        info.update(r.info())
        info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), url=r.geturl(), status=r.code))
    except NoSSLError:
        e = get_exception()
        distribution = get_distribution()
        # RHEL users get a hint about where to find python-ssl
        if distribution is not None and distribution.lower() == 'redhat':
            module.fail_json(msg='%s. You can also install python-ssl from EPEL' % str(e))
        else:
            module.fail_json(msg='%s' % str(e))
    except (ConnectionError, ValueError):
        e = get_exception()
        module.fail_json(msg=str(e))
    except urllib2.HTTPError:
        # HTTP-level errors still carry a body and headers worth returning
        e = get_exception()
        try:
            body = e.read()
        except AttributeError:
            body = ''
        info.update(dict(msg=str(e), body=body, **e.info()))
        info['status'] = e.code
    except urllib2.URLError:
        e = get_exception()
        code = int(getattr(e, 'code', -1))
        info.update(dict(msg="Request failed: %s" % str(e), status=code))
    except socket.error:
        e = get_exception()
        info.update(dict(msg="Connection failure: %s" % str(e), status=-1))
    except Exception:
        # catch-all so a module never dies with a raw traceback here
        e = get_exception()
        info.update(dict(msg="An unknown error occurred: %s" % str(e), status=-1))

    return r, info
|
MinimalOS/external_chromium_org_third_party_skia
|
refs/heads/lp-mr1
|
platform_tools/android/bin/download_utils.py
|
149
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A library to assist automatically downloading files.
This library is used by scripts that download tarballs, zipfiles, etc. as part
of the build process.
"""
import hashlib
import http_download
import os.path
import re
import shutil
import sys
import time
import urllib2
# Names of the stamp files used to decide whether a download is current.
SOURCE_STAMP = 'SOURCE_URL'
HASH_STAMP = 'SOURCE_SHA1'


# Designed to handle more general inputs than sys.platform because the platform
# name may come from the command line.
PLATFORM_COLLAPSE = {
    'windows': 'windows',
    'win32': 'windows',
    'cygwin': 'windows',
    'linux': 'linux',
    'linux2': 'linux',
    'linux3': 'linux',
    'darwin': 'mac',
    'mac': 'mac',
}

# Maps specific machine identifiers onto a generic architecture family.
ARCH_COLLAPSE = {
    'i386' : 'x86',
    'i686' : 'x86',
    'x86_64': 'x86',
    'armv7l': 'arm',
}
class HashError(Exception):
    """Raised when a downloaded file's SHA1 differs from the expected value."""

    def __init__(self, download_url, expected_hash, actual_hash):
        self.download_url = download_url
        self.expected_hash = expected_hash
        self.actual_hash = actual_hash

    def __str__(self):
        return 'Got hash "{0}" but expected hash "{1}" for "{2}"'.format(
            self.actual_hash, self.expected_hash, self.download_url)
def PlatformName(name=None):
    """Collapse a platform identifier (default: sys.platform) to one of
    'windows'/'linux'/'mac'; raises KeyError for unknown platforms."""
    key = sys.platform if name is None else name
    return PLATFORM_COLLAPSE[key]
def ArchName(name=None):
    """Collapse a machine identifier to its generic family ('x86'/'arm');
    auto-detects the local machine when *name* is None."""
    if name is None:
        # TODO(pdox): Figure out how to auto-detect 32-bit vs 64-bit Windows.
        if PlatformName() == 'windows':
            name = 'i386'
        else:
            import platform
            name = platform.machine()
    return ARCH_COLLAPSE[name]
def EnsureFileCanBeWritten(filename):
    """Create the parent directory of *filename* if it does not exist.

    BUGFIX: a bare filename (no directory component) used to call
    os.makedirs('') and raise OSError; that case is now a no-op.  The
    makedirs call also tolerates the directory appearing concurrently
    between the exists() check and the creation.
    """
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        try:
            os.makedirs(directory)
        except OSError:
            # Another process may have created it in the meantime; only
            # re-raise when the directory genuinely could not be made.
            if not os.path.isdir(directory):
                raise
def WriteData(filename, data):
    """Write *data* to *filename* in binary mode, creating parent dirs.

    BUGFIX: the file handle is now closed in a finally clause, so it is
    not leaked when the write itself raises.
    """
    EnsureFileCanBeWritten(filename)
    f = open(filename, 'wb')
    try:
        f.write(data)
    finally:
        f.close()
def WriteDataFromStream(filename, stream, chunk_size, verbose=True):
    """Copy *stream* to *filename* in chunks of *chunk_size* bytes.

    When *verbose* is true, one dot is printed per chunk as a progress
    indicator and a final newline on completion.
    """
    EnsureFileCanBeWritten(filename)
    dst = open(filename, 'wb')
    try:
        while True:
            block = stream.read(chunk_size)
            if not block:
                break
            dst.write(block)
            if verbose:
                # Indicate that we're still writing.
                sys.stdout.write('.')
                sys.stdout.flush()
    finally:
        if verbose:
            sys.stdout.write('\n')
        dst.close()
def DoesStampMatch(stampfile, expected, index):
    """Compare line *index* of *stampfile* against *expected*.

    Returns a truthy description string when the stamp matches or is a
    manual override, and False otherwise (including an unreadable file).
    """
    try:
        f = open(stampfile, 'r')
        try:
            stamp = f.read()
        finally:
            f.close()
    except IOError:
        return False
    if stamp.split('\n')[index] == expected:
        return "already up-to-date."
    if stamp.startswith('manual'):
        return "manual override."
    return False
def WriteStamp(stampfile, data):
    """Record *data* in *stampfile*, creating parent directories first."""
    EnsureFileCanBeWritten(stampfile)
    f = open(stampfile, 'w')
    try:
        f.write(data)
    finally:
        f.close()
def StampIsCurrent(path, stamp_name, stamp_contents, min_time=None, index=0):
    """Check whether the stamp file path/stamp_name matches stamp_contents.

    A stamp older than *min_time* (or one that cannot be stat'ed) is
    always stale.  Returns the truthy result of DoesStampMatch, or False.
    """
    stampfile = os.path.join(path, stamp_name)

    # Check if the stampfile is older than the minimum last mod time
    if min_time:
        try:
            if os.stat(stampfile).st_mtime <= min_time:
                return False
        except OSError:
            return False

    return DoesStampMatch(stampfile, stamp_contents, index)
def WriteSourceStamp(path, url):
    """Record *url* as the SOURCE_URL stamp in directory *path*."""
    WriteStamp(os.path.join(path, SOURCE_STAMP), url)
def WriteHashStamp(path, hash_val):
    """Record *hash_val* as the SOURCE_SHA1 stamp in directory *path*."""
    WriteStamp(os.path.join(path, HASH_STAMP), hash_val)
def Retry(op, *args):
    # Windows is prone to spurious failures of commands that delete files
    # or directories; we do not fully understand why, so as a workaround
    # the command is simply retried a few times with exponential backoff.
    # File locks appear to linger longer than they should (possibly a
    # secondary effect of processes hanging around, e.g. sel_ldr not
    # exiting immediately after a browser is killed).  Virus checkers can
    # also accidently prevent deletions, though not on the bots.
    if sys.platform not in ('win32', 'cygwin'):
        # Non-Windows platforms get a single, unretried attempt.
        op(*args)
        return
    attempt = 0
    while True:
        try:
            op(*args)
            return
        except Exception:
            sys.stdout.write("FAILED: %s %s\n" % (op.__name__, repr(args)))
            attempt += 1
            if attempt >= 5:
                # Don't mask the exception.
                raise
            sys.stdout.write("RETRY: %s %s\n" % (op.__name__, repr(args)))
            time.sleep(pow(2, attempt))
def MoveDirCleanly(src, dst):
    """Move directory *src* to *dst*, deleting any pre-existing *dst* first."""
    RemoveDir(dst)
    MoveDir(src, dst)
def MoveDir(src, dst):
    """Move a directory, retrying on Windows where moves are flaky."""
    Retry(shutil.move, src, dst)
def RemoveDir(path):
    """Delete the directory tree at *path* if it exists (with retries)."""
    if os.path.exists(path):
        Retry(shutil.rmtree, path)
def RemoveFile(path):
    """Delete the file at *path* if it exists (with retries)."""
    if os.path.exists(path):
        Retry(os.unlink, path)
def _HashFileHandle(fh):
"""sha1 of a file like object.
Arguments:
fh: file handle like object to hash.
Returns:
sha1 as a string.
"""
hasher = hashlib.sha1()
try:
while True:
data = fh.read(4096)
if not data:
break
hasher.update(data)
finally:
fh.close()
return hasher.hexdigest()
def HashFile(filename):
    """sha1 a file on disk.

    Arguments:
      filename: filename to hash.
    Returns:
      sha1 as a string.
    """
    return _HashFileHandle(open(filename, 'rb'))
def HashUrlByDownloading(url):
    """sha1 the data at an url.

    Arguments:
      url: url to download from.
    Returns:
      sha1 of the data at the url.
    """
    try:
        fh = urllib2.urlopen(url)
    except:
        # log which URL failed before propagating the original error
        sys.stderr.write("Failed fetching URL: %s\n" % url)
        raise
    return _HashFileHandle(fh)
# Attempts to get the SHA1 hash of a file given a URL by looking for
# an adjacent file with a ".sha1hash" suffix. This saves having to
# download a large tarball just to get its hash. Otherwise, we fall
# back to downloading the main file.
def HashUrl(url):
    """Return the SHA1 hash (hex string) for the file at *url*.

    NOTE(review): python2-only except syntax; needs porting for python3.
    """
    hash_url = '%s.sha1hash' % url
    try:
        fh = urllib2.urlopen(hash_url)
        # a sha1 hex digest plus optional newline is at most 41 chars;
        # 100 bytes is a generous cap
        data = fh.read(100)
        fh.close()
    except urllib2.HTTPError, exn:
        if exn.code == 404:
            # no sidecar hash file published; hash the full download
            return HashUrlByDownloading(url)
        raise
    else:
        if not re.match('[0-9a-f]{40}\n?$', data):
            raise AssertionError('Bad SHA1 hash file: %r' % data)
        return data.strip()
def SyncURL(url, filename=None, stamp_dir=None, min_time=None,
            hash_val=None, keep=False, verbose=False, stamp_index=0):
    """Synchronize a destination file with a URL

    if the URL does not match the URL stamp, then we must re-download it.

    Arguments:
      url: the url which will to compare against and download
      filename: the file to create on download
      stamp_dir: directory containing the URL stamp to check against
      min_time: minimum modification time the stamp must have to count
      hash_val: if set, the expected hash which must be matched
      keep: if true, the downloaded file must already exist on disk
      verbose: prints out status as it runs
      stamp_index: index within the stamp file to check.
    Returns:
      True if the file is replaced
      False if the file is not replaced
    Exception:
      HashError: if the hash does not match
    """
    assert url and filename

    # If we are not keeping the tarball, or we already have it, we can
    # skip downloading it for this reason. If we are keeping it,
    # it must exist.
    if keep:
        tarball_ok = os.path.isfile(filename)
    else:
        tarball_ok = True

    # If we don't need the tarball and the stamp_file matches the url, then
    # we must be up to date.  If the URL differs but the recorded hash matches
    # the one we'll insist the tarball has, then that's good enough too.
    # TODO(mcgrathr): Download the .sha1sum file first to compare with
    # the cached hash, in case --file-hash options weren't used.
    if tarball_ok and stamp_dir is not None:
        if StampIsCurrent(stamp_dir, SOURCE_STAMP, url, min_time):
            if verbose:
                print '%s is already up to date.' % filename
            return False
        if (hash_val is not None and
            StampIsCurrent(stamp_dir, HASH_STAMP, hash_val, min_time, stamp_index)):
            if verbose:
                print '%s is identical to the up to date file.' % filename
            return False

    if verbose:
        print 'Updating %s\n\tfrom %s.' % (filename, url)
    EnsureFileCanBeWritten(filename)
    http_download.HttpDownload(url, filename)

    if hash_val:
        # verify the download before declaring success
        tar_hash = HashFile(filename)
        if hash_val != tar_hash:
            raise HashError(actual_hash=tar_hash, expected_hash=hash_val,
                            download_url=url)

    return True
|
0x0all/scikit-learn
|
refs/heads/master
|
examples/plot_isotonic_regression.py
|
303
|
"""
===================
Isotonic Regression
===================
An illustration of the isotonic regression on generated data. The
isotonic regression finds a non-decreasing approximation of a function
while minimizing the mean squared error on the training data. The benefit
of such a model is that it does not assume any form for the target
function such as linearity. For comparison a linear regression is also
presented.
"""
print(__doc__)

# Author: Nelle Varoquaux <nelle.varoquaux@gmail.com>
#         Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Licence: BSD

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection

from sklearn.linear_model import LinearRegression
from sklearn.isotonic import IsotonicRegression
from sklearn.utils import check_random_state

n = 100
x = np.arange(n)
rs = check_random_state(0)
# noisy, roughly increasing target: uniform noise around 50*log(1+x)
y = rs.randint(-50, 50, size=(n,)) + 50. * np.log(1 + np.arange(n))

###############################################################################
# Fit IsotonicRegression and LinearRegression models

ir = IsotonicRegression()

y_ = ir.fit_transform(x, y)

lr = LinearRegression()
lr.fit(x[:, np.newaxis], y)  # x needs to be 2d for LinearRegression

###############################################################################
# plot result

# one vertical segment per sample, joining each observation to its isotonic fit
segments = [[[i, y[i]], [i, y_[i]]] for i in range(n)]
lc = LineCollection(segments, zorder=0)
lc.set_array(np.ones(len(y)))
lc.set_linewidths(0.5 * np.ones(n))

fig = plt.figure()
plt.plot(x, y, 'r.', markersize=12)
plt.plot(x, y_, 'g.-', markersize=12)
plt.plot(x, lr.predict(x[:, np.newaxis]), 'b-')
plt.gca().add_collection(lc)
plt.legend(('Data', 'Isotonic Fit', 'Linear Fit'), loc='lower right')
plt.title('Isotonic regression')
plt.show()
|
scorpilix/Golemtest
|
refs/heads/develop
|
golem/diag/vm.py
|
2
|
import os
import psutil
from golem.core.common import is_windows, is_osx
from golem.diag.service import DiagnosticsProvider
class VMDiagnosticsProvider(DiagnosticsProvider):
    """Provides psutil-based diagnostics for the current process."""

    def __init__(self):
        self.process = psutil.Process(os.getpid())
        # Attributes available on every platform psutil supports here.
        attrs = [
            'connections', 'cpu_percent', 'cpu_times', 'create_time',
            'memory_info', 'memory_percent',
            'nice', 'num_ctx_switches', 'num_threads', 'status',
            'username', 'cwd'
        ]
        if is_windows():
            attrs.append('num_handles')
        else:
            attrs.extend(['uids', 'num_fds'])
        if not is_osx():
            attrs.extend(['cpu_affinity', 'io_counters'])
        self.attrs = attrs

    def get_diagnostics(self, output_format):
        """Snapshot the selected process attributes and format them."""
        snapshot = self.process.as_dict(attrs=self.attrs)
        return self._format_diagnostics(snapshot, output_format)
|
rbaindourov/v8-inspector
|
refs/heads/master
|
Source/chrome/v8/tools/testrunner/server/presence_handler.py
|
123
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import socket
import SocketServer
import threading
try:
import ujson as json
except:
import json
from . import constants
from ..objects import peer
# Wire strings identifying the three UDP presence message types.
STARTUP_REQUEST = "V8 test peer starting up"
STARTUP_RESPONSE = "Let's rock some tests!"
EXIT_REQUEST = "V8 testing peer going down"
def GetOwnIP():
    """Return the local IP address used for outbound traffic.

    "Connecting" a UDP socket to a public resolver sends no packets; it
    only selects the outgoing interface, whose address we read back.
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        probe.connect(("8.8.8.8", 80))
        return probe.getsockname()[0]
    finally:
        probe.close()
class PresenceHandler(SocketServer.BaseRequestHandler):
    """Handles one incoming UDP presence datagram.

    Datagrams are JSON lists whose first element selects the message
    type: STARTUP_REQUEST, STARTUP_RESPONSE or EXIT_REQUEST.
    """

    def handle(self):
        data = json.loads(self.request[0].strip())

        if data[0] == STARTUP_REQUEST:
            # A new peer announced itself: answer with our own details...
            jobs = data[1]
            relative_perf = data[2]
            pubkey_fingerprint = data[3]
            trusted = self.server.daemon.IsTrusted(pubkey_fingerprint)
            response = [STARTUP_RESPONSE, self.server.daemon.jobs,
                        self.server.daemon.relative_perf,
                        self.server.daemon.pubkey_fingerprint, trusted]
            response = json.dumps(response)
            self.server.SendTo(self.client_address[0], response)
            # ...and record the announcing peer locally.
            p = peer.Peer(self.client_address[0], jobs, relative_perf,
                          pubkey_fingerprint)
            p.trusted = trusted
            self.server.daemon.AddPeer(p)

        elif data[0] == STARTUP_RESPONSE:
            # A peer answered our own announcement; record it.
            jobs = data[1]
            perf = data[2]
            pubkey_fingerprint = data[3]
            p = peer.Peer(self.client_address[0], jobs, perf, pubkey_fingerprint)
            p.trusted = self.server.daemon.IsTrusted(pubkey_fingerprint)
            p.trusting_me = data[4]
            self.server.daemon.AddPeer(p)

        elif data[0] == EXIT_REQUEST:
            self.server.daemon.DeletePeer(self.client_address[0])
            if self.client_address[0] == self.server.daemon.ip:
                # Our own exit broadcast came back to us: release the lock
                # that PresenceDaemon.shutdown() is blocking on.
                self.server.shutdown_lock.release()
class PresenceDaemon(SocketServer.ThreadingMixIn, SocketServer.UDPServer):
    """Threaded UDP server that announces this test peer and tracks others."""

    def __init__(self, daemon):
        self.daemon = daemon
        address = (daemon.ip, constants.PRESENCE_PORT)
        SocketServer.UDPServer.__init__(self, address, PresenceHandler)
        # Held while an exit broadcast is in flight; see shutdown().
        self.shutdown_lock = threading.Lock()

    def shutdown(self):
        # Acquire once, broadcast our exit, then block on a second acquire
        # until PresenceHandler sees our own EXIT_REQUEST echo and releases
        # the lock; only then is the underlying server shut down.
        self.shutdown_lock.acquire()
        self.SendToAll(json.dumps([EXIT_REQUEST]))
        self.shutdown_lock.acquire()
        self.shutdown_lock.release()
        SocketServer.UDPServer.shutdown(self)

    def SendTo(self, target, message):
        """Send one UDP datagram to *target* on the presence port."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.sendto(message, (target, constants.PRESENCE_PORT))
        sock.close()

    def SendToAll(self, message):
        """Send *message* to hosts .1 through .253 on our own /24 network."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        ip = self.daemon.ip.split(".")
        for i in range(1, 254):
            ip[-1] = str(i)
            sock.sendto(message, (".".join(ip), constants.PRESENCE_PORT))
        sock.close()

    def FindPeers(self):
        """Broadcast a STARTUP_REQUEST announcing this peer's capabilities."""
        request = [STARTUP_REQUEST, self.daemon.jobs, self.daemon.relative_perf,
                   self.daemon.pubkey_fingerprint]
        request = json.dumps(request)
        self.SendToAll(request)
|
ajylee/gpaw-rtxs
|
refs/heads/rtxs-master
|
oldtest/vdw/gr.py
|
3
|
import os
from ase import *
from gpaw import GPAW, setup_paths
from gpaw.vdw import FFTVDWFunctional
from ase.parallel import rank, barrier
from gpaw.atom.generator import Generator
from gpaw.atom.configurations import parameters
def test():
    """Regression test: vdW-corrected binding energy of bilayer graphene.

    NOTE(review): units appear to follow the ASE convention (Angstrom /
    eV) -- confirm against the gpaw test suite.
    """
    # h: interlayer separation being probed (first layer at z = L/2 + h,
    # second at z = L/2); L: cell height (vacuum) along z; a: lattice
    # constant used to construct the orthorhombic graphene cell.
    h=4.02
    vdw = FFTVDWFunctional(verbose=1)
    L=20
    a=3.52
    atoms = Atoms(pbc=(True, True, True), cell=(a/sqrt(2), sqrt(6)*a/2.0, L))
    atoms.append(Atom('C',[1.5*a/sqrt(2)*1.0/3,a/sqrt(2)*sqrt(3)/2*1.0/3, L/2+h]))
    atoms.append(Atom('C',[1.5*a/sqrt(2)*2.0/3,a/sqrt(2)*sqrt(3)/2*2.0/3, L/2+h]))
    atoms.append(Atom('C',[a/sqrt(2)/2.0+1.5*a/sqrt(2)*1.0/3,sqrt(6)*a/4.0+a/sqrt(2)*sqrt(3)/2*1.0/3, L/2+h]))
    atoms.append(Atom('C',[a/sqrt(2)/2.0+1.5*a/sqrt(2)*2.0/3-a/sqrt(2),sqrt(6)*a/4.0+a/sqrt(2)*sqrt(3)/2*2.0/3, L/2+h]))
    atoms.append(Atom('C',[1.5*a/sqrt(2)*1.0/3,a/sqrt(2)*sqrt(3)/2*1.0/3+2.874/2, L/2]))
    atoms.append(Atom('C',[1.5*a/sqrt(2)*2.0/3,a/sqrt(2)*sqrt(3)/2*2.0/3+2.874/2, L/2]))
    atoms.append(Atom('C',[a/sqrt(2)/2.0+1.5*a/sqrt(2)*1.0/3,sqrt(6)*a/4.0+a/sqrt(2)*sqrt(3)/2*1.0/3+2.874/2, L/2]))
    atoms.append(Atom('C',[a/sqrt(2)/2.0+1.5*a/sqrt(2)*2.0/3-a/sqrt(2),sqrt(6)*a/4.0+a/sqrt(2)*sqrt(3)/2*2.0/3+2.874/2, L/2]))
    calc = GPAW(h=0.18, xc='revPBE',kpts=(8,8,1),txt=str(h)+'.txt')
    atoms.set_calculator(calc)
    # bilayer total energy and its vdW correction
    e2 = atoms.get_potential_energy()
    calc.write('gr_dilayer.gpw')
    e2vdw = calc.get_xc_difference(vdw)
    # drop the second set of four atoms and recompute for a single layer
    del atoms[4:]
    e = atoms.get_potential_energy()
    calc.write('gr_dilayer.gpw')
    evdw = calc.get_xc_difference(vdw)
    # binding energy of the bilayer relative to two isolated layers,
    # without (E) and with (Evdw) the vdW correction
    E = 2 * e - e2
    Evdw = E + 2 * evdw - e2vdw
    print E, Evdw
    assert abs(E +0.032131069056 ) < 1e-4
    assert abs(Evdw- 0.144516773316) < 1e-4
test()
|
leorochael/odoo
|
refs/heads/8.0
|
addons/auth_oauth/ir_configparameter.py
|
423
|
# -*- coding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.osv import osv
class ir_configparameter(osv.Model):
    """Extends ir.config_parameter so the built-in OAuth provider is
    keyed to this database."""
    _inherit = 'ir.config_parameter'

    def init(self, cr, force=False):
        # After the standard parameter initialisation, point the bundled
        # OpenERP OAuth provider's client_id at this database's UUID so
        # the provider can identify the database.
        # NOTE(review): only runs when force=True -- presumably at module
        # install/update time; confirm against the ORM's init() contract.
        super(ir_configparameter, self).init(cr, force=force)
        if force:
            IMD = self.pool['ir.model.data']
            oauth_oe = IMD.xmlid_to_object(cr, SUPERUSER_ID, 'auth_oauth.provider_openerp')
            dbuuid = self.get_param(cr, SUPERUSER_ID, 'database.uuid')
            oauth_oe.write({'client_id': dbuuid})
|
catapult-project/catapult-csm
|
refs/heads/master
|
systrace/profile_chrome/perf_tracing_agent.py
|
9
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import optparse
import os
import py_utils
import signal
import subprocess
import sys
import tempfile
from devil.android import device_temp_file
from devil.android.perf import perf_control
from profile_chrome import ui
from systrace import trace_result
from systrace import tracing_agents
_CATAPULT_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..', '..')
sys.path.append(os.path.join(_CATAPULT_DIR, 'telemetry'))
try:
# pylint: disable=F0401
from telemetry.internal.platform.profiler import android_profiling_helper
from telemetry.internal.util import binary_manager
except ImportError:
android_profiling_helper = None
binary_manager = None
_PERF_OPTIONS = [
# Sample across all processes and CPUs to so that the current CPU gets
# recorded to each sample.
'--all-cpus',
# In perf 3.13 --call-graph requires an argument, so use the -g short-hand
# which does not.
'-g',
# Increase priority to avoid dropping samples. Requires root.
'--realtime', '80',
# Record raw samples to get CPU information.
'--raw-samples',
# Increase sampling frequency for better coverage.
'--freq', '2000',
]
class _PerfProfiler(object):
    """Runs `perf record` on an Android device via adb and collects output."""

    def __init__(self, device, perf_binary, categories):
        self._device = device
        # perf writes its profile to a temp file on the device.
        self._output_file = device_temp_file.DeviceTempFile(
            self._device.adb, prefix='perf_output')
        # perf's stdout/stderr captured on the host for error reporting.
        self._log_file = tempfile.TemporaryFile()
        # TODO(jbudorick) Look at providing a way to unhandroll this once the
        # adb rewrite has fully landed.
        device_param = (['-s', str(self._device)] if str(self._device) else [])
        cmd = ['adb'] + device_param + \
              ['shell', perf_binary, 'record',
               '--output', self._output_file.name] + _PERF_OPTIONS
        if categories:
            cmd += ['--event', ','.join(categories)]
        self._perf_control = perf_control.PerfControl(self._device)
        self._perf_control.SetPerfProfilingMode()
        self._perf_process = subprocess.Popen(cmd,
                                              stdout=self._log_file,
                                              stderr=subprocess.STDOUT)

    def SignalAndWait(self):
        # Stop perf with SIGINT, wait for it to finish, then restore the
        # device's default perf mode.
        self._device.KillAll('perf', signum=signal.SIGINT)
        self._perf_process.wait()
        self._perf_control.SetDefaultPerfMode()

    def _FailWithLog(self, msg):
        # Include the captured perf output in the error to aid debugging.
        self._log_file.seek(0)
        log = self._log_file.read()
        raise RuntimeError('%s. Log output:\n%s' % (msg, log))

    def PullResult(self, output_path):
        """Copy the recorded profile off the device; returns its host path."""
        if not self._device.FileExists(self._output_file.name):
            self._FailWithLog('Perf recorded no data')
        perf_profile = os.path.join(output_path,
                                    os.path.basename(self._output_file.name))
        self._device.PullFile(self._output_file.name, perf_profile)
        if not os.stat(perf_profile).st_size:
            os.remove(perf_profile)
            self._FailWithLog('Perf recorded a zero-sized file')
        self._log_file.close()
        self._output_file.close()
        return perf_profile
class PerfProfilerAgent(tracing_agents.TracingAgent):
def __init__(self, device):
    tracing_agents.TracingAgent.__init__(self)
    self._device = device
    # Locates/prepares the perf binary on the device up front.
    self._perf_binary = self._PrepareDevice(device)
    self._perf_instance = None
    self._categories = None
def __repr__(self):
    # Human-readable agent name.
    return 'perf profile'
@staticmethod
def IsSupported():
    # Only available when the telemetry helper module imported
    # successfully (android_profiling_helper is None otherwise).
    return bool(android_profiling_helper)
@staticmethod
def _PrepareDevice(device):
    """Initialise telemetry dependencies and return the device-side
    path to the perf binary."""
    if not 'BUILDTYPE' in os.environ:
        os.environ['BUILDTYPE'] = 'Release'
    if binary_manager.NeedsInit():
        binary_manager.InitDependencyManager(None)
    return android_profiling_helper.PrepareDeviceForPerf(device)
@classmethod
def GetCategories(cls, device):
    """Return the output lines of `perf list` from the device binary."""
    perf_binary = cls._PrepareDevice(device)
    # Perf binary returns non-zero exit status on "list" command.
    return device.RunShellCommand([perf_binary, 'list'], check_return=False)
@py_utils.Timeout(tracing_agents.START_STOP_TIMEOUT)
def StartAgentTracing(self, config, timeout=None):
    """Begin a perf recording using the categories derived from *config*."""
    self._categories = _ComputePerfCategories(config)
    self._perf_instance = _PerfProfiler(self._device,
                                        self._perf_binary,
                                        self._categories)
    return True
@py_utils.Timeout(tracing_agents.START_STOP_TIMEOUT)
def StopAgentTracing(self, timeout=None):
    # No-op (returns None) when tracing was never started.
    if not self._perf_instance:
        return
    self._perf_instance.SignalAndWait()
    return True
@py_utils.Timeout(tracing_agents.GET_RESULTS_TIMEOUT)
def GetResults(self, timeout=None):
    """Pull the recorded trace and wrap it as a 'perf' TraceResult."""
    with open(self._PullTrace(), 'r') as f:
        trace_data = f.read()
    return trace_result.TraceResult('perf', trace_data)
@staticmethod
def _GetInteractivePerfCommand(perfhost_path, perf_profile, symfs_dir,
required_libs, kallsyms):
cmd = '%s report -n -i %s --symfs %s --kallsyms %s' % (
os.path.relpath(perfhost_path, '.'), perf_profile, symfs_dir, kallsyms)
for lib in required_libs:
lib = os.path.join(symfs_dir, lib[1:])
if not os.path.exists(lib):
continue
objdump_path = android_profiling_helper.GetToolchainBinaryPath(
lib, 'objdump')
if objdump_path:
cmd += ' --objdump %s' % os.path.relpath(objdump_path, '.')
break
return cmd
def _PullTrace(self):
symfs_dir = os.path.join(tempfile.gettempdir(),
os.path.expandvars('$USER-perf-symfs'))
if not os.path.exists(symfs_dir):
os.makedirs(symfs_dir)
required_libs = set()
# Download the recorded perf profile.
perf_profile = self._perf_instance.PullResult(symfs_dir)
required_libs = \
android_profiling_helper.GetRequiredLibrariesForPerfProfile(
perf_profile)
if not required_libs:
logging.warning('No libraries required by perf trace. Most likely there '
'are no samples in the trace.')
# Build a symfs with all the necessary libraries.
kallsyms = android_profiling_helper.CreateSymFs(self._device,
symfs_dir,
required_libs,
use_symlinks=False)
perfhost_path = binary_manager.FetchPath(
android_profiling_helper.GetPerfhostName(), 'x86_64', 'linux')
ui.PrintMessage('\nNote: to view the profile in perf, run:')
ui.PrintMessage(' ' + self._GetInteractivePerfCommand(perfhost_path,
perf_profile, symfs_dir, required_libs, kallsyms))
# Convert the perf profile into JSON.
perf_script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'third_party', 'perf_to_tracing.py')
json_file_name = os.path.basename(perf_profile)
with open(os.devnull, 'w') as dev_null, \
open(json_file_name, 'w') as json_file:
cmd = [perfhost_path, 'script', '-s', perf_script_path, '-i',
perf_profile, '--symfs', symfs_dir, '--kallsyms', kallsyms]
if subprocess.call(cmd, stdout=json_file, stderr=dev_null):
logging.warning('Perf data to JSON conversion failed. The result will '
'not contain any perf samples. You can still view the '
'perf data manually as shown above.')
return None
return json_file_name
def SupportsExplicitClockSync(self):
return False
def RecordClockSyncMarker(self, sync_id, did_record_sync_marker_callback):
# pylint: disable=unused-argument
assert self.SupportsExplicitClockSync(), ('Clock sync marker cannot be '
'recorded since explicit clock sync is not supported.')
def _OptionalValueCallback(default_value):
def callback(option, _, __, parser): # pylint: disable=unused-argument
value = default_value
if parser.rargs and not parser.rargs[0].startswith('-'):
value = parser.rargs.pop(0)
setattr(parser.values, option.dest, value)
return callback
class PerfConfig(tracing_agents.TracingConfig):
    """Tracing configuration carrying the perf categories and target device."""

    def __init__(self, perf_categories, device):
        tracing_agents.TracingConfig.__init__(self)
        # The two attributes are independent; order of assignment is not
        # significant.
        self.device = device
        self.perf_categories = perf_categories
def try_create_agent(config):
    """Return a PerfProfilerAgent when perf categories were requested,
    otherwise None."""
    if not config.perf_categories:
        return None
    return PerfProfilerAgent(config.device)
def add_options(parser):
    """Register the perf profiling command-line options on parser.

    The -p/--perf flag takes an optional value: with no value it samples
    'cycles' (via _OptionalValueCallback); passing "list" shows the
    available sample types.
    """
    options = optparse.OptionGroup(parser, 'Perf profiling options')
    options.add_option('-p', '--perf', help='Capture a perf profile with '
                       'the chosen comma-delimited event categories. '
                       'Samples CPU cycles by default. Use "list" to see '
                       'the available sample types.', action='callback',
                       default='', callback=_OptionalValueCallback('cycles'),
                       metavar='PERF_CATEGORIES', dest='perf_categories')
    return options
def get_config(options):
    """Build a PerfConfig from parsed command-line options."""
    return PerfConfig(options.perf_categories, options.device)
def _ComputePerfCategories(config):
    """Split the configured perf categories into a list.

    Returns [] when perf profiling is unsupported on this host or no
    categories were requested.
    """
    if PerfProfilerAgent.IsSupported() and config.perf_categories:
        return config.perf_categories.split(',')
    return []
|
adrian-the-git/mezzanine
|
refs/heads/master
|
mezzanine/forms/defaults.py
|
48
|
"""
Default settings for the ``mezzanine.forms`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
# Hard cap on submitted field values, enforced at the model level.
register_setting(
    name="FORMS_FIELD_MAX_LENGTH",
    description=_("Max length allowed for field values in the forms app."),
    editable=False,
    default=2000,
)

# Hard cap on admin-entered field labels.
register_setting(
    name="FORMS_LABEL_MAX_LENGTH",
    description=_("Max length allowed for field labels in the forms app."),
    editable=False,
    default=200,
)

register_setting(
    name="FORMS_CSV_DELIMITER",
    description=_("Char to use as a field delimiter when exporting form "
                  "responses as CSV."),
    editable=False,
    default=",",
)

# Empty default means uploads fall back to the project's standard storage.
register_setting(
    name="FORMS_UPLOAD_ROOT",
    description=_("Absolute path for storing file uploads for the forms app."),
    editable=False,
    default="",
)

# Each entry: (numeric field ID, dotted field-class path, field name).
register_setting(
    name="FORMS_EXTRA_FIELDS",
    description=_("Extra field types for the forms app. Should contain a "
        "sequence of three-item sequences, each containing the ID, dotted "
        "import path for the field class, and field name, for each custom "
        "field type. The ID is simply a numeric constant for the field, "
        "but cannot be a value already used, so choose a high number such "
        "as 100 or greater to avoid conflicts."),
    editable=False,
    default=(),
)

# Each entry: (existing field ID, dotted widget-class path).
register_setting(
    name="FORMS_EXTRA_WIDGETS",
    description=_("Extra field widgets for the forms app. Should contain a "
        "sequence of two-item sequences, each containing an existing ID "
        "for a form field, and a dotted import path for the widget class."),
    editable=False,
    default=(),
)
|
rdonnelly/ultimate-league-app
|
refs/heads/master
|
src/ultimate/leagues/migrations/0015_auto_20190509_2101.py
|
2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def update_group_lock_date(apps, schema_editor):
    """Backfill group_lock_start_date from each league's waitlist_start_date.

    Uses the historical model (via ``apps``) so the migration stays valid
    as the League model evolves; ``schema_editor`` is unused but required
    by the RunPython signature.
    """
    League = apps.get_model('leagues', 'League')
    # F() keeps the copy in the database: one UPDATE, no per-row fetch.
    League.objects.all().update(
        group_lock_start_date=models.F('waitlist_start_date')
    )
class Migration(migrations.Migration):
    """Data migration: seed the new group_lock_start_date field."""

    dependencies = [
        ('leagues', '0014_league_group_lock_start_date'),
    ]

    operations = [
        # NOTE(review): no reverse function is supplied, so this migration
        # is irreversible as written.
        migrations.RunPython(update_group_lock_date),
    ]
|
Yong-Lee/decode-Django
|
refs/heads/master
|
Django-1.5.1/tests/regressiontests/introspection/tests.py
|
44
|
from __future__ import absolute_import, unicode_literals
from functools import update_wrapper
from django.db import connection
from django.test import TestCase, skipUnlessDBFeature, skipIfDBFeature
from django.utils import six, unittest
from .models import Reporter, Article
if connection.vendor == 'oracle':
    expectedFailureOnOracle = unittest.expectedFailure
else:
    # Identity decorator: on non-Oracle backends the test runs normally.
    # A def instead of an assigned lambda, per PEP 8 (E731).
    def expectedFailureOnOracle(f):
        return f
# The introspection module is optional, so methods tested here might raise
# NotImplementedError. This is perfectly acceptable behavior for the backend
# in question, but the tests need to handle this without failing. Ideally we'd
# skip these tests, but until #4788 is done we'll just ignore them.
#
# The easiest way to accomplish this is to decorate every test case with a
# wrapper that ignores the exception.
#
# The metaclass is just for fun.
def ignore_not_implemented(func):
    """Wrap *func* so that NotImplementedError is swallowed and None
    returned instead; metadata is copied onto the wrapper."""
    def wrapper(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
        except NotImplementedError:
            result = None
        return result
    update_wrapper(wrapper, func)
    return wrapper
class IgnoreNotimplementedError(type):
    """Metaclass that wraps every test* attribute with
    ignore_not_implemented, so optional-backend tests silently pass."""

    def __new__(cls, name, bases, attrs):
        wrapped = dict(
            (key, ignore_not_implemented(value) if key.startswith('test')
             else value)
            for key, value in attrs.items()
        )
        return type.__new__(cls, name, bases, wrapped)
class IntrospectionTests(six.with_metaclass(IgnoreNotimplementedError, TestCase)):
    """Exercises the backend introspection API.

    Every test* method is wrapped by the metaclass so that
    NotImplementedError from optional backend features is ignored rather
    than failing the suite.
    """

    def test_table_names(self):
        tl = connection.introspection.table_names()
        # table_names() is documented here (by assertion) to return a
        # sorted list.
        self.assertEqual(tl, sorted(tl))
        self.assertTrue(Reporter._meta.db_table in tl,
                        "'%s' isn't in table_list()." % Reporter._meta.db_table)
        self.assertTrue(Article._meta.db_table in tl,
                        "'%s' isn't in table_list()." % Article._meta.db_table)

    def test_django_table_names(self):
        cursor = connection.cursor()
        # Create a table Django does not know about; it must not show up.
        cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);')
        tl = connection.introspection.django_table_names()
        cursor.execute("DROP TABLE django_ixn_test_table;")
        self.assertTrue('django_ixn_testcase_table' not in tl,
                        "django_table_names() returned a non-Django table")

    def test_django_table_names_retval_type(self):
        # Ticket #15216
        cursor = connection.cursor()
        cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);')

        tl = connection.introspection.django_table_names(only_existing=True)
        self.assertIs(type(tl), list)

        tl = connection.introspection.django_table_names(only_existing=False)
        self.assertIs(type(tl), list)

    def test_installed_models(self):
        tables = [Article._meta.db_table, Reporter._meta.db_table]
        models = connection.introspection.installed_models(tables)
        self.assertEqual(models, set([Article, Reporter]))

    def test_sequence_list(self):
        sequences = connection.introspection.sequence_list()
        expected = {'table': Reporter._meta.db_table, 'column': 'id'}
        self.assertTrue(expected in sequences,
                        'Reporter sequence not found in sequence_list()')

    def test_get_table_description_names(self):
        cursor = connection.cursor()
        desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
        # Column 0 of each description row is the column name.
        self.assertEqual([r[0] for r in desc],
                         [f.column for f in Reporter._meta.fields])

    def test_get_table_description_types(self):
        cursor = connection.cursor()
        desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
        self.assertEqual(
            [datatype(r[1], r) for r in desc],
            ['IntegerField', 'CharField', 'CharField', 'CharField', 'BigIntegerField']
        )

    # The following test fails on Oracle due to #17202 (can't correctly
    # inspect the length of character columns).
    @expectedFailureOnOracle
    def test_get_table_description_col_lengths(self):
        cursor = connection.cursor()
        desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
        # Column 3 of a description row is the display size.
        self.assertEqual(
            [r[3] for r in desc if datatype(r[1], r) == 'CharField'],
            [30, 30, 75]
        )

    # Oracle forces null=True under the hood in some cases (see
    # https://docs.djangoproject.com/en/dev/ref/databases/#null-and-empty-strings)
    # so its idea about null_ok in cursor.description is different from ours.
    @skipIfDBFeature('interprets_empty_strings_as_nulls')
    def test_get_table_description_nullable(self):
        cursor = connection.cursor()
        desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
        # Column 6 of a description row is null_ok.
        self.assertEqual(
            [r[6] for r in desc],
            [False, False, False, False, True]
        )

    # Regression test for #9991 - 'real' types in postgres
    @skipUnlessDBFeature('has_real_datatype')
    def test_postgresql_real_type(self):
        cursor = connection.cursor()
        cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);")
        desc = connection.introspection.get_table_description(cursor, 'django_ixn_real_test_table')
        cursor.execute('DROP TABLE django_ixn_real_test_table;')
        self.assertEqual(datatype(desc[0][1], desc[0]), 'FloatField')

    def test_get_relations(self):
        cursor = connection.cursor()
        relations = connection.introspection.get_relations(cursor, Article._meta.db_table)

        # Older versions of MySQL don't have the chops to report on this stuff,
        # so just skip it if no relations come back. If they do, though, we
        # should test that the response is correct.
        if relations:
            # That's {field_index: (field_index_other_table, other_table)}
            self.assertEqual(relations, {3: (0, Reporter._meta.db_table)})

    def test_get_key_columns(self):
        cursor = connection.cursor()
        key_columns = connection.introspection.get_key_columns(cursor, Article._meta.db_table)
        self.assertEqual(key_columns, [('reporter_id', Reporter._meta.db_table, 'id')])

    def test_get_primary_key_column(self):
        cursor = connection.cursor()
        primary_key_column = connection.introspection.get_primary_key_column(cursor, Article._meta.db_table)
        self.assertEqual(primary_key_column, 'id')

    def test_get_indexes(self):
        cursor = connection.cursor()
        indexes = connection.introspection.get_indexes(cursor, Article._meta.db_table)
        self.assertEqual(indexes['reporter_id'], {'unique': False, 'primary_key': False})

    def test_get_indexes_multicol(self):
        """
        Test that multicolumn indexes are not included in the introspection
        results.
        """
        cursor = connection.cursor()
        indexes = connection.introspection.get_indexes(cursor, Reporter._meta.db_table)
        self.assertNotIn('first_name', indexes)
        self.assertIn('id', indexes)
def datatype(dbtype, description):
    """Helper to convert a data type into a string."""
    field_type = connection.introspection.get_field_type(dbtype, description)
    # Some backends return (field_type, params); keep only the name.
    # `type(...) is tuple` deliberately preserved (exact-type check).
    if type(field_type) is tuple:
        return field_type[0]
    return field_type
|
wsmith323/django
|
refs/heads/master
|
django/db/backends/utils.py
|
430
|
from __future__ import unicode_literals
import datetime
import decimal
import hashlib
import logging
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
logger = logging.getLogger('django.db.backends')
class CursorWrapper(object):
    """Wraps a DB-API cursor, translating driver exceptions through
    ``db.wrap_database_errors`` and validating transaction state before
    each query."""

    def __init__(self, cursor, db):
        self.cursor = cursor
        self.db = db

    # Cursor attributes whose calls can raise driver errors and must
    # therefore be wrapped for error translation.
    WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])

    def __getattr__(self, attr):
        cursor_attr = getattr(self.cursor, attr)
        if attr in CursorWrapper.WRAP_ERROR_ATTRS:
            return self.db.wrap_database_errors(cursor_attr)
        else:
            return cursor_attr

    def __iter__(self):
        with self.db.wrap_database_errors:
            for item in self.cursor:
                yield item

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Ticket #17671 - Close instead of passing thru to avoid backend
        # specific behavior. Catch errors liberally because errors in cleanup
        # code aren't useful.
        try:
            self.close()
        except self.db.Database.Error:
            pass

    # The following methods cannot be implemented in __getattr__, because the
    # code must run when the method is invoked, not just when it is accessed.

    def callproc(self, procname, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.callproc(procname)
            else:
                return self.cursor.callproc(procname, params)

    def execute(self, sql, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.execute(sql)
            else:
                return self.cursor.execute(sql, params)

    def executemany(self, sql, param_list):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
    """CursorWrapper that additionally times queries and records them on
    the connection's ``queries_log`` (debug mode)."""

    # XXX callproc isn't instrumented at this time.

    def execute(self, sql, params=None):
        start = time()
        try:
            return super(CursorDebugWrapper, self).execute(sql, params)
        finally:
            # Logging happens in `finally` so failing queries are recorded
            # too.
            stop = time()
            duration = stop - start
            sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            self.db.queries_log.append({
                'sql': sql,
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, params),
                extra={'duration': duration, 'sql': sql, 'params': params}
            )

    def executemany(self, sql, param_list):
        start = time()
        try:
            return super(CursorDebugWrapper, self).executemany(sql, param_list)
        finally:
            stop = time()
            duration = stop - start
            try:
                times = len(param_list)
            except TypeError:           # param_list could be an iterator
                times = '?'
            self.db.queries_log.append({
                'sql': '%s times: %s' % (times, sql),
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, param_list),
                extra={'duration': duration, 'sql': sql, 'params': param_list}
            )
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
    """Parse an ISO 'YYYY-MM-DD' string into a date; null input maps to None."""
    if not s:
        return None
    year, month, day = s.split('-')
    return datetime.date(int(year), int(month), int(day))
def typecast_time(s):  # does NOT store time zone information
    """Parse 'HH:MM:SS[.ffffff]' into a time; null input maps to None."""
    if not s:
        return None
    hour, minute, rest = s.split(':')
    sec, _, frac = rest.partition('.')
    if not frac:
        frac = '0'
    # Fraction -> microseconds via float so short fractions scale correctly.
    micro = int(float('.' + frac) * 1000000)
    return datetime.time(int(hour), int(minute), int(sec), micro)
def typecast_timestamp(s):  # does NOT store time zone information
    """Parse a database timestamp string into a datetime.

    Falls back to a plain date when there is no time part.  A trailing
    timezone offset is parsed off and discarded (see comment below).
    """
    # "2005-07-29 15:48:00.590358-05"
    # "2005-07-29 09:56:00-05"
    if not s:
        return None
    if ' ' not in s:
        return typecast_date(s)
    d, t = s.split()
    # Extract timezone information, if it exists. Currently we just throw
    # it away, but in the future we may make use of it.
    if '-' in t:
        t, tz = t.split('-', 1)
        tz = '-' + tz
    elif '+' in t:
        t, tz = t.split('+', 1)
        tz = '+' + tz
    else:
        tz = ''
    dates = d.split('-')
    times = t.split(':')
    seconds = times[2]
    if '.' in seconds:  # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    # (microseconds + '000000')[:6] right-pads short fractions to exactly
    # six digits so e.g. '.5' becomes 500000 microseconds.
    tzinfo = utc if settings.USE_TZ else None
    return datetime.datetime(int(dates[0]), int(dates[1]), int(dates[2]),
                             int(times[0]), int(times[1]), int(seconds),
                             int((microseconds + '000000')[:6]), tzinfo)
def typecast_decimal(s):
    """Convert a database string to a Decimal; None and '' map to None."""
    return decimal.Decimal(s) if s not in (None, '') else None
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
    """Render a Decimal for the database as a string; None passes through."""
    return None if d is None else str(d)
def truncate_name(name, length=None, hash_len=4):
    """Shortens a string to a repeatable mangled version with the given length.
    """
    # Nothing to do when no limit is given or the name already fits.
    if length is None or len(name) <= length:
        return name

    # Repeatable: the same name always hashes to the same suffix, so the
    # truncated identifier is stable across runs/processes.
    hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
    return '%s%s' % (name[:length - hash_len], hsh)
def format_number(value, max_digits, decimal_places):
    """
    Formats a number into a string with the requisite number of digits and
    decimal places.
    """
    if value is None:
        return None
    if isinstance(value, decimal.Decimal):
        context = decimal.getcontext().copy()
        if max_digits is not None:
            context.prec = max_digits
        if decimal_places is None:
            # No fixed number of decimals: trap on precision loss rather
            # than silently rounding.
            context.traps[decimal.Rounded] = 1
            quantized = context.create_decimal(value)
        else:
            exponent = decimal.Decimal(".1") ** decimal_places
            quantized = value.quantize(exponent, context=context)
        return "{:f}".format(quantized)
    if decimal_places is None:
        return "{:f}".format(value)
    return "%.*f" % (decimal_places, value)
|
lmyrefelt/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/migration/__init__.py
|
12133432
| |
tbeadle/django
|
refs/heads/master
|
django/conf/locale/lv/__init__.py
|
12133432
| |
NaturalGIS/naturalgis_qgis
|
refs/heads/master
|
python/plugins/processing/algs/saga/__init__.py
|
12133432
| |
CiscoSystems/tempest
|
refs/heads/master
|
tempest/cli/simple_read_only/__init__.py
|
12133432
| |
fernandezcuesta/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/misc/__init__.py
|
12133432
| |
SocieteCitoyenne/FranceData
|
refs/heads/master
|
francedata/spiders/__init__.py
|
2415
|
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
|
dairiki/puppyserv
|
refs/heads/master
|
puppyserv/paste.py
|
1
|
# -*- coding: utf-8 -*-
""" WSGI deployment helpers
"""
from __future__ import absolute_import
from datetime import datetime
from webob.dec import wsgify
def add_server_headers_filter(global_config, **settings):
    """Middleware factory adding headers a real HTTP server would set.

    Used when uwsgi is serving HTTP requests directly (no fronting server
    to fill in ``Server`` and ``Date``).  ``global_config`` and
    ``settings`` are accepted for filter-factory compatibility but unused.
    """
    from puppyserv import SERVER_NAME

    # Renamed from `filter` to avoid shadowing the builtin; the returned
    # callable's behavior is unchanged.
    @wsgify.middleware
    def server_headers_filter(request, app):
        response = request.get_response(app)
        response.server = SERVER_NAME
        response.date = datetime.utcnow()
        return response

    return server_headers_filter
|
ahoppen/swift
|
refs/heads/master
|
utils/apply-fixit-edits.py
|
48
|
#!/usr/bin/env python
# utils/apply-fixit-edits.py - Apply edits from .remap files -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import print_function
import argparse
import collections
import json
import os
import sys
def find_remap_files(path):
    """Yield the full path of every '.remap' file under path, recursively."""
    for root, _, files in os.walk(path):
        for name in files:
            if name.endswith(".remap"):
                yield os.path.join(root, name)
def apply_edits(path):
    """Apply every fix-it edit recorded in '.remap' files under *path*.

    Returns 0 on success, 1 when no remap files were found.
    """
    # BUG FIX: find_remap_files() returns a generator, which is always
    # truthy, so the original `if not remap_files` could never fire.
    # Materialize the generator so the emptiness check works.
    remap_files = list(find_remap_files(path))
    if not remap_files:
        print("No remap files found")
        return 1

    edits_per_file = collections.defaultdict(list)
    for remap_file in remap_files:
        with open(remap_file) as f:
            json_data = f.read()
            # Strip trailing commas so the loosely generated JSON parses.
            json_data = json_data.replace(",\n }", "\n }")
            json_data = json_data.replace(",\n]", "\n]")
            curr_edits = json.loads(json_data)
            for ed in curr_edits:
                fname = ed["file"]
                offset = ed["offset"]
                length = ed.get("remove", 0)
                text = ed.get("text", "")
                edits_per_file[fname].append((offset, length, text))

    # .items() instead of the Python-2-only .iteritems() (works on both).
    for fname, edits in edits_per_file.items():
        print('Updating', fname)
        # Apply edits back-to-front so earlier offsets stay valid.
        edits.sort(reverse=True)
        with open(fname) as f:
            file_data = f.read()
        for ed in edits:
            offset, length, text = ed
            file_data = file_data[:offset] + str(text) + \
                file_data[offset + length:]
        with open(fname, 'w') as f:
            f.write(file_data)
    return 0
def main():
    """Parse the command line and apply all fix-it edits found there."""
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="""Finds all .remap files in a directory and applies their
edits to the source files.""")
    arg_parser.add_argument("build_dir_path",
                            help="path to index info")
    parsed = arg_parser.parse_args()
    return apply_edits(parsed.build_dir_path)
# Script entry point: propagate apply_edits' status as the exit code.
if __name__ == "__main__":
    sys.exit(main())
|
Nowheresly/odoo
|
refs/heads/8.0
|
addons/base_report_designer/plugin/openerp_report_designer/bin/script/LoginTest.py
|
384
|
#########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
# Only wire up the GUI login check when run as a macro/script, not when
# loaded as part of the packaged extension.  "<>" is the removed Python 2
# inequality operator; "!=" is equivalent and also valid on Python 2.
if __name__ != "package":
    from ServerParameter import *
    from lib.gui import *

    class LoginTest:
        """Opens the connection dialog when no login session exists."""

        def __init__(self):
            # `loginstatus` and `Change` come from the star-imported
            # GUI/session globals above.
            if not loginstatus:
                Change(None)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
alexeyum/scikit-learn
|
refs/heads/master
|
examples/model_selection/plot_underfitting_overfitting.py
|
53
|
"""
============================
Underfitting vs. Overfitting
============================
This example demonstrates the problems of underfitting and overfitting and
how we can use linear regression with polynomial features to approximate
nonlinear functions. The plot shows the function that we want to approximate,
which is a part of the cosine function. In addition, the samples from the
real function and the approximations of different models are displayed. The
models have polynomial features of different degrees. We can see that a
linear function (polynomial with degree 1) is not sufficient to fit the
training samples. This is called **underfitting**. A polynomial of degree 4
approximates the true function almost perfectly. However, for higher degrees
the model will **overfit** the training data, i.e. it learns the noise of the
training data.
We evaluate quantitatively **overfitting** / **underfitting** by using
cross-validation. We calculate the mean squared error (MSE) on the validation
set, the higher, the less likely the model generalizes correctly from the
training data.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score

np.random.seed(0)

n_samples = 30
degrees = [1, 4, 15]


def true_fun(X):
    """Ground-truth function the models try to approximate."""
    return np.cos(1.5 * np.pi * X)


X = np.sort(np.random.rand(n_samples))
y = true_fun(X) + np.random.randn(n_samples) * 0.1

plt.figure(figsize=(14, 5))
for i in range(len(degrees)):
    ax = plt.subplot(1, len(degrees), i + 1)
    plt.setp(ax, xticks=(), yticks=())

    polynomial_features = PolynomialFeatures(degree=degrees[i],
                                             include_bias=False)
    linear_regression = LinearRegression()
    pipeline = Pipeline([("polynomial_features", polynomial_features),
                         ("linear_regression", linear_regression)])
    pipeline.fit(X[:, np.newaxis], y)

    # Evaluate the models using crossvalidation.  "mean_squared_error" was
    # deprecated as a scorer name in scikit-learn 0.18 (the release that
    # introduced sklearn.model_selection, imported above) and removed in
    # 0.20; the supported name is "neg_mean_squared_error", which returns
    # negated errors -- hence the minus sign in the title below.
    scores = cross_val_score(pipeline, X[:, np.newaxis], y,
                             scoring="neg_mean_squared_error", cv=10)

    X_test = np.linspace(0, 1, 100)
    plt.plot(X_test, pipeline.predict(X_test[:, np.newaxis]), label="Model")
    plt.plot(X_test, true_fun(X_test), label="True function")
    plt.scatter(X, y, label="Samples")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.xlim((0, 1))
    plt.ylim((-2, 2))
    plt.legend(loc="best")
    plt.title("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
        degrees[i], -scores.mean(), scores.std()))
plt.show()
|
nikolas/edx-platform
|
refs/heads/master
|
openedx/core/operations.py
|
82
|
import os
import signal
import tempfile
import gc
from datetime import datetime
from meliae import scanner
def dump_memory(signum, frame):
    """
    Dump memory stats for the current process to a temp directory.

    Uses the meliae output format; a pre-GC dump is written first, then one
    dump after collecting each GC generation.  `signum` and `frame` are the
    standard signal-handler arguments and are unused.
    """
    timestamp = datetime.now().isoformat()
    format_str = '{}/meliae.{}.{}.{{}}.dump'.format(
        tempfile.gettempdir(),
        timestamp,
        os.getpid(),
    )

    scanner.dump_all_objects(format_str.format('pre-gc'))

    # Force garbage collection, dumping after each generation so the effect
    # of collecting that generation is visible.  range() instead of the
    # Python-2-only xrange(); at 3 iterations the difference is irrelevant.
    for gen in range(3):
        gc.collect(gen)
        scanner.dump_all_objects(
            format_str.format("gc-gen-{}".format(gen))
        )
def install_memory_dumper(dump_signal=signal.SIGPROF):
    """
    Install a signal handler on `signal` to dump memory stats for the current process.
    """
    # SIGPROF by default; delivering that signal triggers dump_memory above.
    signal.signal(dump_signal, dump_memory)
|
yarikoptic/scrapy
|
refs/heads/master
|
tests/test_utils_signal.py
|
121
|
from testfixtures import LogCapture
from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet import defer, reactor
from pydispatch import dispatcher
from scrapy.utils.signal import send_catch_log, send_catch_log_deferred
class SendCatchLogTest(unittest.TestCase):
    """send_catch_log must invoke every connected handler, log handler
    errors, and collect (handler, result-or-Failure) pairs."""

    @defer.inlineCallbacks
    def test_send_catch_log(self):
        test_signal = object()
        handlers_called = set()
        dispatcher.connect(self.error_handler, signal=test_signal)
        dispatcher.connect(self.ok_handler, signal=test_signal)
        with LogCapture() as l:
            result = yield defer.maybeDeferred(
                self._get_result, test_signal, arg='test',
                handlers_called=handlers_called
            )

        assert self.error_handler in handlers_called
        assert self.ok_handler in handlers_called

        # The failing handler must produce exactly one ERROR log record
        # that names it.
        self.assertEqual(len(l.records), 1)
        record = l.records[0]
        self.assertIn('error_handler', record.getMessage())
        self.assertEqual(record.levelname, 'ERROR')
        # Results keep handler order: (handler, Failure-or-return-value).
        self.assertEqual(result[0][0], self.error_handler)
        self.assert_(isinstance(result[0][1], Failure))
        self.assertEqual(result[1], (self.ok_handler, "OK"))

        dispatcher.disconnect(self.error_handler, signal=test_signal)
        dispatcher.disconnect(self.ok_handler, signal=test_signal)

    def _get_result(self, signal, *a, **kw):
        # Overridden by subclasses to exercise the deferred variant.
        return send_catch_log(signal, *a, **kw)

    def error_handler(self, arg, handlers_called):
        handlers_called.add(self.error_handler)
        # Deliberate failure: ZeroDivisionError exercises error logging.
        a = 1/0

    def ok_handler(self, arg, handlers_called):
        handlers_called.add(self.ok_handler)
        assert arg == 'test'
        return "OK"
class SendCatchLogDeferredTest(SendCatchLogTest):
    """Same assertions as SendCatchLogTest, routed through
    send_catch_log_deferred."""

    def _get_result(self, signal, *a, **kw):
        return send_catch_log_deferred(signal, *a, **kw)
class SendCatchLogDeferredTest2(SendCatchLogTest):
    """Deferred variant where the ok handler itself returns a Deferred
    that fires asynchronously."""

    def ok_handler(self, arg, handlers_called):
        handlers_called.add(self.ok_handler)
        assert arg == 'test'
        d = defer.Deferred()
        # Fire on the next reactor turn to force the asynchronous path.
        reactor.callLater(0, d.callback, "OK")
        return d

    def _get_result(self, signal, *a, **kw):
        return send_catch_log_deferred(signal, *a, **kw)
class SendCatchLogTest2(unittest.TestCase):
    """The non-deferred send_catch_log must log an error when a handler
    returns a Deferred it cannot wait on."""

    def test_error_logged_if_deferred_not_supported(self):
        test_signal = object()
        test_handler = lambda: defer.Deferred()
        dispatcher.connect(test_handler, test_signal)
        with LogCapture() as l:
            send_catch_log(test_signal)
        self.assertEqual(len(l.records), 1)
        self.assertIn("Cannot return deferreds from signal handler", str(l))
        dispatcher.disconnect(test_handler, test_signal)
|
aebrahim/CyLP
|
refs/heads/master
|
cylp/py/pivots/DualDantzigPivot.py
|
3
|
'''
As a part of ``cylp.python.pivots`` it implements Dantzig's
Simplex dual pivot rule. Although it already exists in CLP,
for testing purposes we implement one in Python.
'''
import sys
import numpy as np
from operator import itemgetter
from random import shuffle
from math import floor
from DualPivotPythonBase import DualPivotPythonBase
#from cylp.py.pivots import DantzigPivot
class DualDantzigPivot(DualPivotPythonBase):
    '''
    Dantzig's dual pivot rule implementation.
    .. _custom-dual-pivot-usage:
    **Usage**
    >>> from cylp.cy import CyClpSimplex
    >>> from cylp.py.pivots.DualDantzigPivot import DualDantzigPivot
    >>> from cylp.py.pivots.DualDantzigPivot import getMpsExample
    >>> # Get the path to a sample mps file
    >>> f = getMpsExample()
    >>> s = CyClpSimplex()
    >>> s.readMps(f) # Returns 0 if OK
    0
    >>> pivot = DualDantzigPivot(s)
    >>> s.setDualPivotMethod(pivot)
    >>> s.dual()
    'optimal'
    >>> round(s.objectiveValue, 5)
    2520.57174
    '''

    def __init__(self, clpModel):
        # Total variable count: structural columns + one slack per row.
        self.dim = clpModel.nRows + clpModel.nCols
        self.clpModel = clpModel

    def pivotRow(self):
        """Return the index of the most primal-infeasible basic variable,
        or -1 when every basic variable is within primalTolerance."""
        model = self.clpModel
        nConstraints = model.nConstraints
        basicVarInds = model.basicVariables
        u = model.upper[basicVarInds]
        l = model.lower[basicVarInds]
        s = model.solution[basicVarInds]
        # Positive where a bound is violated (above upper or below lower).
        infeasibilities = np.maximum(s - u, l - s)
        m = max(infeasibilities)
        if m > model.primalTolerance:
            return np.argmax(infeasibilities)
        return -1

    def updateWeights(self, inp, spare, spare2, updatedColumn):
        """Run the FT update on updatedColumn and return its entry at the
        current pivot row (0 when absent in packed mode).

        `inp` and `spare2` are part of the CLP callback signature but are
        unused by this rule.
        """
        model = self.clpModel
        pr = model.pivotRow()
        model.updateColumnFT(spare, updatedColumn)
        indices = updatedColumn.indices
        elements = updatedColumn.elements
        if updatedColumn.isInPackedMode:
            # Packed storage: find the position of the pivot row's entry.
            if pr in indices:
                ind = np.where(indices==pr)[0][0]
                return elements[ind]
        else:
            return elements[pr]
        return 0

    def updatePrimalSolution(self, primalUpdate, primalRatio, objectiveChange):
        """Apply primalRatio * primalUpdate to the basic solution, clear
        the update vector, accumulate the objective delta into
        objectiveChange[0], and return that delta."""
        model = self.clpModel
        nConstraints = model.nConstraints
        basicVarInds = model.basicVariables
        rowNumbers = primalUpdate.indices
        elements = primalUpdate.elements
        nElements = primalUpdate.nElements
        changeObj = 0
        # NOTE(review): `nConstraints` and `sol` below are computed but
        # never used.
        sol = model.solution[basicVarInds[rowNumbers]]
        cost = model.cost[basicVarInds[rowNumbers]]
        if primalUpdate.isInPackedMode:
            change = primalRatio * elements[:nElements]
            model.solution[basicVarInds[rowNumbers]] -= change
        else:
            change = primalRatio * elements[rowNumbers]
            model.solution[basicVarInds[rowNumbers]] -= change
        changeObj = -np.dot(change, cost)
        primalUpdate.clear()
        objectiveChange[0] += changeObj
        return changeObj
def getMpsExample():
    """Return the path of the sample MPS file shipped with CyLP.

    Requires the CYLP_SOURCE_DIR environment variable to point at the
    CyLP source checkout; raises KeyError when it is unset.
    """
    import os
    # The original's unused `import inspect` and `import sys` were removed.
    cylpDir = os.environ['CYLP_SOURCE_DIR']
    return os.path.join(cylpDir, 'cylp', 'input', 'p0033.mps')
if __name__ == "__main__":
    # print() call syntax works on Python 2 as well for a single argument
    # and is required on Python 3; the original `print sys.argv` statement
    # is Python-2-only and a SyntaxError on Python 3.
    print(sys.argv)
    if len(sys.argv) == 1:
        # No MPS file given: run the doctests in this module instead.
        import doctest
        doctest.testmod()
    else:
        from cylp.cy import CyClpSimplex
        from cylp.py.pivots import DualDantzigPivot
        s = CyClpSimplex()
        s.readMps(sys.argv[1])  # Returns 0 if OK
        pivot = DualDantzigPivot(s)
        s.setDualPivotMethod(pivot)
        s.dual()
|
miragshin/ZeroNet
|
refs/heads/master
|
src/lib/__init__.py
|
12133432
| |
cosmiclattes/TPBviz
|
refs/heads/master
|
torrent/lib/python2.7/site-packages/django/conf/locale/vi/__init__.py
|
12133432
| |
wking/swc-amy
|
refs/heads/master
|
workshops/migrations/0050_merge.py
|
3
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Merge migration: reconciles the two parallel migration branches
    # ending at 0040_add_country_to_online_events and
    # 0049_auto_20150916_0544. It intentionally performs no schema
    # operations of its own.

    dependencies = [
        ('workshops', '0040_add_country_to_online_events'),
        ('workshops', '0049_auto_20150916_0544'),
    ]

    operations = [
    ]
|
kelseyoo14/Wander
|
refs/heads/master
|
venv_2_7/lib/python2.7/site-packages/pandas/tests/test_config.py
|
13
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import pandas as pd
import unittest
import warnings
import nose
class TestConfig(unittest.TestCase):
    """Tests for the pandas options machinery (``pd.core.config``):
    registration, get/set, validation, deprecation/rerouting, prefixes,
    callbacks and context managers."""

    _multiprocess_can_split_ = True

    def __init__(self, *args):
        super(TestConfig, self).__init__(*args)

        from copy import deepcopy
        # Snapshot the real global config state so tearDown can restore it
        # after each test mutated the (emptied) registries.
        self.cf = pd.core.config
        self.gc = deepcopy(getattr(self.cf, '_global_config'))
        self.do = deepcopy(getattr(self.cf, '_deprecated_options'))
        self.ro = deepcopy(getattr(self.cf, '_registered_options'))

    def setUp(self):
        # Start every test from empty registries so tests are independent.
        setattr(self.cf, '_global_config', {})
        setattr(
            self.cf, 'options', self.cf.DictWrapper(self.cf._global_config))
        setattr(self.cf, '_deprecated_options', {})
        setattr(self.cf, '_registered_options', {})

    def tearDown(self):
        # Restore the state captured in __init__.
        setattr(self.cf, '_global_config', self.gc)
        setattr(self.cf, '_deprecated_options', self.do)
        setattr(self.cf, '_registered_options', self.ro)

    def test_api(self):
        # the pandas object exposes the user API
        self.assertTrue(hasattr(pd, 'get_option'))
        self.assertTrue(hasattr(pd, 'set_option'))
        self.assertTrue(hasattr(pd, 'reset_option'))
        self.assertTrue(hasattr(pd, 'describe_option'))

    def test_is_one_of_factory(self):
        # The generated validator accepts listed values, rejects others.
        v = self.cf.is_one_of_factory([None,12])

        v(12)
        v(None)
        self.assertRaises(ValueError,v,1.1)

    def test_register_option(self):
        self.cf.register_option('a', 1, 'doc')

        # can't register an already registered option
        self.assertRaises(KeyError, self.cf.register_option, 'a', 1, 'doc')

        # can't register an already registered option
        self.assertRaises(KeyError, self.cf.register_option, 'a.b.c.d1', 1,
                          'doc')
        self.assertRaises(KeyError, self.cf.register_option, 'a.b.c.d2', 1,
                          'doc')

        # no python keywords
        self.assertRaises(ValueError, self.cf.register_option, 'for', 0)
        self.assertRaises(ValueError, self.cf.register_option, 'a.for.b', 0)
        # must be valid identifier (ensure attribute access works)
        self.assertRaises(ValueError, self.cf.register_option,
                          'Oh my Goddess!', 0)

        # we can register options several levels deep
        # without predefining the intermediate steps
        # and we can define differently named options
        # in the same namespace
        self.cf.register_option('k.b.c.d1', 1, 'doc')
        self.cf.register_option('k.b.c.d2', 1, 'doc')

    def test_describe_option(self):
        self.cf.register_option('a', 1, 'doc')
        self.cf.register_option('b', 1, 'doc2')
        self.cf.deprecate_option('b')
        self.cf.register_option('c.d.e1', 1, 'doc3')
        self.cf.register_option('c.d.e2', 1, 'doc4')
        self.cf.register_option('f', 1)
        self.cf.register_option('g.h', 1)
        self.cf.register_option('k', 2)
        self.cf.deprecate_option('g.h', rkey="k")
        self.cf.register_option('l', "foo")

        # non-existent keys raise KeyError
        self.assertRaises(KeyError, self.cf.describe_option, 'no.such.key')

        # we can get the description for any key we registered
        self.assertTrue(
            'doc' in self.cf.describe_option('a', _print_desc=False))
        self.assertTrue(
            'doc2' in self.cf.describe_option('b', _print_desc=False))
        self.assertTrue(
            'precated' in self.cf.describe_option('b', _print_desc=False))
        self.assertTrue(
            'doc3' in self.cf.describe_option('c.d.e1', _print_desc=False))
        self.assertTrue(
            'doc4' in self.cf.describe_option('c.d.e2', _print_desc=False))

        # if no doc is specified we get a default message
        # saying "description not available"
        self.assertTrue(
            'vailable' in self.cf.describe_option('f', _print_desc=False))
        self.assertTrue(
            'vailable' in self.cf.describe_option('g.h', _print_desc=False))
        self.assertTrue(
            'precated' in self.cf.describe_option('g.h', _print_desc=False))
        self.assertTrue(
            'k' in self.cf.describe_option('g.h', _print_desc=False))

        # default is reported
        self.assertTrue(
            'foo' in self.cf.describe_option('l', _print_desc=False))
        # current value is reported
        self.assertFalse(
            'bar' in self.cf.describe_option('l', _print_desc=False))
        self.cf.set_option("l","bar")
        self.assertTrue(
            'bar' in self.cf.describe_option('l', _print_desc=False))

    def test_case_insensitive(self):
        # Option keys are looked up case-insensitively.
        self.cf.register_option('KanBAN', 1, 'doc')

        self.assertTrue(
            'doc' in self.cf.describe_option('kanbaN', _print_desc=False))
        self.assertEqual(self.cf.get_option('kanBaN'), 1)
        self.cf.set_option('KanBan', 2)
        self.assertEqual(self.cf.get_option('kAnBaN'), 2)

        # gets of non-existent keys fail
        self.assertRaises(KeyError, self.cf.get_option, 'no_such_option')
        self.cf.deprecate_option('KanBan')

        # testing warning with catch_warning was only added in 2.6
        self.assertTrue(self.cf._is_deprecated('kAnBaN'))

    def test_get_option(self):
        self.cf.register_option('a', 1, 'doc')
        self.cf.register_option('b.c', 'hullo', 'doc2')
        self.cf.register_option('b.b', None, 'doc2')

        # gets of existing keys succeed
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')
        self.assertTrue(self.cf.get_option('b.b') is None)

        # gets of non-existent keys fail
        self.assertRaises(KeyError, self.cf.get_option, 'no_such_option')

    def test_set_option(self):
        self.cf.register_option('a', 1, 'doc')
        self.cf.register_option('b.c', 'hullo', 'doc2')
        self.cf.register_option('b.b', None, 'doc2')

        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')
        self.assertTrue(self.cf.get_option('b.b') is None)

        self.cf.set_option('a', 2)
        self.cf.set_option('b.c', 'wurld')
        self.cf.set_option('b.b', 1.1)

        self.assertEqual(self.cf.get_option('a'), 2)
        self.assertEqual(self.cf.get_option('b.c'), 'wurld')
        self.assertEqual(self.cf.get_option('b.b'), 1.1)

        self.assertRaises(KeyError, self.cf.set_option, 'no.such.key', None)

    def test_set_option_empty_args(self):
        self.assertRaises(ValueError, self.cf.set_option)

    def test_set_option_uneven_args(self):
        # set_option takes key/value pairs; an odd count is rejected.
        self.assertRaises(ValueError, self.cf.set_option, 'a.b', 2, 'b.c')

    def test_set_option_invalid_single_argument_type(self):
        self.assertRaises(ValueError, self.cf.set_option, 2)

    def test_set_option_multiple(self):
        self.cf.register_option('a', 1, 'doc')
        self.cf.register_option('b.c', 'hullo', 'doc2')
        self.cf.register_option('b.b', None, 'doc2')

        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')
        self.assertTrue(self.cf.get_option('b.b') is None)

        # one call can set several options at once
        self.cf.set_option('a', '2', 'b.c', None, 'b.b', 10.0)

        self.assertEqual(self.cf.get_option('a'), '2')
        self.assertTrue(self.cf.get_option('b.c') is None)
        self.assertEqual(self.cf.get_option('b.b'), 10.0)

    def test_validation(self):
        self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
        self.cf.register_option('b.c', 'hullo', 'doc2',
                                validator=self.cf.is_text)
        self.assertRaises(ValueError, self.cf.register_option, 'a.b.c.d2',
                          'NO', 'doc', validator=self.cf.is_int)

        self.cf.set_option('a', 2)  # int is_int
        self.cf.set_option('b.c', 'wurld')  # str is_str

        self.assertRaises(
            ValueError, self.cf.set_option, 'a', None)  # None not is_int
        self.assertRaises(ValueError, self.cf.set_option, 'a', 'ab')
        self.assertRaises(ValueError, self.cf.set_option, 'b.c', 1)

    def test_reset_option(self):
        self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
        self.cf.register_option('b.c', 'hullo', 'doc2',
                                validator=self.cf.is_str)
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')

        self.cf.set_option('a', 2)
        self.cf.set_option('b.c', 'wurld')
        self.assertEqual(self.cf.get_option('a'), 2)
        self.assertEqual(self.cf.get_option('b.c'), 'wurld')

        # resetting one option leaves the other modified
        self.cf.reset_option('a')
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'wurld')
        self.cf.reset_option('b.c')
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')

    def test_reset_option_all(self):
        self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
        self.cf.register_option('b.c', 'hullo', 'doc2',
                                validator=self.cf.is_str)
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')

        self.cf.set_option('a', 2)
        self.cf.set_option('b.c', 'wurld')
        self.assertEqual(self.cf.get_option('a'), 2)
        self.assertEqual(self.cf.get_option('b.c'), 'wurld')

        # the special key "all" resets every registered option
        self.cf.reset_option("all")
        self.assertEqual(self.cf.get_option('a'), 1)
        self.assertEqual(self.cf.get_option('b.c'), 'hullo')

    def test_deprecate_option(self):
        import sys
        self.cf.deprecate_option(
            'foo')  # we can deprecate non-existent options

        # testing warning with catch_warning was only added in 2.6
        if sys.version_info[:2] < (2, 6):
            raise nose.SkipTest("Need py > 2.6")

        self.assertTrue(self.cf._is_deprecated('foo'))
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            try:
                self.cf.get_option('foo')
            except KeyError:
                pass
            else:
                self.fail("Nonexistent option didn't raise KeyError")

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'deprecated' in str(w[-1]))  # we get the default message

        self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int)
        self.cf.register_option('b.c', 'hullo', 'doc2')
        self.cf.register_option('foo', 'hullo', 'doc2')

        self.cf.deprecate_option('a', removal_ver='nifty_ver')
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.cf.get_option('a')

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'eprecated' in str(w[-1]))  # we get the default message
            self.assertTrue(
                'nifty_ver' in str(w[-1]))  # with the removal_ver quoted

            self.assertRaises(
                KeyError, self.cf.deprecate_option, 'a')  # can't depr. twice

        self.cf.deprecate_option('b.c', 'zounds!')
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.cf.get_option('b.c')

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'zounds!' in str(w[-1]))  # we get the custom message

        # test rerouting keys
        self.cf.register_option('d.a', 'foo', 'doc2')
        self.cf.register_option('d.dep', 'bar', 'doc2')
        self.assertEqual(self.cf.get_option('d.a'), 'foo')
        self.assertEqual(self.cf.get_option('d.dep'), 'bar')

        self.cf.deprecate_option('d.dep', rkey='d.a')  # reroute d.dep to d.a
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.cf.get_option('d.dep'), 'foo')

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'eprecated' in str(w[-1]))  # we get the custom message

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.cf.set_option('d.dep', 'baz')  # should overwrite "d.a"

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'eprecated' in str(w[-1]))  # we get the custom message

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.assertEqual(self.cf.get_option('d.dep'), 'baz')

            self.assertEqual(len(w), 1)  # should have raised one warning
            self.assertTrue(
                'eprecated' in str(w[-1]))  # we get the custom message

    def test_config_prefix(self):
        # config_prefix lets a block register/get/set under a namespace.
        with self.cf.config_prefix("base"):
            self.cf.register_option('a', 1, "doc1")
            self.cf.register_option('b', 2, "doc2")
            self.assertEqual(self.cf.get_option('a'), 1)
            self.assertEqual(self.cf.get_option('b'), 2)

            self.cf.set_option('a', 3)
            self.cf.set_option('b', 4)
            self.assertEqual(self.cf.get_option('a'), 3)
            self.assertEqual(self.cf.get_option('b'), 4)

        self.assertEqual(self.cf.get_option('base.a'), 3)
        self.assertEqual(self.cf.get_option('base.b'), 4)
        self.assertTrue(
            'doc1' in self.cf.describe_option('base.a', _print_desc=False))
        self.assertTrue(
            'doc2' in self.cf.describe_option('base.b', _print_desc=False))

        self.cf.reset_option('base.a')
        self.cf.reset_option('base.b')

        with self.cf.config_prefix("base"):
            self.assertEqual(self.cf.get_option('a'), 1)
            self.assertEqual(self.cf.get_option('b'), 2)

    def test_callback(self):
        k = [None]
        v = [None]

        def callback(key):
            k.append(key)
            v.append(self.cf.get_option(key))

        self.cf.register_option('d.a', 'foo', cb=callback)
        self.cf.register_option('d.b', 'foo', cb=callback)

        del k[-1], v[-1]
        self.cf.set_option("d.a", "fooz")
        self.assertEqual(k[-1], "d.a")
        self.assertEqual(v[-1], "fooz")

        del k[-1], v[-1]
        self.cf.set_option("d.b", "boo")
        self.assertEqual(k[-1], "d.b")
        self.assertEqual(v[-1], "boo")

        del k[-1], v[-1]
        self.cf.reset_option("d.b")
        self.assertEqual(k[-1], "d.b")

    def test_set_ContextManager(self):
        def eq(val):
            self.assertEqual(self.cf.get_option("a"), val)

        self.cf.register_option('a', 0)
        eq(0)
        # option_context restores the previous value on exit, and nests.
        with self.cf.option_context("a", 15):
            eq(15)
            with self.cf.option_context("a", 25):
                eq(25)
            eq(15)
        eq(0)

        self.cf.set_option("a", 17)
        eq(17)

    def test_attribute_access(self):
        holder = []

        def f():
            options.b = 1

        def f2():
            options.display = 1

        def f3(key):
            holder.append(True)

        self.cf.register_option('a', 0)
        self.cf.register_option('c', 0, cb=f3)
        options = self.cf.options

        self.assertEqual(options.a, 0)
        with self.cf.option_context("a", 15):
            self.assertEqual(options.a, 15)

        options.a = 500
        self.assertEqual(self.cf.get_option("a"), 500)

        self.cf.reset_option("a")
        self.assertEqual(options.a, self.cf.get_option("a", 0))

        # setting attributes for unregistered options fails
        self.assertRaises(KeyError, f)
        self.assertRaises(KeyError, f2)

        # make sure callback kicks when using this form of setting
        options.c = 1
        self.assertEqual(len(holder), 1)

    def test_option_context_scope(self):
        # Ensure that creating a context does not affect the existing
        # environment as it is supposed to be used with the `with` statement.
        # See https://github.com/pydata/pandas/issues/8514

        original_value = 60
        context_value = 10
        option_name = 'a'

        self.cf.register_option(option_name, original_value)

        # Ensure creating contexts didn't affect the current context.
        ctx = self.cf.option_context(option_name, context_value)
        self.assertEqual(self.cf.get_option(option_name), original_value)

        # Ensure the correct value is available inside the context.
        with ctx:
            self.assertEqual(self.cf.get_option(option_name), context_value)

        # Ensure the current context is reset
        self.assertEqual(self.cf.get_option(option_name), original_value)
|
dillia23/code-dot-org
|
refs/heads/staging
|
blockly-core/closure-library-read-only/closure/bin/build/depstree.py
|
455
|
# Copyright 2009 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to represent a full Closure Library dependency tree.
Offers a queryable tree of dependencies of a given set of sources. The tree
will also do logical validation to prevent duplicate provides and circular
dependencies.
"""
__author__ = 'nnaze@google.com (Nathan Naze)'
class DepsTree(object):
  """Represents the set of dependencies between source files."""

  def __init__(self, sources):
    """Initializes the tree with a set of sources.

    Args:
      sources: A set of JavaScript sources.

    Raises:
      MultipleProvideError: A namespace is provided by multiple sources.
      NamespaceNotFoundError: A namespace is required but never provided.
    """
    self._sources = sources
    self._provides_map = dict()

    # Ensure nothing was provided twice.
    for source in sources:
      for provide in source.provides:
        if provide in self._provides_map:
          raise MultipleProvideError(
              provide, [self._provides_map[provide], source])

        self._provides_map[provide] = source

    # Check that all required namespaces are provided.
    for source in sources:
      for require in source.requires:
        if require not in self._provides_map:
          raise NamespaceNotFoundError(require, source)

  def GetDependencies(self, required_namespaces):
    """Get source dependencies, in order, for the given namespaces.

    Args:
      required_namespaces: A string (for one) or list (for one or more) of
        namespaces.

    Returns:
      A list of source objects that provide those namespaces and all
      requirements, in dependency order.

    Raises:
      NamespaceNotFoundError: A namespace is requested but doesn't exist.
      CircularDependencyError: A cycle is detected in the dependency tree.
    """
    if isinstance(required_namespaces, str):
      required_namespaces = [required_namespaces]

    deps_sources = []
    for namespace in required_namespaces:
      # Deduplicate while preserving the resolution (dependency) order.
      for source in DepsTree._ResolveDependencies(
          namespace, [], self._provides_map, []):
        if source not in deps_sources:
          deps_sources.append(source)

    return deps_sources

  @staticmethod
  def _ResolveDependencies(required_namespace, deps_list, provides_map,
                           traversal_path):
    """Resolve dependencies for Closure source files.

    Follows the dependency tree down and builds a list of sources in
    dependency order. This function will recursively call itself to fill all
    dependencies below the requested namespaces, and then append its sources
    at the end of the list.

    Args:
      required_namespace: String of required namespace.
      deps_list: List of sources in dependency order. This function will
        append the required source once all of its dependencies are
        satisfied.
      provides_map: Map from namespace to source that provides it.
      traversal_path: List of namespaces of our path from the root down the
        dependency/recursion tree. Used to identify cyclical dependencies.
        This is a list used as a stack -- when the function is entered, the
        current namespace is pushed and popped right before returning.
        Each recursive call will check that the current namespace does not
        appear in the list, throwing a CircularDependencyError if it does.

    Returns:
      The given deps_list object filled with sources in dependency order.

    Raises:
      NamespaceNotFoundError: A namespace is requested but doesn't exist.
      CircularDependencyError: A cycle is detected in the dependency tree.
    """
    source = provides_map.get(required_namespace)
    if not source:
      raise NamespaceNotFoundError(required_namespace)

    if required_namespace in traversal_path:
      traversal_path.append(required_namespace)  # do this *after* the test
      # This must be a cycle.
      raise CircularDependencyError(traversal_path)

    # If we don't have the source yet, we'll have to visit this namespace and
    # add the required dependencies to deps_list.
    if source not in deps_list:
      traversal_path.append(required_namespace)

      for require in source.requires:
        # Append all other dependencies before we append our own.
        DepsTree._ResolveDependencies(require, deps_list, provides_map,
                                      traversal_path)
      deps_list.append(source)

      traversal_path.pop()

    return deps_list
class BaseDepsTreeError(Exception):
  """Common base class for every error raised by DepsTree."""

  def __init__(self):
    super(BaseDepsTreeError, self).__init__()
class CircularDependencyError(BaseDepsTreeError):
  """Raised when a dependency cycle is encountered."""

  def __init__(self, dependency_list):
    BaseDepsTreeError.__init__(self)
    self._dependency_list = dependency_list

  def __str__(self):
    # One namespace per line, in the order the cycle was walked.
    cycle_text = '\n'.join(self._dependency_list)
    return 'Encountered circular dependency:\n%s\n' % cycle_text
class MultipleProvideError(BaseDepsTreeError):
  """Raised when a namespace is provided more than once."""

  def __init__(self, namespace, sources):
    BaseDepsTreeError.__init__(self)
    self._namespace = namespace
    self._sources = sources

  def __str__(self):
    # List every offending source on its own line.
    source_strs = [str(source) for source in self._sources]
    return ('Namespace "%s" provided more than once in sources:\n%s\n' %
            (self._namespace, '\n'.join(source_strs)))
class NamespaceNotFoundError(BaseDepsTreeError):
  """Raised when a namespace is requested but not provided."""

  def __init__(self, namespace, source=None):
    BaseDepsTreeError.__init__(self)
    self._namespace = namespace
    self._source = source

  def __str__(self):
    # Mention the requiring source only when one was recorded.
    if self._source:
      return ('Namespace "%s" never provided.'
              ' Required in %s' % (self._namespace, self._source))
    return 'Namespace "%s" never provided.' % self._namespace
|
AZed/duplicity
|
refs/heads/master
|
testing/tests/statisticstest.py
|
5
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import helper
import sys, unittest
from duplicity.statistics import * #@UnusedWildImport
from duplicity import path
helper.setup()
class StatsObjTest(unittest.TestCase):
    """Test StatsObj class"""
    def setUp(self):
        # Unpack the canned fixture tree used by the path round-trip test.
        assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")

    def tearDown(self):
        assert not os.system("rm -rf testfiles tempdir temp2.tar")

    def set_obj(self, s):
        """Set values of s's statistics"""
        s.SourceFiles = 1
        s.SourceFileSize = 2
        s.NewFiles = 3
        s.NewFileSize = 4
        s.DeletedFiles = 5
        s.ChangedFiles = 7
        s.ChangedFileSize = 8
        s.ChangedDeltaSize = 9
        s.DeltaEntries = 10
        s.RawDeltaSize = 11
        s.TotalDestinationSizeChange = 12
        s.StartTime = 13
        s.EndTime = 14

    def test_get_stats(self):
        """Test reading and writing stat objects"""
        s = StatsObj()
        assert s.get_stat('SourceFiles') is None
        self.set_obj(s)
        assert s.get_stat('SourceFiles') == 1

        # StatsDeltaProcess initializes its counters to 0, not None.
        s1 = StatsDeltaProcess()
        assert s1.get_stat('SourceFiles') == 0

    def test_get_stats_string(self):
        """Test conversion of stat object into string"""
        s = StatsObj()
        stats_string = s.get_stats_string()
        assert stats_string == "", stats_string

        self.set_obj(s)
        stats_string = s.get_stats_string()
        # NOTE(review): the timestamp strings assume the local timezone is
        # US Central (UTC-6) — this assertion is timezone-dependent.
        assert stats_string == \
"""StartTime 13.00 (Wed Dec 31 18:00:13 1969)
EndTime 14.00 (Wed Dec 31 18:00:14 1969)
ElapsedTime 1.00 (1 second)
SourceFiles 1
SourceFileSize 2 (2 bytes)
NewFiles 3
NewFileSize 4 (4 bytes)
DeletedFiles 5
ChangedFiles 7
ChangedFileSize 8 (8 bytes)
ChangedDeltaSize 9 (9 bytes)
DeltaEntries 10
RawDeltaSize 11 (11 bytes)
TotalDestinationSizeChange 12 (12 bytes)
""", "'%s'" % stats_string

    def test_line_string(self):
        """Test conversion to a single line"""
        s = StatsObj()
        self.set_obj(s)
        statline = s.get_stats_line(("sample", "index", "w", "new\nline"))
        assert statline == "sample/index/w/new\\nline 1 2 3 4 5 7 8 9 10 11",\
            repr(statline)

        statline = s.get_stats_line(())
        assert statline == ". 1 2 3 4 5 7 8 9 10 11"

        # spaces in path components are escaped as \x20
        statline = s.get_stats_line(("file name with spaces",))
        assert statline == ("file\\x20name\\x20with\\x20spaces " \
                            "1 2 3 4 5 7 8 9 10 11"), repr(statline)

    def test_byte_summary(self):
        """Test conversion of bytes to strings like 7.23MB"""
        s = StatsObj()
        f = s.get_byte_summary_string
        assert f(1) == "1 byte"
        assert f(234.34) == "234 bytes"
        assert f(2048) == "2.00 KB"
        assert f(3502243) == "3.34 MB"
        assert f(314992230) == "300 MB"
        assert f(36874871216) == "34.3 GB", f(36874871216)
        assert f(3775986812573450) == "3434 TB"

    def test_init_stats(self):
        """Test setting stat object from string"""
        s = StatsObj()
        # trailing garbage after the value should be ignored
        s.set_stats_from_string("NewFiles 3 hello there")
        for attr in s.stat_attrs:
            if attr == 'NewFiles': assert s.get_stat(attr) == 3
            else: assert s.get_stat(attr) is None, (attr, s.__dict__[attr])

        s1 = StatsObj()
        self.set_obj(s1)
        assert not s1.stats_equal(s)

        # round-trip: string form parses back to an equal object
        s2 = StatsObj()
        s2.set_stats_from_string(s1.get_stats_string())
        assert s1.stats_equal(s2)

    def test_write_path(self):
        """Test reading and writing of statistics object"""
        p = path.Path("testfiles/statstest")
        if p.exists(): p.delete()
        s = StatsObj()
        self.set_obj(s)
        s.write_stats_to_path(p)

        s2 = StatsObj()
        assert not s2.stats_equal(s)
        s2.read_stats_from_path(p)
        assert s2.stats_equal(s)

    def testAverage(self):
        """Test making an average statsobj"""
        s1 = StatsObj()
        s1.StartTime = 5
        s1.EndTime = 10
        s1.ElapsedTime = 5
        s1.ChangedFiles = 2
        s1.SourceFiles = 100
        s1.NewFileSize = 4

        s2 = StatsObj()
        s2.StartTime = 25
        s2.EndTime = 35
        s2.ElapsedTime = 10
        s2.ChangedFiles = 1
        s2.SourceFiles = 50
        s2.DeletedFiles = 0

        s3 = StatsObj().set_to_average([s1, s2])
        # times are not averaged; attrs missing from either input become None
        assert s3.StartTime is s3.EndTime is None
        assert s3.ElapsedTime == 7.5
        assert s3.DeletedFiles is s3.NewFileSize is None, (s3.DeletedFiles,
                                                           s3.NewFileSize)
        assert s3.ChangedFiles == 1.5
        assert s3.SourceFiles == 75
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
fayf/pyload
|
refs/heads/stable
|
module/plugins/hoster/QuickshareCz.py
|
12
|
# -*- coding: utf-8 -*-
import pycurl
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class QuickshareCz(SimpleHoster):
    """Hoster plugin for quickshare.cz (free and premium downloads)."""

    __name__ = "QuickshareCz"
    __type__ = "hoster"
    __version__ = "0.57"
    __status__ = "testing"

    __pattern__ = r'http://(?:[^/]*\.)?quickshare\.cz/stahnout-soubor/.+'
    __config__ = [("use_premium", "bool", "Use premium account if available", True)]

    __description__ = """Quickshare.cz hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]

    NAME_PATTERN = r'<th width="145px">Název:</th>\s*<td style="word-wrap:break-word;">(?P<N>[^<]+)</td>'
    SIZE_PATTERN = r'<th>Velikost:</th>\s*<td>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</td>'
    OFFLINE_PATTERN = r'<script type="text/javascript">location\.href=\'/chyba\';</script>'

    def process(self, pyfile):
        """Entry point: parse the page, then pick the free or premium flow."""
        self.html = self.load(pyfile.url)
        self.get_fileInfo()

        #: Parse js variables
        self.jsvars = dict((x, y.strip("'")) for x, y in
                           re.findall(r"var (\w+) = ([\d.]+|'.+?')", self.html))
        self.log_debug(self.jsvars)
        pyfile.name = self.jsvars['ID3']

        #: Determine download type - free or premium
        if self.premium:
            if 'UU_prihlasen' in self.jsvars:
                if self.jsvars['UU_prihlasen'] == "0":
                    self.log_warning(_("User not logged in"))
                    self.relogin(self.user)
                    self.retry()
                elif float(self.jsvars['UU_kredit']) < float(self.jsvars['kredit_odecet']):
                    #: Fall back to the free flow when credit is insufficient.
                    self.log_warning(_("Not enough credit left"))
                    self.premium = False

        if self.premium:
            self.handle_premium(pyfile)
        else:
            self.handle_free(pyfile)

        if self.check_download({'error': re.compile(r"\AChyba!")}, max_size=100):
            #: NOTE(review): message text looks garbled in the source —
            #: possibly meant "File too small or plugin defect"; confirm
            #: against upstream before changing.
            self.fail(_("File not m or plugin defect"))

    def handle_free(self, pyfile):
        """Free (anonymous) download: POST the js IDs, read the redirect."""
        #: Get download url
        download_url = '%s/download.php' % self.jsvars['server']
        data = dict((x, self.jsvars[x]) for x in self.jsvars
                    if x in ("ID1", "ID2", "ID3", "ID4"))
        self.log_debug("FREE URL1:" + download_url, data)

        #: Disable redirect following so the Location header can be read.
        self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
        self.load(download_url, post=data)
        self.header = self.req.http.header
        self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)

        m = re.search(r'Location\s*:\s*(.+)', self.header, re.I)
        if m is None:
            self.fail(_("File not found"))

        self.link = m.group(1).rstrip()  #@TODO: Remove .rstrip() in 0.4.10
        self.log_debug("FREE URL2:" + self.link)

        #: Check errors
        m = re.search(r'/chyba/(\d+)', self.link)
        if m:
            if m.group(1) == "1":
                self.retry(60, 2 * 60, "This IP is already downloading")
            elif m.group(1) == "2":
                self.retry(60, 60, "No free slots available")
            else:
                #: BUG FIX: m.group(1) is a str, and "%d" % str raises
                #: TypeError, masking the real error code — convert first.
                self.fail(_("Error %d") % int(m.group(1)))

    def handle_premium(self, pyfile):
        """Premium download: fetch directly with the premium js IDs."""
        download_url = '%s/download_premium.php' % self.jsvars['server']
        data = dict((x, self.jsvars[x]) for x in self.jsvars
                    if x in ("ID1", "ID2", "ID4", "ID5"))
        self.download(download_url, get=data)
getInfo = create_getInfo(QuickshareCz)
|
jobiols/odoomrp-wip
|
refs/heads/8.0
|
stock_planning/wizard/wiz_stock_planning.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api, exceptions, _
from dateutil.relativedelta import relativedelta
class WizStockPlanning(models.TransientModel):
    _name = 'wiz.stock.planning'
    _description = 'Wiz Stock Planning'

    @api.multi
    def _def_company(self):
        # Default: the current user's company.
        return self.env.user.company_id.id

    company = fields.Many2one(
        'res.company', 'Company', default=_def_company, required=True)
    from_date = fields.Date(
        'From date', required=True,
        default=lambda self: fields.Date.context_today(self),
        help='Date from which the interval starts counting days')
    days = fields.Integer(
        'Days interval', required=True, default=1,
        help='Increase number of days starting from the date from')
    to_date = fields.Date('To date', required=True,
                          help='Deadline for calculating periods')
    category = fields.Many2one(
        'product.category', 'Category',
        help='Enter this field if you want to filter by category')
    template = fields.Many2one(
        'product.template', 'Template',
        help='Enter this field if you want to filter by template')
    product = fields.Many2one(
        'product.product', 'Product',
        help='Enter this field if you want to filter by product')
    locations = fields.Many2many(
        comodel_name='stock.location', relation='rel_stock_planning_location',
        column1='wiz_stock_planning_id', column2='locations_id',
        string='Locations')

    @api.multi
    def calculate_stock_planning(self):
        """Rebuild stock.planning records for the selected filters.

        Deletes existing planning rows matching the filters, collects every
        (product, location) pair touched by stock moves / procurements up to
        the end of the date range, then generates one planning row per pair
        and period. Returns an action opening the planning tree view.
        """
        self.ensure_one()
        planning_obj = self.env['stock.planning']
        move_obj = self.env['stock.move']
        proc_obj = self.env['procurement.order']
        if self.days < 1:
            raise exceptions.Warning(
                _('Error!: Increase number of days must be greater than zero'))
        # Wipe previously generated planning rows for the same scope.
        cond = [('company', '=', self.company.id)]
        if self.locations:
            cond.append(('location', 'in', self.locations.ids))
        if self.product:
            cond.append(('product', '=', self.product.id))
        planning = planning_obj.search(cond)
        planning.unlink()
        fdate = self.from_date
        product_datas = {}
        # NOTE(review): the loop structure below is reconstructed — advance
        # fdate period-by-period past to_date, then step back one period so
        # fdate is the last period boundary inside the range; confirm against
        # the original module.
        while fdate < self.to_date:
            fdate = fields.Date.to_string(fields.Date.from_string(fdate) +
                                          relativedelta(days=self.days))
        fdate = fields.Date.to_string(fields.Date.from_string(fdate) -
                                      relativedelta(days=self.days))
        # Collect every (product, location) pair touched by moves whose
        # source or destination is an internal location inside the filter.
        for move in move_obj._find_moves_from_stock_planning(
                self.company, fdate, category=self.category,
                template=self.template, product=self.product):
            if move.location_id.usage == 'internal':
                if (not self.locations or
                        (self.locations and move.location_id.id in
                         self.locations.ids)):
                    product_datas = self._find_product_in_table(
                        product_datas, move.product_id, move.location_id)
            if move.location_dest_id.usage == 'internal':
                if (not self.locations or
                        (self.locations and move.location_dest_id.id in
                         self.locations.ids)):
                    product_datas = self._find_product_in_table(
                        product_datas, move.product_id, move.location_dest_id)
        # Same for pending procurements in the period.
        for procurement in proc_obj._find_procurements_from_stock_planning(
                self.company, fdate, category=self.category,
                template=self.template, product=self.product, periods=True):
            if (not self.locations or
                    (self.locations and procurement.location_id.id in
                     self.locations.ids)):
                product_datas = self._find_product_in_table(
                    product_datas, procurement.product_id,
                    procurement.location_id)
        self._generate_stock_planning(product_datas)
        return {'name': _('Stock Planning'),
                'type': 'ir.actions.act_window',
                'view_type': 'form',
                'view_mode': 'tree',
                'res_model': 'stock.planning',
                }

    def _find_product_in_table(self, product_datas, product, location):
        """Add (product, location) to product_datas if not already present.

        NOTE(review): the dict key is ``product.id + location.id`` — two
        different pairs whose ids sum to the same value would collide and
        one would be silently dropped.
        """
        found = False
        for data in product_datas:
            datos_array = product_datas[data]
            dproduct = datos_array['product']
            dlocation = datos_array['location']
            if dproduct.id == product.id and dlocation.id == location.id:
                found = True
        if not found:
            my_vals = {'product': product,
                       'location': location,
                       }
            ind = product.id + location.id
            product_datas[(ind)] = my_vals
        return product_datas

    def _generate_stock_planning(self, product_datas):
        """Create one stock.planning row per (product, location) and period.

        Periods start at from_date and advance by ``days``; each row after
        the first also records the day following the previous period start
        as its from_date.
        """
        planning_obj = self.env['stock.planning']
        for data in product_datas:
            datos_array = product_datas[data]
            fdate = self.from_date
            from_date = False
            while fdate < self.to_date:
                vals = {'company': self.company.id,
                        'location': datos_array['location'].id,
                        'scheduled_date': fdate,
                        'product': datos_array['product'].id}
                if from_date:
                    vals['from_date'] = from_date
                planning_obj.create(vals)
                from_date = fields.Date.to_string(
                    fields.Date.from_string(fdate) + relativedelta(days=1))
                fdate = fields.Date.to_string(fields.Date.from_string(fdate) +
                                              relativedelta(days=self.days))
|
openfun/edx-platform
|
refs/heads/master
|
docs/shared/conf.py
|
158
|
# -*- coding: utf-8 -*-
#
# getting_started documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 16 11:19:12 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# -----------------------------------------------------------------------------
# Common config
#
# This file is imported by the different project conf.py files (in
# course_authors/, data/, and developers/). It includes configuration options
# common to all three.
#
# -----------------------------------------------------------------------------
import os
BASEDIR = os.path.dirname(os.path.abspath(__file__))
def add_base(paths):
    """Prefix every entry of *paths* with BASEDIR.

    paths: iterable of path fragments relative to this directory.
    Returns the corresponding list of joined paths.
    """
    return [os.path.join(BASEDIR, fragment) for fragment in paths]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = add_base(['_templates'])
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'edX'
copyright = u'2013, EdX Doc Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<Studio> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = add_base(['_static'])
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'edxdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
'index',
'getting_started.tex',
u'edX Studio Documentation',
u'EdX Doc Team',
'manual',
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'getting_started', u'getting_started Documentation',
[u'EdX Doc Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
'index',
'getting_started',
u'getting_started Documentation',
u'EdX Doc Team',
'getting_started',
'One line description of project.',
'Miscellaneous',
),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'getting_started'
epub_author = u'EdX Doc Team'
epub_publisher = u'EdX Doc Team'
epub_copyright = u'2013, EdX Doc Team'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# If 'no', URL addresses will not be shown.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
kyl191/ansible
|
refs/heads/devel
|
lib/ansible/playbook/task.py
|
10
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import iteritems, string_types
from ansible.errors import AnsibleError
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.splitter import parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
from ansible.plugins import module_loader, lookup_loader
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.conditional import Conditional
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
__all__ = ['Task']
try:
from __main__ import display
display = display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Task(Base, Conditional, Taggable, Become):

    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    _args = FieldAttribute(isa='dict', default=dict())
    _action = FieldAttribute(isa='string')

    _always_run = FieldAttribute(isa='bool')
    _any_errors_fatal = FieldAttribute(isa='bool')
    _async = FieldAttribute(isa='int', default=0)
    _changed_when = FieldAttribute(isa='string')
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _failed_when = FieldAttribute(isa='string')
    _first_available_file = FieldAttribute(isa='list')
    _ignore_errors = FieldAttribute(isa='bool')

    _loop = FieldAttribute(isa='string', private=True)
    _loop_args = FieldAttribute(isa='list', private=True)
    _local_action = FieldAttribute(isa='string')

    _name = FieldAttribute(isa='string', default='')

    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int')
    _register = FieldAttribute(isa='string')
    _retries = FieldAttribute(isa='int', default=1)
    _run_once = FieldAttribute(isa='bool')
    _until = FieldAttribute(isa='list') # ?

    def __init__(self, block=None, role=None, task_include=None):
        ''' constructs a task, without the Task.load classmethod, it will be pretty blank '''

        self._block = block
        self._role = role
        self._task_include = task_include

        # special flag for local_action: tasks, to make sure their
        # connection type of local isn't overridden incorrectly
        self._local_action = False

        super(Task, self).__init__()

    def get_name(self):
        ''' return the name of the task, prefixed with the owning role's name when there is one '''

        if self._role and self.name:
            return "%s : %s" % (self._role.get_name(), self.name)
        elif self.name:
            return self.name
        else:
            # unnamed task: synthesize a name from the action and its args
            flattened_args = self._merge_kv(self.args)
            if self._role:
                return "%s : %s %s" % (self._role.get_name(), self.action, flattened_args)
            else:
                return "%s %s" % (self.action, flattened_args)

    def _merge_kv(self, ds):
        ''' flatten a dict of module args into a "k=v k=v" display string '''
        if ds is None:
            return ""
        elif isinstance(ds, string_types):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k,v) in iteritems(ds):
                if k.startswith('_'):
                    # internal bookkeeping keys are never displayed
                    continue
                buf = buf + "%s=%s " % (k,v)
            buf = buf.strip()
            return buf

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        ''' canonical entry point: construct a Task and populate it from a datastructure '''
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

    def load_data(self, ds, variable_manager=None, loader=None):
        '''
        We override load_data for tasks so that we can pull special flags
        out of the task args and set them internaly only so the user never
        sees them.
        '''
        t = super(Task, self).load_data(ds=ds, variable_manager=variable_manager, loader=loader)
        t._local_action = t.args.pop('_local_action', False)
        return t

    def __repr__(self):
        ''' returns a human readable representation of the task '''
        return "TASK: %s" % self.get_name()

    def _preprocess_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.replace("with_", "")
        if new_ds.get('loop') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
        new_ds['loop'] = loop_name
        new_ds['loop_args'] = v

    def preprocess_data(self, ds):
        '''
        tasks are especially complex arguments so need pre-processing.
        keep it short.
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure suitable for the
        # attributes of the task class
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        # use the args parsing class to determine the action, args,
        # and the delegate_to value from the various possible forms
        # supported as legacy
        args_parser = ModuleArgsParser(task_ds=ds)
        (action, args, connection) = args_parser.parse()

        new_ds['action'] = action
        new_ds['args'] = args
        new_ds['connection'] = connection

        # we handle any 'vars' specified in the ds here, as we may
        # be adding things to them below (special handling for includes).
        # When that deprecated feature is removed, this can be too.
        if 'vars' in ds:
            # _load_vars is defined in Base, and is used to load a dictionary
            # or list of dictionaries in a standard way
            new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
        else:
            new_ds['vars'] = dict()

        for (k,v) in iteritems(ds):
            if k in ('action', 'local_action', 'args', 'connection') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
                continue
            elif k.replace("with_", "") in lookup_loader:
                self._preprocess_loop(ds, new_ds, k, v)
            else:
                # pre-2.0 syntax allowed variables for include statements at the
                # top level of the task, so we move those into the 'vars' dictionary
                # here, and show a deprecation message as we will remove this at
                # some point in the future.
                if action == 'include' and k not in self._get_base_attributes() and k not in self.DEPRECATED_ATTRIBUTES:
                    # use the module-level display; Task instances have no
                    # _display attribute (matches _load_any_errors_fatal)
                    display.deprecated("Specifying include variables at the top-level of the task is deprecated. Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\nfor currently supported syntax regarding included files and variables")
                    new_ds['vars'][k] = v
                else:
                    new_ds[k] = v

        return super(Task, self).preprocess_data(new_ds)

    def _load_any_errors_fatal(self, attr, value):
        '''
        Exists only to show a deprecation warning, as this attribute is not valid
        at the task level.
        '''
        display.deprecated("Setting any_errors_fatal on a task is no longer supported. This should be set at the play level only")
        return None

    def post_validate(self, templar):
        '''
        Override of base class post_validate, to also do final validation on
        the block and task include (if any) to which this task belongs.
        '''

        if self._block:
            self._block.post_validate(templar)
        if self._task_include:
            self._task_include.post_validate(templar)

        super(Task, self).post_validate(templar)

    def _post_validate_loop_args(self, attr, value, templar):
        '''
        Override post validation for the loop args field, which is templated
        specially in the TaskExecutor class when evaluating loops.
        '''
        return value

    def _post_validate_environment(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        if value is None:
            return dict()

        for env_item in value:
            if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
                # module-level display here too (self._display does not exist)
                display.deprecated("Using bare variables for environment is deprecated. Update your playbooks so that the environment value uses the full variable syntax ('{{foo}}')")
                break
        return templar.template(value, convert_bare=True)

    def get_vars(self):
        ''' return the merged variables for this task (block and include vars folded in) '''
        all_vars = dict()
        if self._block:
            all_vars.update(self._block.get_vars())
        if self._task_include:
            all_vars.update(self._task_include.get_vars())

        all_vars.update(self.vars)

        # 'tags' and 'when' are handled through their own mechanisms and
        # must not leak into the task's variable namespace
        if 'tags' in all_vars:
            del all_vars['tags']
        if 'when' in all_vars:
            del all_vars['when']
        return all_vars

    def copy(self, exclude_block=False):
        ''' return a copy of this task, optionally omitting its parent block '''
        new_me = super(Task, self).copy()
        new_me._local_action = self._local_action

        new_me._block = None
        if self._block and not exclude_block:
            new_me._block = self._block.copy()

        # roles are shared, not copied
        new_me._role = None
        if self._role:
            new_me._role = self._role

        new_me._task_include = None
        if self._task_include:
            new_me._task_include = self._task_include.copy(exclude_block=exclude_block)

        return new_me

    def serialize(self):
        ''' return a picklable datastructure representing this task and its parents '''
        data = super(Task, self).serialize()
        data['_local_action'] = self._local_action

        if self._block:
            data['block'] = self._block.serialize()

        if self._role:
            data['role'] = self._role.serialize()

        if self._task_include:
            data['task_include'] = self._task_include.serialize()

        return data

    def deserialize(self, data):
        ''' restore task state (and parent block/role/include) from serialize() output '''

        # import is here to avoid import loops
        #from ansible.playbook.task_include import TaskInclude

        block_data = data.get('block')
        self._local_action = data.get('_local_action', False)

        if block_data:
            b = Block()
            b.deserialize(block_data)
            self._block = b
            del data['block']

        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r
            del data['role']

        ti_data = data.get('task_include')
        if ti_data:
            #ti = TaskInclude()
            ti = Task()
            ti.deserialize(ti_data)
            self._task_include = ti
            del data['task_include']

        super(Task, self).deserialize(data)

    def evaluate_conditional(self, templar, all_vars):
        ''' evaluate this task's conditionals, short-circuiting on any failing parent '''
        if self._block is not None:
            if not self._block.evaluate_conditional(templar, all_vars):
                return False
        if self._task_include is not None:
            if not self._task_include.evaluate_conditional(templar, all_vars):
                return False
        return super(Task, self).evaluate_conditional(templar, all_vars)

    def set_loader(self, loader):
        '''
        Sets the loader on this object and recursively on parent, child objects.
        This is used primarily after the Task has been serialized/deserialized, which
        does not preserve the loader.
        '''

        self._loader = loader

        if self._block:
            self._block.set_loader(loader)
        if self._task_include:
            self._task_include.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False):
        '''
        Generic logic to get the attribute or parent attribute for a task value.
        '''
        value = None
        try:
            value = self._attributes[attr]
            if self._block and (value is None or extend):
                parent_value = getattr(self._block, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
            if self._task_include and (value is None or extend):
                parent_value = getattr(self._task_include, attr)
                if extend:
                    value = self._extend_value(value, parent_value)
                else:
                    value = parent_value
        except KeyError:
            pass

        return value

    def _get_attr_environment(self):
        '''
        Override for the 'environment' getattr fetcher, used from Base.
        '''
        environment = self._attributes['environment']
        if environment is None:
            environment = self._get_parent_attribute('environment')
        return environment
|
elainenaomi/sciwonc-dataflow-examples
|
refs/heads/master
|
dissertation2017/Experiment 1A/instances/10_2_workflow_full_10files_secondary_wmj_3sh_3rs_with_annot_with_proj_3s_range/work/ubuntu/pegasus/example_workflow/20161107T124001+0000/ConfigDB_AverageRatioEvent_0.py
|
11
|
# MongoDB connection settings: comma-separated replica-set member list;
# empty PORT/USER/PASSWORD mean default port and unauthenticated access.
HOST = "ip-172-31-29-102.us-west-2.compute.internal:27017,ip-172-31-29-103.us-west-2.compute.internal:27017,ip-172-31-29-104.us-west-2.compute.internal:27017,ip-172-31-29-105.us-west-2.compute.internal:27017,ip-172-31-29-101.us-west-2.compute.internal:27017,ip-172-31-29-106.us-west-2.compute.internal:27017,ip-172-31-29-107.us-west-2.compute.internal:27017,ip-172-31-29-108.us-west-2.compute.internal:27017,ip-172-31-29-109.us-west-2.compute.internal:27017"
PORT = ""
USER = ""
PASSWORD = ""
DATABASE = "googler"
# Read from secondaries; require majority write acknowledgement.
READ_PREFERENCE = "secondary"
WRITE_CONCERN = "majority"
# Source and destination collections for this job step.
COLLECTION_INPUT = "ratio"
COLLECTION_OUTPUT = "average_ratioevent"
# Prefix applied to generated column names (presumably by the consuming
# workflow step -- TODO confirm against the job script).
PREFIX_COLUMN = "g_"
# Document attributes to read and the sort order over input documents.
ATTRIBUTES = ["event type", "sds from all avg ratio"]
SORT = ["_id.filepath", "_id.numline"]
# Group documents by COLUMN, restricted to the values listed in VALUE.
OPERATION_TYPE = "GROUP_BY_COLUMN"
COLUMN = "event type"
VALUE = ["2"]
# CSV filenames used for staging input/output of this workflow instance.
INPUT_FILE = "ratio_cpu_memory.csv"
OUTPUT_FILE = "mean_ratio_cpu_memory_0.csv"
|
swcarpentry/amy
|
refs/heads/develop
|
amy/extforms/migrations/__init__.py
|
12133432
| |
gorbyo/admin_couchdb
|
refs/heads/master
|
admin_couchdb/__init__.py
|
12133432
| |
jswope00/griffinx
|
refs/heads/master
|
cms/djangoapps/contentstore/management/__init__.py
|
12133432
| |
harisibrahimkv/django
|
refs/heads/master
|
tests/generic_views/__init__.py
|
12133432
| |
EricSB/nupic
|
refs/heads/master
|
examples/opf/experiments/multistep/hotgym/permutations_sp.py
|
3
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by ExpGenerator to generate the actual
permutations.py file by replacing $XXXXXXXX tokens with desired values.
This permutations.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.pyc'
"""
from nupic.swarming.permutationhelpers import *
# The name of the field being predicted. Any allowed permutation MUST contain
# the prediction field.
# (generated from PREDICTION_FIELD)
predictedField = 'consumption'
ENC_WIDTH = 21
permutations = {
'inferenceType': 'NontemporalMultiStep',
'tpEnable': False,
# Encoder permutation choices
# Example:
#
# '__gym_encoder' : PermuteEncoder('gym', 'SDRCategoryEncoder', w=7,
# n=100),
#
# '__address_encoder' : PermuteEncoder('address', 'SDRCategoryEncoder',
# w=7, n=100),
#
# '__timestamp_timeOfDay_encoder' : PermuteEncoder('timestamp',
# 'DateEncoder.timeOfDay', w=7, radius=PermuteChoices([1, 8])),
#
# '__timestamp_dayOfWeek_encoder' : PermuteEncoder('timestamp',
# 'DateEncoder.dayOfWeek', w=7, radius=PermuteChoices([1, 3])),
#
# '__consumption_encoder' : PermuteEncoder('consumption', 'ScalarEncoder',
# w=7, n=PermuteInt(13, 500, 20), minval=0,
# maxval=PermuteInt(100, 300, 25)),
#
# (generated from PERM_ENCODER_CHOICES)
'__timestamp_timeOfDay_encoder' : PermuteEncoder(fieldName='timestamp',
encoderClass='DateEncoder.timeOfDay',
w=ENC_WIDTH, radius=PermuteFloat(0.5, 12)),
'__timestamp_dayOfWeek_encoder' : PermuteEncoder(fieldName='timestamp',
encoderClass='DateEncoder.dayOfWeek', w=ENC_WIDTH,
radius=PermuteFloat(1, 6)),
'__timestamp_weekend_encoder' : PermuteEncoder(fieldName='timestamp',
encoderClass='DateEncoder.weekend', w=ENC_WIDTH,
radius=PermuteChoices([1])),
'__consumption_encoder' : PermuteEncoder(fieldName='consumption',
encoderClass='AdaptiveScalarEncoder', w=ENC_WIDTH,
n=PermuteInt(28, 521), clipInput=True),
'tpSegmentActivationThreshold': 14,
'tpMinSegmentMatchSynapseThreshold': 12,
}
# Fields selected for final hypersearch report;
# NOTE: These values are used as regular expressions by RunPermutations.py's
# report generator
# (fieldname values generated from PERM_PREDICTED_FIELD_NAME)
report = [
'.*consumption.*',
]
# Permutation optimization setting: either minimize or maximize metric
# used by RunPermutations.
# NOTE: The value is used as a regular expressions by RunPermutations.py's
# report generator
# (generated from minimize = 'prediction:aae:window=1000:field=consumption')
minimize = "multiStepBestPredictions:multiStep:errorMetric='aae':steps=1:window=1000:field=consumption"
def permutationFilter(perm):
  """Accept or reject one candidate permutation of swarm variables.

  RunPermutations invokes this for every possible combination of the
  variables in the ``permutations`` dict; returning False skips that
  combination.

  Example -- reject encoders with an overly large maxval::

      if perm['__consumption_encoder']['maxval'] > 300:
          return False

  Parameters:
  ---------------------------------------------------------
  perm: dict of one possible combination of name:value
        pairs chosen from permutations.
  """
  # This experiment places no restrictions: every combination is valid.
  return True
|
orchidinfosys/odoo
|
refs/heads/master
|
addons/payment_ogone/data/__init__.py
|
895
|
# -*- coding: utf-8 -*-
import ogone
|
c2corg/v6_ui
|
refs/heads/master
|
c2corg_ui/views/health.py
|
1
|
import logging
from c2corg_ui.caching import cache_document_detail
from c2corg_ui.views import call_api
from pyramid.view import view_config
from os.path import isfile
log = logging.getLogger(__name__)
class Health(object):
    """Pyramid view reporting the health of the UI and its dependencies.

    Each probe is best-effort: a failing component is reported in the
    JSON payload (and may flip the HTTP status) but never raises out of
    the view.
    """

    def __init__(self, request):
        self.request = request

    @view_config(route_name='health', renderer='json')
    def get(self):
        """ Returns information about the version of the UI and the status
        of its components:

        - Git revision
        - API status
        - Redis status
        - Number of keys in Redis
        - Maintenance mode status
        """
        status = {
            'version': self.request.registry.settings.get('cache_version')
        }
        self._add_redis_status(status)
        self._add_api_status(status)
        self._add_maintenance_mode_status(status)
        return status

    def _add_redis_status(self, status):
        """Add 'redis' ('ok'/'error') and 'redis_keys' to *status*."""
        redis_keys = None
        success = False
        try:
            client = cache_document_detail.backend.client
            redis_keys = client.dbsize()
            success = True
        # Exception (not a bare except) so SystemExit/KeyboardInterrupt
        # still propagate; any probe failure is just logged.
        except Exception:
            log.exception('Getting redis keys failed')
        status['redis'] = 'ok' if success else 'error'
        status['redis_keys'] = redis_keys

    def _add_api_status(self, status):
        """Add 'api'/'api_status'; a failing API turns the response 500."""
        api_status = None
        success = False
        try:
            resp, api_status = call_api(
                self.request.registry.settings, 'health')
            if resp.status_code == 200:
                success = True
        except Exception:
            log.exception('Getting api status failed')
        if not success:
            self.request.response.status_code = 500
        status['api'] = 'ok' if success else 'error'
        status['api_status'] = api_status

    def _add_maintenance_mode_status(self, status):
        """Report the file-based maintenance switch; 404 when engaged."""
        maintenance_mode = False
        maintenance_file = 'maintenance_mode.txt'
        if isfile(maintenance_file):
            maintenance_mode = True
            # logging.warn() is a deprecated alias of warning()
            log.warning(
                'service is in maintenance mode, remove %s to reenable.' %
                maintenance_file)
            self.request.response.status_code = 404
        status['maintenance_mode'] = maintenance_mode
|
kdwink/intellij-community
|
refs/heads/master
|
python/lib/Lib/distutils/emxccompiler.py
|
140
|
"""distutils.emxccompiler
Provides the EMXCCompiler class, a subclass of UnixCCompiler that
handles the EMX port of the GNU C compiler to OS/2.
"""
# issues:
#
# * OS/2 insists that DLLs can have names no longer than 8 characters
# We put export_symbols in a def-file, as though the DLL can have
# an arbitrary length name, but truncate the output filename.
#
# * only use OMF objects and use LINK386 as the linker (-Zomf)
#
# * always build for multithreading (-Zmt) as the accompanying OS/2 port
# of Python is only distributed with threads enabled.
#
# tested configurations:
#
# * EMX gcc 2.81/EMX 0.9d fix03
__revision__ = "$Id: emxccompiler.py 34786 2003-12-02 12:17:59Z aimacintyre $"
import os,sys,copy
from distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils import log
class EMXCCompiler (UnixCCompiler):
    """CCompiler subclass for the EMX port of GNU gcc to OS/2.

    Reuses the UnixCCompiler machinery but:
      * hard-codes gcc command lines with OMF output (-Zomf) and
        multithreading (-Zmt),
      * compiles '.rc' resource files to '.res' via the 'rc' tool,
      * writes export symbols to a '.def' file instead of passing them
        to the linker directly,
      * searches libraries the OS/2 way ('<name>.lib' first, then the
        rare 'lib<name>.lib' form), also honouring EMX's LIBRARY_PATH.
    """

    compiler_type = 'emx'
    # File-name conventions of the EMX/OS2 toolchain.
    obj_extension = ".obj"
    static_lib_extension = ".lib"
    shared_lib_extension = ".dll"
    static_lib_format = "%s%s"
    shared_lib_format = "%s%s"
    res_extension = ".res"      # compiled resource file
    exe_extension = ".exe"

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        """Initialise the compiler and hard-wire the gcc command lines.

        Warns when pyconfig.h does not look gcc-compatible (see
        check_config_h()) and records the detected gcc/ld versions in
        self.gcc_version / self.ld_version.
        """

        UnixCCompiler.__init__ (self, verbose, dry_run, force)

        (status, details) = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. " +
                ("Reason: %s." % details) +
                "Compiling may fail because of undefined preprocessor macros.")

        (self.gcc_version, self.ld_version) = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
                         (self.gcc_version,
                          self.ld_version) )

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             linker_exe='gcc -Zomf -Zmt -Zcrtdll',
                             linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')

        # want the gcc library statically linked (so that we don't have
        # to distribute a version dependent on the compiler we have)
        self.dll_libraries=["gcc"]

    # __init__ ()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file; '.rc' resources go through 'rc' instead
        of the C compiler."""
        if ext == '.rc':
            # gcc requires '.rc' compiled to binary ('.res') files !!!
            try:
                self.spawn(["rc", "-r", src])
            except DistutilsExecError, msg:
                raise CompileError, msg
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link object files into a DLL or executable.

        Same signature and semantics as UnixCCompiler.link(), except
        that export_symbols are written into a '<dllname>.def' file
        (passed to the linker as an extra object) and '-s' (strip) is
        added for non-debug builds.
        """

        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE)):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate .def file
            contents = [
                "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
                os.path.splitext(os.path.basename(output_filename))[0],
                "DATA MULTIPLE NONSHARED",
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(' "%s"' % sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries
            # for gcc/ld the def-file is specified as any other object files
            objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self,
                           target_desc,
                           objects,
                           output_filename,
                           output_dir,
                           libraries,
                           library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug,
                           extra_preargs,
                           extra_postargs,
                           build_temp,
                           target_lang)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    # override the object_filenames method from CCompiler to
    # support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source file names to object file names.

        Extends the CCompiler behaviour so that '.rc' sources map to
        '.res' (compiled resource) outputs instead of '.obj'.
        """
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()

    # override the find_library_file method from UnixCCompiler
    # to deal with file naming/searching differences
    def find_library_file(self, dirs, lib, debug=0):
        """Return the path of library 'lib' searched in 'dirs' plus
        EMX's LIBRARY_PATH directories, or None when not found."""
        shortlib = '%s.lib' % lib
        longlib = 'lib%s.lib' % lib    # this form very rare

        # get EMX's default library directory search path
        try:
            emx_dirs = os.environ['LIBRARY_PATH'].split(';')
        except KeyError:
            emx_dirs = []

        for dir in dirs + emx_dirs:
            shortlibp = os.path.join(dir, shortlib)
            longlibp = os.path.join(dir, longlib)
            if os.path.exists(shortlibp):
                return shortlibp
            elif os.path.exists(longlibp):
                return longlibp

        # Oops, didn't find it in *any* of 'dirs'
        return None
# class EMXCCompiler
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using a unmodified
# version.

# Tri-state result codes returned by check_config_h().
CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"
def check_config_h():
    """Check if the current Python installation (specifically, pyconfig.h)
    appears amenable to building extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the
    following constants:

      CONFIG_H_OK
        all is well, go ahead and compile
      CONFIG_H_NOTOK
        doesn't look good
      CONFIG_H_UNCERTAIN
        not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig
    import string
    # if sys.version contains GCC then python was compiled with
    # GCC, and the pyconfig.h file should be OK
    if string.find(sys.version,"GCC") >= 0:
        return (CONFIG_H_OK, "sys.version mentions 'GCC'")

    fn = sysconfig.get_config_h_filename()
    try:
        # It would probably better to read single lines to search.
        # But we do this only once, and it is fast enough
        f = open(fn)
        s = f.read()
        f.close()
    except IOError, exc:
        # if we can't read this file, we cannot say it is wrong
        # the compiler will complain later about this file as missing
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))
    else:
        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
        if string.find(s,"__GNUC__") >= 0:
            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
        else:
            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
def get_versions():
    """Probe the toolchain for the gcc and ld versions.

    Returns a (gcc_version, ld_version) tuple; each element is None
    when the corresponding version could not be determined.
    """
    from distutils.version import StrictVersion
    from distutils.spawn import find_executable
    import re

    gcc_version = None
    gcc_path = find_executable('gcc')
    if gcc_path:
        # Ask the compiler itself; '-dumpversion' prints e.g. "2.8.1".
        pipe = os.popen(gcc_path + ' -dumpversion','r')
        version_text = pipe.read()
        pipe.close()
        match = re.search('(\d+\.\d+\.\d+)',version_text)
        if match:
            gcc_version = StrictVersion(match.group(1))
    # EMX ld has no way of reporting version number, and we use GCC
    # anyway - so we can link OMF DLLs
    ld_version = None
    return (gcc_version, ld_version)
|
oscarolar/odoo
|
refs/heads/master
|
addons/payment_ogone/data/ogone.py
|
395
|
# -*- coding: utf-8 -*-
# Mapping of Ogone (Ingenico) payment gateway error codes (the NCERROR
# field returned by the gateway, as decimal strings) to human-readable
# messages.  NOTE: the duplicated literal entries for keys '0050001036'
# and '30002001' (identical values, so no behaviour change) were removed.
OGONE_ERROR_MAP = {
    '0020001001': "Authorization failed, please retry",
    '0020001002': "Authorization failed, please retry",
    '0020001003': "Authorization failed, please retry",
    '0020001004': "Authorization failed, please retry",
    '0020001005': "Authorization failed, please retry",
    '0020001006': "Authorization failed, please retry",
    '0020001007': "Authorization failed, please retry",
    '0020001008': "Authorization failed, please retry",
    '0020001009': "Authorization failed, please retry",
    '0020001010': "Authorization failed, please retry",
    '0030001999': "Our payment system is currently under maintenance, please try later",
    '0050001005': "Expiry date error",
    '0050001007': "Requested Operation code not allowed",
    '0050001008': "Invalid delay value",
    '0050001010': "Input date in invalid format",
    '0050001013': "Unable to parse socket input stream",
    '0050001014': "Error in parsing stream content",
    '0050001015': "Currency error",
    '0050001016': "Transaction still posted at end of wait",
    '0050001017': "Sync value not compatible with delay value",
    '0050001019': "Transaction duplicate of a pre-existing transaction",
    '0050001020': "Acceptation code empty while required for the transaction",
    '0050001024': "Maintenance acquirer differs from original transaction acquirer",
    '0050001025': "Maintenance merchant differs from original transaction merchant",
    '0050001028': "Maintenance operation not accurate for the original transaction",
    '0050001031': "Host application unknown for the transaction",
    '0050001032': "Unable to perform requested operation with requested currency",
    '0050001033': "Maintenance card number differs from original transaction card number",
    '0050001034': "Operation code not allowed",
    '0050001035': "Exception occurred in socket input stream treatment",
    '0050001036': "Card length does not correspond to an acceptable value for the brand",
    '0050001068': "A technical problem occurred, please contact helpdesk",
    '0050001069': "Invalid check for CardID and Brand",
    '0050001070': "A technical problem occurred, please contact helpdesk",
    '0050001116': "Unknown origin IP",
    '0050001117': "No origin IP detected",
    '0050001118': "Merchant configuration problem, please contact support",
    '10001001': "Communication failure",
    '10001002': "Communication failure",
    '10001003': "Communication failure",
    '10001004': "Communication failure",
    '10001005': "Communication failure",
    '20001001': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001002': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001003': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001004': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001005': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001006': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001007': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001008': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001009': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001010': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001101': "A technical problem occurred, please contact helpdesk",
    '20001105': "We received an unknown status for the transaction. We will contact your acquirer and update the status of the transaction within one working day. Please check the status later.",
    '20001111': "A technical problem occurred, please contact helpdesk",
    '20002001': "Origin for the response of the bank can not be checked",
    '20002002': "Beneficiary account number has been modified during processing",
    '20002003': "Amount has been modified during processing",
    '20002004': "Currency has been modified during processing",
    '20002005': "No feedback from the bank server has been detected",
    '30001001': "Payment refused by the acquirer",
    '30001002': "Duplicate request",
    '30001010': "A technical problem occurred, please contact helpdesk",
    '30001011': "A technical problem occurred, please contact helpdesk",
    '30001012': "Card black listed - Contact acquirer",
    '30001015': "Your merchant's acquirer is temporarily unavailable, please try later or choose another payment method.",
    '30001051': "A technical problem occurred, please contact helpdesk",
    '30001054': "A technical problem occurred, please contact helpdesk",
    '30001057': "Your merchant's acquirer is temporarily unavailable, please try later or choose another payment method.",
    '30001058': "Your merchant's acquirer is temporarily unavailable, please try later or choose another payment method.",
    '30001060': "Aquirer indicates that a failure occured during payment processing",
    '30001070': "RATEPAY Invalid Response Type (Failure)",
    '30001071': "RATEPAY Missing Mandatory status code field (failure)",
    '30001072': "RATEPAY Missing Mandatory Result code field (failure)",
    '30001073': "RATEPAY Response parsing Failed",
    '30001090': "CVC check required by front end and returned invalid by acquirer",
    '30001091': "ZIP check required by front end and returned invalid by acquirer",
    '30001092': "Address check required by front end and returned as invalid by acquirer.",
    '30001100': "Unauthorized buyer's country",
    '30001101': "IP country <> card country",
    '30001102': "Number of different countries too high",
    '30001103': "unauthorized card country",
    '30001104': "unauthorized ip address country",
    '30001105': "Anonymous proxy",
    '30001110': "If the problem persists, please contact Support, or go to paysafecard's card balance page (https://customer.cc.at.paysafecard.com/psccustomer/GetWelcomePanelServlet?language=en) to see when the amount reserved on your card will be available again.",
    '30001120': "IP address in merchant's black list",
    '30001130': "BIN in merchant's black list",
    '30001131': "Wrong BIN for 3xCB",
    '30001140': "Card in merchant's card blacklist",
    '30001141': "Email in blacklist",
    '30001142': "Passenger name in blacklist",
    '30001143': "Card holder name in blacklist",
    '30001144': "Passenger name different from owner name",
    '30001145': "Time to departure too short",
    '30001149': "Card Configured in Card Supplier Limit for another relation (CSL)",
    '30001150': "Card not configured in the system for this customer (CSL)",
    '30001151': "REF1 not allowed for this relationship (Contract number",
    '30001152': "Card/Supplier Amount limit reached (CSL)",
    '30001153': "Card not allowed for this supplier (Date out of contract bounds)",
    '30001154': "You have reached the usage limit allowed",
    '30001155': "You have reached the usage limit allowed",
    '30001156': "You have reached the usage limit allowed",
    '30001157': "Unauthorized IP country for itinerary",
    '30001158': "email usage limit reached",
    '30001159': "Unauthorized card country/IP country combination",
    '30001160': "Postcode in highrisk group",
    '30001161': "generic blacklist match",
    '30001162': "Billing Address is a PO Box",
    '30001180': "maximum scoring reached",
    '30001997': "Authorization canceled by simulation",
    '30001998': "A technical problem occurred, please try again.",
    '30001999': "Your merchant's acquirer is temporarily unavailable, please try later or choose another payment method.",
    '30002001': "Payment refused by the financial institution",
    '30021001': "Call acquirer support call number.",
    '30022001': "Payment must be approved by the acquirer before execution.",
    '30031001': "Invalid merchant number.",
    '30041001': "Retain card.",
    '30051001': "Authorization declined",
    '30071001': "Retain card - special conditions.",
    '30121001': "Invalid transaction",
    '30131001': "Invalid amount",
    '30131002': "You have reached the total amount allowed",
    '30141001': "Invalid card number",
    '30151001': "Unknown acquiring institution.",
    '30171001': "Payment method cancelled by the buyer",
    '30171002': "The maximum time allowed is elapsed.",
    '30191001': "Try again later.",
    '30201001': "A technical problem occurred, please contact helpdesk",
    '30301001': "Invalid format",
    '30311001': "Unknown acquirer ID.",
    '30331001': "Card expired.",
    '30341001': "Suspicion of fraud.",
    '30341002': "Suspicion of fraud (3rdMan)",
    '30341003': "Suspicion of fraud (Perseuss)",
    '30341004': "Suspicion of fraud (ETHOCA)",
    '30381001': "A technical problem occurred, please contact helpdesk",
    '30401001': "Invalid function.",
    '30411001': "Lost card.",
    '30431001': "Stolen card, pick up",
    '30511001': "Insufficient funds.",
    '30521001': "No Authorization. Contact the issuer of your card.",
    '30541001': "Card expired.",
    '30551001': "Invalid PIN.",
    '30561001': "Card not in authorizer's database.",
    '30571001': "Transaction not permitted on card.",
    '30581001': "Transaction not allowed on this terminal",
    '30591001': "Suspicion of fraud.",
    '30601001': "The merchant must contact the acquirer.",
    '30611001': "Amount exceeds card ceiling.",
    '30621001': "Restricted card.",
    '30631001': "Security policy not respected.",
    '30641001': "Amount changed from ref. trn.",
    '30681001': "Tardy response.",
    '30751001': "PIN entered incorrectly too often",
    '30761001': "Card holder already contesting.",
    '30771001': "PIN entry required.",
    '30811001': "Message flow error.",
    '30821001': "Authorization center unavailable",
    '30831001': "Authorization center unavailable",
    '30901001': "Temporary system shutdown.",
    '30911001': "Acquirer unavailable.",
    '30921001': "Invalid card type for acquirer.",
    '30941001': "Duplicate transaction",
    '30961001': "Processing temporarily not possible",
    '30971001': "A technical problem occurred, please contact helpdesk",
    '30981001': "A technical problem occurred, please contact helpdesk",
    '31011001': "Unknown acceptance code",
    '31021001': "Invalid currency",
    '31031001': "Acceptance code missing",
    '31041001': "Inactive card",
    '31051001': "Merchant not active",
    '31061001': "Invalid expiration date",
    '31071001': "Interrupted host communication",
    '31081001': "Card refused",
    '31091001': "Invalid password",
    '31101001': "Plafond transaction (majoré du bonus) dépassé",
    '31111001': "Plafond mensuel (majoré du bonus) dépassé",
    '31121001': "Plafond centre de facturation dépassé",
    '31131001': "Plafond entreprise dépassé",
    '31141001': "Code MCC du fournisseur non autorisé pour la carte",
    '31151001': "Numéro SIRET du fournisseur non autorisé pour la carte",
    '31161001': "This is not a valid online banking account",
    '32001004': "A technical problem occurred, please try again.",
    '34011001': "Bezahlung mit RatePAY nicht möglich.",
    '39991001': "A technical problem occurred, please contact the helpdesk of your acquirer",
    '40001001': "A technical problem occurred, please try again.",
    '40001002': "A technical problem occurred, please try again.",
    '40001003': "A technical problem occurred, please try again.",
    '40001004': "A technical problem occurred, please try again.",
    '40001005': "A technical problem occurred, please try again.",
    '40001006': "A technical problem occurred, please try again.",
    '40001007': "A technical problem occurred, please try again.",
    '40001008': "A technical problem occurred, please try again.",
    '40001009': "A technical problem occurred, please try again.",
    '40001010': "A technical problem occurred, please try again.",
    '40001011': "A technical problem occurred, please contact helpdesk",
    '40001012': "Your merchant's acquirer is temporarily unavailable, please try later or choose another payment method.",
    '40001013': "A technical problem occurred, please contact helpdesk",
    '40001016': "A technical problem occurred, please contact helpdesk",
    '40001018': "A technical problem occurred, please try again.",
    '40001019': "Sorry, an error occurred during processing. Please retry the operation (use back button of the browser). If problem persists, contact your merchant's helpdesk.",
    '40001020': "Sorry, an error occurred during processing. Please retry the operation (use back button of the browser). If problem persists, contact your merchant's helpdesk.",
    '40001050': "A technical problem occurred, please contact helpdesk",
    '40001133': "Authentication failed, the signature of your bank access control server is incorrect",
    '40001134': "Authentication failed, please retry or cancel.",
    '40001135': "Authentication temporary unavailable, please retry or cancel.",
    '40001136': "Technical problem with your browser, please retry or cancel",
    '40001137': "Your bank access control server is temporary unavailable, please retry or cancel",
    '40001998': "Temporary technical problem. Please retry a little bit later.",
    '50001001': "Unknown card type",
    '50001002': "Card number format check failed for given card number.",
    '50001003': "Merchant data error",
    '50001004': "Merchant identification missing",
    '50001005': "Expiry date error",
    '50001006': "Amount is not a number",
    '50001007': "A technical problem occurred, please contact helpdesk",
    '50001008': "A technical problem occurred, please contact helpdesk",
    '50001009': "A technical problem occurred, please contact helpdesk",
    '50001010': "A technical problem occurred, please contact helpdesk",
    '50001011': "Brand not supported for that merchant",
    '50001012': "A technical problem occurred, please contact helpdesk",
    '50001013': "A technical problem occurred, please contact helpdesk",
    '50001014': "A technical problem occurred, please contact helpdesk",
    '50001015': "Invalid currency code",
    '50001016': "A technical problem occurred, please contact helpdesk",
    '50001017': "A technical problem occurred, please contact helpdesk",
    '50001018': "A technical problem occurred, please contact helpdesk",
    '50001019': "A technical problem occurred, please contact helpdesk",
    '50001020': "A technical problem occurred, please contact helpdesk",
    '50001021': "A technical problem occurred, please contact helpdesk",
    '50001022': "A technical problem occurred, please contact helpdesk",
    '50001023': "A technical problem occurred, please contact helpdesk",
    '50001024': "A technical problem occurred, please contact helpdesk",
    '50001025': "A technical problem occurred, please contact helpdesk",
    '50001026': "A technical problem occurred, please contact helpdesk",
    '50001027': "A technical problem occurred, please contact helpdesk",
    '50001028': "A technical problem occurred, please contact helpdesk",
    '50001029': "A technical problem occurred, please contact helpdesk",
    '50001030': "A technical problem occurred, please contact helpdesk",
    '50001031': "A technical problem occurred, please contact helpdesk",
    '50001032': "A technical problem occurred, please contact helpdesk",
    '50001033': "A technical problem occurred, please contact helpdesk",
    '50001034': "A technical problem occurred, please contact helpdesk",
    '50001035': "A technical problem occurred, please contact helpdesk",
    '50001036': "Card length does not correspond to an acceptable value for the brand",
    '50001037': "Purchasing card number for a regular merchant",
    '50001038': "Non Purchasing card for a Purchasing card merchant",
    '50001039': "Details sent for a non-Purchasing card merchant, please contact helpdesk",
    '50001040': "Details not sent for a Purchasing card transaction, please contact helpdesk",
    '50001041': "Payment detail validation failed",
    '50001042': "Given transactions amounts (tax,discount,shipping,net,etc…) do not compute correctly together",
    '50001043': "A technical problem occurred, please contact helpdesk",
    '50001044': "No acquirer configured for this operation",
    '50001045': "No UID configured for this operation",
    '50001046': "Operation not allowed for the merchant",
    '50001047': "A technical problem occurred, please contact helpdesk",
    '50001048': "A technical problem occurred, please contact helpdesk",
    '50001049': "A technical problem occurred, please contact helpdesk",
    '50001050': "A technical problem occurred, please contact helpdesk",
    '50001051': "A technical problem occurred, please contact helpdesk",
    '50001052': "A technical problem occurred, please contact helpdesk",
    '50001053': "A technical problem occurred, please contact helpdesk",
    '50001054': "Card number incorrect or incompatible",
    '50001055': "A technical problem occurred, please contact helpdesk",
    '50001056': "A technical problem occurred, please contact helpdesk",
    '50001057': "A technical problem occurred, please contact helpdesk",
    '50001058': "A technical problem occurred, please contact helpdesk",
    '50001059': "A technical problem occurred, please contact helpdesk",
    '50001060': "A technical problem occurred, please contact helpdesk",
    '50001061': "A technical problem occurred, please contact helpdesk",
    '50001062': "A technical problem occurred, please contact helpdesk",
    '50001063': "Card Issue Number does not correspond to range or not present",
    '50001064': "Start Date not valid or not present",
    '50001066': "Format of CVC code invalid",
    '50001067': "The merchant is not enrolled for 3D-Secure",
    '50001068': "The card number or account number (PAN) is invalid",
    '50001069': "Invalid check for CardID and Brand",
    '50001070': "The ECI value given is either not supported, or in conflict with other data in the transaction",
    '50001071': "Incomplete TRN demat",
    '50001072': "Incomplete PAY demat",
    '50001073': "No demat APP",
    '50001074': "Authorisation too old",
    '50001075': "VERRes was an error message",
    '50001076': "DCP amount greater than authorisation amount",
    '50001077': "Details negative amount",
    '50001078': "Details negative quantity",
    '50001079': "Could not decode/decompress received PARes (3D-Secure)",
    '50001080': "Received PARes was an erereor message from ACS (3D-Secure)",
    '50001081': "Received PARes format was invalid according to the 3DS specifications (3D-Secure)",
    '50001082': "PAReq/PARes reconciliation failure (3D-Secure)",
    '50001084': "Maximum amount reached",
    '50001087': "The transaction type requires authentication, please check with your bank.",
    '50001090': "CVC missing at input, but CVC check asked",
    '50001091': "ZIP missing at input, but ZIP check asked",
    '50001092': "Address missing at input, but Address check asked",
    '50001095': "Invalid date of birth",
    '50001096': "Invalid commodity code",
    '50001097': "The requested currency and brand are incompatible.",
    '50001111': "Data validation error",
    '50001113': "This order has already been processed",
    '50001114': "Error pre-payment check page access",
    '50001115': "Request not received in secure mode",
    '50001116': "Unknown IP address origin",
    '50001117': "NO IP address origin",
    '50001118': "Pspid not found or not correct",
    '50001119': "Password incorrect or disabled due to numbers of errors",
    '50001120': "Invalid currency",
    '50001121': "Invalid number of decimals for the currency",
    '50001122': "Currency not accepted by the merchant",
    '50001123': "Card type not active",
    '50001124': "Number of lines don't match with number of payments",
    '50001125': "Format validation error",
    '50001126': "Overflow in data capture requests for the original order",
    '50001127': "The original order is not in a correct status",
    '50001128': "missing authorization code for unauthorized order",
    '50001129': "Overflow in refunds requests",
    '50001130': "Error access to original order",
    '50001131': "Error access to original history item",
    '50001132': "The Selected Catalog is empty",
    '50001133': "Duplicate request",
    '50001134': "Authentication failed, please retry or cancel.",
    '50001135': "Authentication temporary unavailable, please retry or cancel.",
    '50001136': "Technical problem with your browser, please retry or cancel",
    '50001137': "Your bank access control server is temporary unavailable, please retry or cancel",
    '50001150': "Fraud Detection, Technical error (IP not valid)",
    '50001151': "Fraud detection : technical error (IPCTY unknown or error)",
    '50001152': "Fraud detection : technical error (CCCTY unknown or error)",
    '50001153': "Overflow in redo-authorisation requests",
    '50001170': "Dynamic BIN check failed",
    '50001171': "Dynamic country check failed",
    '50001172': "Error in Amadeus signature",
    '50001174': "Card Holder Name is too long",
    '50001175': "Name contains invalid characters",
    '50001176': "Card number is too long",
    '50001177': "Card number contains non-numeric info",
    '50001178': "Card Number Empty",
    '50001179': "CVC too long",
    '50001180': "CVC contains non-numeric info",
    '50001181': "Expiration date contains non-numeric info",
    '50001182': "Invalid expiration month",
    '50001183': "Expiration date must be in the future",
    '50001184': "SHA Mismatch",
    '50001205': "Missing mandatory fields for billing address.",
    '50001206': "Missing mandatory field date of birth.",
    '50001207': "Missing required shopping basket details.",
    '50001208': "Missing social security number",
    '50001209': "Invalid country code",
    '50001210': "Missing yearly salary",
    '50001211': "Missing gender",
    '50001212': "Missing email",
    '50001213': "Missing IP address",
    '50001214': "Missing part payment campaign ID",
    '50001215': "Missing invoice number",
    '50001216': "The alias must be different than the card number",
    '60000001': "account number unknown",
    '60000003': "not credited dd-mm-yy",
    '60000005': "name/number do not correspond",
    '60000007': "account number blocked",
    '60000008': "specific direct debit block",
    '60000009': "account number WKA",
    '60000010': "administrative reason",
    '60000011': "account number expired",
    '60000012': "no direct debit authorisation given",
    '60000013': "debit not approved",
    '60000014': "double payment",
    '60000018': "name/address/city not entered",
    '60001001': "no original direct debit for revocation",
    '60001002': "payer’s account number format error",
    '60001004': "payer’s account at different bank",
    '60001005': "payee’s account at different bank",
    '60001006': "payee’s account number format error",
    '60001007': "payer’s account number blocked",
    '60001008': "payer’s account number expired",
    '60001009': "payee’s account number expired",
    '60001010': "direct debit not possible",
    '60001011': "creditor payment not possible",
    '60001012': "payer’s account number unknown WKA-number",
    '60001013': "payee’s account number unknown WKA-number",
    '60001014': "impermissible WKA transaction",
    '60001015': "period for revocation expired",
    '60001017': "reason for revocation not correct",
    '60001018': "original run number not numeric",
    '60001019': "payment ID incorrect",
    '60001020': "amount not numeric",
    '60001021': "amount zero not permitted",
    '60001022': "negative amount not permitted",
    '60001023': "payer and payee giro account number",
    '60001025': "processing code (verwerkingscode) incorrect",
    '60001028': "revocation not permitted",
    '60001029': "guaranteed direct debit on giro account number",
    '60001030': "NBC transaction type incorrect",
    '60001031': "description too large",
    '60001032': "book account number not issued",
    '60001034': "book account number incorrect",
    '60001035': "payer’s account number not numeric",
    '60001036': "payer’s account number not eleven-proof",
    '60001037': "payer’s account number not issued",
    '60001039': "payer’s account number of DNB/BGC/BLA",
    '60001040': "payee’s account number not numeric",
    '60001041': "payee’s account number not eleven-proof",
    '60001042': "payee’s account number not issued",
    '60001044': "payee’s account number unknown",
    '60001050': "payee’s name missing",
    '60001051': "indicate payee’s bank account number instead of 3102",
    '60001052': "no direct debit contract",
    '60001053': "amount beyond bounds",
    '60001054': "selective direct debit block",
    '60001055': "original run number unknown",
    '60001057': "payer’s name missing",
    '60001058': "payee’s account number missing",
    '60001059': "restore not permitted",
    '60001060': "bank’s reference (navraaggegeven) missing",
    '60001061': "BEC/GBK number incorrect",
    '60001062': "BEC/GBK code incorrect",
    '60001087': "book account number not numeric",
    '60001090': "cancelled on request",
    '60001091': "cancellation order executed",
    '60001092': "cancelled instead of bended",
    '60001093': "book account number is a shortened account number",
    '60001094': "instructing party account number not identical with payer",
    '60001095': "payee unknown GBK acceptor",
    '60001097': "instructing party account number not identical with payee",
    '60001099': "clearing not permitted",
    '60001101': "payer’s account number not spaces",
    '60001102': "PAN length not numeric",
    '60001103': "PAN length outside limits",
    '60001104': "track number not numeric",
    '60001105': "track number not valid",
    '60001106': "PAN sequence number not numeric",
    '60001107': "domestic PAN not numeric",
    '60001108': "domestic PAN not eleven-proof",
    '60001109': "domestic PAN not issued",
    '60001110': "foreign PAN not numeric",
    '60001111': "card valid date not numeric",
    '60001112': "book period number (boekperiodenr) not numeric",
    '60001113': "transaction number not numeric",
    '60001114': "transaction time not numeric",
    '60001115': "transaction no valid time",
    '60001116': "transaction date not numeric",
    '60001117': "transaction no valid date",
    '60001118': "STAN not numeric",
    '60001119': "instructing party’s name missing",
    '60001120': "foreign amount (bedrag-vv) not numeric",
    '60001122': "rate (verrekenkoers) not numeric",
    '60001125': "number of decimals (aantaldecimalen) incorrect",
    '60001126': "tariff (tarifering) not B/O/S",
    '60001127': "domestic costs (kostenbinnenland) not numeric",
    '60001128': "domestic costs (kostenbinnenland) not higher than zero",
    '60001129': "foreign costs (kostenbuitenland) not numeric",
    '60001130': "foreign costs (kostenbuitenland) not higher than zero",
    '60001131': "domestic costs (kostenbinnenland) not zero",
    '60001132': "foreign costs (kostenbuitenland) not zero",
    '60001134': "Euro record not fully filled in",
    '60001135': "Client currency incorrect",
    '60001136': "Amount NLG not numeric",
    '60001137': "Amount NLG not higher than zero",
    '60001138': "Amount NLG not equal to Amount",
    '60001139': "Amount NLG incorrectly converted",
    '60001140': "Amount EUR not numeric",
    '60001141': "Amount EUR not greater than zero",
    '60001142': "Amount EUR not equal to Amount",
    '60001143': "Amount EUR incorrectly converted",
    '60001144': "Client currency not NLG",
    '60001145': "rate euro-vv (Koerseuro-vv) not numeric",
    '60001146': "comma rate euro-vv (Kommakoerseuro-vv) incorrect",
    '60001147': "acceptgiro distributor not valid",
    '60001148': "Original run number and/or BRN are missing",
    '60001149': "Amount/Account number/ BRN different",
    '60001150': "Direct debit already revoked/restored",
    '60001151': "Direct debit already reversed/revoked/restored",
    '60001153': "Payer’s account number not known",
}
# Ogone code meaning the submitted payment data failed validation
# (maps to "Data validation error" in OGONE_ERROR_MAP).
DATA_VALIDATION_ERROR = '50001111'
def retryable(error):
return error in [
'0020001001', '0020001002', '0020001003', '0020001004', '0020001005',
'0020001006', '0020001007', '0020001008', '0020001009', '0020001010',
'30001010', '30001011', '30001015',
'30001057', '30001058',
'30001998', '30001999',
#'30611001', # amount exceeds card limit
'30961001',
'40001001', '40001002', '40001003', '40001004', '40001005',
'40001006', '40001007', '40001008', '40001009', '40001010',
'40001012',
'40001018', '40001019', '40001020',
'40001134', '40001135', '40001136', '40001137',
#'50001174', # cardholder name too long
]
|
Team02-TeamGuinness/BIOE421_RoboHand
|
refs/heads/master
|
ablation-GUI/CONFIG/2015-January-31/06-01-50 PM C3-3_12-well-CONFIG.py
|
1
|
#File
fname = 'C3-3_12-well.gcode'
f=open(fname,'w')
#Laser Parameters
laserPower = 15 #% max power
dwellTime = 15 #ms
x_start = 416
y_start = 343
z_start = 122.10 #mm above home
pauseTime = 500 #ms; time paused after movement before ablation
feedRate = 500 #movement speed
# Rectangle size properties
rectLength = 23 #mm; x-direction
rectWidth = 23 #mm; y-direction
spaceSmall = 3 #mm; space between rectangles
hexLength = 1.000 #mm
#Other
relative = 0 #0 for homing before beginning. 1 if machine has already been homed
|
atlashealth/ansible-modules-extras
|
refs/heads/devel
|
__init__.py
|
12133432
| |
BQLQ/BQLQ
|
refs/heads/master
|
mezzanine-4.2.3/mezzanine/generic/migrations/__init__.py
|
12133432
| |
gaste/dwasp
|
refs/heads/master
|
tests/wasp1/AllAnswerSets/aggregates_count_grounding_4.test.py
|
3
|
input = """
b(1). b(2).
c(a). c(b).
a(A,B) v na(A,B) :- c(A), b(B).
:- 1 < #count{A:a(A,B)}, b(B).
"""
output = """
{a(a,1), a(a,2), b(1), b(2), c(a), c(b), na(b,1), na(b,2)}
{a(a,1), a(b,2), b(1), b(2), c(a), c(b), na(a,2), na(b,1)}
{a(a,1), b(1), b(2), c(a), c(b), na(a,2), na(b,1), na(b,2)}
{a(a,2), a(b,1), b(1), b(2), c(a), c(b), na(a,1), na(b,2)}
{a(a,2), b(1), b(2), c(a), c(b), na(a,1), na(b,1), na(b,2)}
{a(b,1), a(b,2), b(1), b(2), c(a), c(b), na(a,1), na(a,2)}
{a(b,1), b(1), b(2), c(a), c(b), na(a,1), na(a,2), na(b,2)}
{a(b,2), b(1), b(2), c(a), c(b), na(a,1), na(a,2), na(b,1)}
{b(1), b(2), c(a), c(b), na(a,1), na(a,2), na(b,1), na(b,2)}
"""
|
gangadhar-kadam/laganerp
|
refs/heads/master
|
erpnext/stock/doctype/material_request/test_material_request.py
|
13
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# ERPNext - web based ERP (http://erpnext.com)
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, unittest
from frappe.utils import flt
class TestMaterialRequest(unittest.TestCase):
    """Integration tests for the Material Request doctype.

    Covers mapping a Material Request (MR) into Purchase Orders, Supplier
    Quotations and Stock Entries, and checks that ordered_qty, the
    per_ordered percentage and Bin.indented_qty stay in sync across
    submit/stop/cancel transitions.

    NOTE(review): these tests need a live frappe test site/database and the
    `test_records` fixture loaded at module level -- they cannot run in
    isolation.
    """

    def setUp(self):
        # Keep perpetual-inventory (auto accounting for stock) out of the way.
        frappe.defaults.set_global_default("auto_accounting_for_stock", 0)

    def test_make_purchase_order(self):
        """A draft MR cannot be mapped; a submitted one maps row-for-row."""
        from erpnext.stock.doctype.material_request.material_request import make_purchase_order
        mr = frappe.copy_doc(test_records[0]).insert()
        # mapping a draft (docstatus 0) request must be rejected
        self.assertRaises(frappe.ValidationError, make_purchase_order,
            mr.name)
        mr = frappe.get_doc("Material Request", mr.name)
        mr.submit()
        po = make_purchase_order(mr.name)
        self.assertEquals(po.doctype, "Purchase Order")
        self.assertEquals(len(po.get("po_details")), len(mr.get("indent_details")))

    def test_make_supplier_quotation(self):
        """Same draft-vs-submitted behaviour for Supplier Quotation mapping."""
        from erpnext.stock.doctype.material_request.material_request import make_supplier_quotation
        mr = frappe.copy_doc(test_records[0]).insert()
        self.assertRaises(frappe.ValidationError, make_supplier_quotation, mr.name)
        mr = frappe.get_doc("Material Request", mr.name)
        mr.submit()
        sq = make_supplier_quotation(mr.name)
        self.assertEquals(sq.doctype, "Supplier Quotation")
        self.assertEquals(len(sq.get("quotation_items")), len(mr.get("indent_details")))

    def test_make_stock_entry(self):
        """Stock Entry mapping requires a submitted MR of type Transfer."""
        from erpnext.stock.doctype.material_request.material_request import make_stock_entry
        mr = frappe.copy_doc(test_records[0]).insert()
        self.assertRaises(frappe.ValidationError, make_stock_entry,
            mr.name)
        mr = frappe.get_doc("Material Request", mr.name)
        mr.material_request_type = "Transfer"
        mr.submit()
        se = make_stock_entry(mr.name)
        self.assertEquals(se.doctype, "Stock Entry")
        self.assertEquals(len(se.get("mtn_details")), len(mr.get("indent_details")))

    def _test_requested_qty(self, qty1, qty2):
        """Assert Bin.indented_qty for the two fixture items in _Test Warehouse."""
        self.assertEqual(flt(frappe.db.get_value("Bin", {"item_code": "_Test Item Home Desktop 100",
            "warehouse": "_Test Warehouse - _TC"}, "indented_qty")), qty1)
        self.assertEqual(flt(frappe.db.get_value("Bin", {"item_code": "_Test Item Home Desktop 200",
            "warehouse": "_Test Warehouse - _TC"}, "indented_qty")), qty2)

    def _insert_stock_entry(self, qty1, qty2):
        """Receive qty1/qty2 of the two fixture items into _Test Warehouse 1."""
        se = frappe.get_doc({
            "company": "_Test Company",
            "doctype": "Stock Entry",
            "posting_date": "2013-03-01",
            "posting_time": "00:00:00",
            "purpose": "Material Receipt",
            "fiscal_year": "_Test Fiscal Year 2013",
            "mtn_details": [
                {
                    "conversion_factor": 1.0,
                    "doctype": "Stock Entry Detail",
                    "item_code": "_Test Item Home Desktop 100",
                    "parentfield": "mtn_details",
                    "incoming_rate": 100,
                    "qty": qty1,
                    "stock_uom": "_Test UOM 1",
                    "transfer_qty": qty1,
                    "uom": "_Test UOM 1",
                    "t_warehouse": "_Test Warehouse 1 - _TC",
                },
                {
                    "conversion_factor": 1.0,
                    "doctype": "Stock Entry Detail",
                    "item_code": "_Test Item Home Desktop 200",
                    "parentfield": "mtn_details",
                    "incoming_rate": 100,
                    "qty": qty2,
                    "stock_uom": "_Test UOM 1",
                    "transfer_qty": qty2,
                    "uom": "_Test UOM 1",
                    "t_warehouse": "_Test Warehouse 1 - _TC",
                }
            ]
        })
        se.insert()
        se.submit()

    def test_completed_qty_for_purchase(self):
        """per_ordered/ordered_qty/indented_qty follow PO submit and cancel."""
        frappe.db.sql("""delete from `tabBin`""")
        # submit material request of type Purchase
        mr = frappe.copy_doc(test_records[0])
        mr.insert()
        mr.submit()
        # check if per complete is None
        self.assertEquals(mr.per_ordered, None)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 0)
        self._test_requested_qty(54.0, 3.0)
        # map a purchase order (partial quantities: 27 of 54, 1.5 of 3)
        from erpnext.stock.doctype.material_request.material_request import make_purchase_order
        po_doc = make_purchase_order(mr.name)
        po_doc.supplier = "_Test Supplier"
        po_doc.transaction_date = "2013-07-07"
        po_doc.get("po_details")[0].qty = 27.0
        po_doc.get("po_details")[1].qty = 1.5
        po_doc.get("po_details")[0].schedule_date = "2013-07-09"
        po_doc.get("po_details")[1].schedule_date = "2013-07-09"
        # check for stopped status of Material Request
        po = frappe.copy_doc(po_doc)
        po.insert()
        po.load_from_db()
        mr.update_status('Stopped')
        self.assertRaises(frappe.InvalidStatusError, po.submit)
        frappe.db.set(po, "docstatus", 1)
        self.assertRaises(frappe.InvalidStatusError, po.cancel)
        # resubmit and check for per complete
        mr.load_from_db()
        mr.update_status('Submitted')
        po = frappe.copy_doc(po_doc)
        po.insert()
        po.submit()
        # check if per complete is as expected (27/54 == 50%)
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, 50)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 27.0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 1.5)
        self._test_requested_qty(27.0, 1.5)
        po.cancel()
        # check if per complete is as expected (back to nothing ordered)
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, None)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, None)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, None)
        self._test_requested_qty(54.0, 3.0)

    def test_completed_qty_for_transfer(self):
        """Same bookkeeping when the MR is fulfilled via Stock Entry transfer."""
        frappe.db.sql("""delete from `tabBin`""")
        frappe.db.sql("""delete from `tabStock Ledger Entry`""")
        # submit material request of type Purchase
        mr = frappe.copy_doc(test_records[0])
        mr.material_request_type = "Transfer"
        mr.insert()
        mr.submit()
        # check if per complete is None
        self.assertEquals(mr.per_ordered, None)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 0)
        self._test_requested_qty(54.0, 3.0)
        from erpnext.stock.doctype.material_request.material_request import make_stock_entry
        # map a stock entry
        se_doc = make_stock_entry(mr.name)
        se_doc.update({
            "posting_date": "2013-03-01",
            "posting_time": "01:00",
            "fiscal_year": "_Test Fiscal Year 2013",
        })
        se_doc.get("mtn_details")[0].update({
            "qty": 27.0,
            "transfer_qty": 27.0,
            "s_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        se_doc.get("mtn_details")[1].update({
            "qty": 1.5,
            "transfer_qty": 1.5,
            "s_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        # make available the qty in _Test Warehouse 1 before transfer
        self._insert_stock_entry(27.0, 1.5)
        # check for stopped status of Material Request
        se = frappe.copy_doc(se_doc)
        se.insert()
        mr.update_status('Stopped')
        self.assertRaises(frappe.InvalidStatusError, se.submit)
        mr.update_status('Submitted')
        se.ignore_validate_update_after_submit = True
        se.submit()
        mr.update_status('Stopped')
        self.assertRaises(frappe.InvalidStatusError, se.cancel)
        mr.update_status('Submitted')
        se = frappe.copy_doc(se_doc)
        se.insert()
        se.submit()
        # check if per complete is as expected
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, 50)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 27.0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 1.5)
        self._test_requested_qty(27.0, 1.5)
        # check if per complete is as expected for Stock Entry cancelled
        se.cancel()
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, 0)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 0)
        self._test_requested_qty(54.0, 3.0)

    def test_completed_qty_for_over_transfer(self):
        """Transferring more than requested caps per_ordered at 100%."""
        frappe.db.sql("""delete from `tabBin`""")
        frappe.db.sql("""delete from `tabStock Ledger Entry`""")
        # submit material request of type Purchase
        mr = frappe.copy_doc(test_records[0])
        mr.material_request_type = "Transfer"
        mr.insert()
        mr.submit()
        # check if per complete is None
        self.assertEquals(mr.per_ordered, None)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 0)
        self._test_requested_qty(54.0, 3.0)
        # map a stock entry (60 > the 54 requested)
        from erpnext.stock.doctype.material_request.material_request import make_stock_entry
        se_doc = make_stock_entry(mr.name)
        se_doc.update({
            "posting_date": "2013-03-01",
            "posting_time": "00:00",
            "fiscal_year": "_Test Fiscal Year 2013",
        })
        se_doc.get("mtn_details")[0].update({
            "qty": 60.0,
            "transfer_qty": 60.0,
            "s_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        se_doc.get("mtn_details")[1].update({
            "qty": 3.0,
            "transfer_qty": 3.0,
            "s_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        # make available the qty in _Test Warehouse 1 before transfer
        self._insert_stock_entry(60.0, 3.0)
        # check for stopped status of Material Request
        se = frappe.copy_doc(se_doc)
        se.insert()
        mr.update_status('Stopped')
        self.assertRaises(frappe.InvalidStatusError, se.submit)
        self.assertRaises(frappe.InvalidStatusError, se.cancel)
        mr.update_status('Submitted')
        se = frappe.copy_doc(se_doc)
        se.insert()
        se.submit()
        # check if per complete is as expected
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, 100)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 60.0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 3.0)
        self._test_requested_qty(0.0, 0.0)
        # check if per complete is as expected for Stock Entry cancelled
        se.cancel()
        mr.load_from_db()
        self.assertEquals(mr.per_ordered, 0)
        self.assertEquals(mr.get("indent_details")[0].ordered_qty, 0)
        self.assertEquals(mr.get("indent_details")[1].ordered_qty, 0)
        self._test_requested_qty(54.0, 3.0)

    def test_incorrect_mapping_of_stock_entry(self):
        """A mapped Stock Entry whose warehouses contradict the MR must not insert."""
        # submit material request of type Purchase
        mr = frappe.copy_doc(test_records[0])
        mr.material_request_type = "Transfer"
        mr.insert()
        mr.submit()
        # map a stock entry
        from erpnext.stock.doctype.material_request.material_request import make_stock_entry
        se_doc = make_stock_entry(mr.name)
        se_doc.update({
            "posting_date": "2013-03-01",
            "posting_time": "00:00",
            "fiscal_year": "_Test Fiscal Year 2013",
        })
        # first row's target warehouse deliberately differs from the MR's
        se_doc.get("mtn_details")[0].update({
            "qty": 60.0,
            "transfer_qty": 60.0,
            "s_warehouse": "_Test Warehouse - _TC",
            "t_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        se_doc.get("mtn_details")[1].update({
            "qty": 3.0,
            "transfer_qty": 3.0,
            "s_warehouse": "_Test Warehouse 1 - _TC",
            "incoming_rate": 1.0
        })
        # check for stopped status of Material Request
        se = frappe.copy_doc(se_doc)
        self.assertRaises(frappe.MappingMismatchError, se.insert)

    def test_warehouse_company_validation(self):
        """The MR's warehouses must belong to the MR's own company."""
        from erpnext.stock.utils import InvalidWarehouseCompany
        mr = frappe.copy_doc(test_records[0])
        mr.company = "_Test Company 1"
        self.assertRaises(InvalidWarehouseCompany, mr.insert)
test_dependencies = ["Currency Exchange"]
test_records = frappe.get_test_records('Material Request')
|
debtcollective/debtcollective-web
|
refs/heads/master
|
be/proj/envconfig.py
|
2
|
import logging
import os
import re
from types import NoneType
logger = logging.getLogger(__name__)
def get_envconfig(globals_):
    """For use in settings.py: allow any UPPER_CASE setting in *globals_* to
    be overridden by an identically named process environment variable.

    The environment string is coerced to the type of the existing setting:

    * bool settings accept the usual "false" spellings (``f``, ``false``,
      ``off``, ``0``, ``n``, ``no``, any case, or the empty string) as False
      and treat anything else as True;
    * settings whose current value is None have no type to preserve and are
      kept as strings;
    * every other type is applied as ``dest_type(value)`` (so an int setting
      gets ``int(value)``, etc.).

    Mutates *globals_* in place.
    """
    # Iterate over a snapshot of the keys: we assign into globals_ below.
    for var in list(globals_):
        # Only names that look like settings constants are considered.
        if not re.match(r'^[A-Z0-9_]+$', var):
            continue
        if var not in os.environ:
            continue
        value = os.environ[var]
        dest_type = type(globals_[var])
        if dest_type is bool:
            # permit many sensible 'no/false' settings; anything else is True
            value = not re.match(r'^(f(alse)?|off|0|n(o)?)?$', value, re.I)
        elif dest_type is type(None):
            # was `from types import NoneType` -- that import only exists on
            # Python 2 and 3.10+; type(None) works everywhere
            value = str(value)
        else:
            # type-preserving assign
            value = dest_type(value)
        globals_[var] = value
|
qmagico/gae-migrations
|
refs/heads/master
|
tests/my/migrations_run_twice/__init__.py
|
2
|
__author__ = 'tacio'
|
sumspr/scikit-learn
|
refs/heads/master
|
examples/linear_model/plot_sgd_loss_functions.py
|
249
|
"""
==========================
SGD: convex loss functions
==========================
A plot that compares the various convex loss functions supported by
:class:`sklearn.linear_model.SGDClassifier` .
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
def modified_huber_loss(y_true, y_pred):
    """Modified Huber loss as a function of the margin z = y_true * y_pred.

    Zero for z >= 1, squared hinge (1 - z)**2 on -1 <= z < 1, and the
    linear tail -4 * z for z < -1.
    """
    margin = y_true * y_pred
    return np.where(
        margin >= 1.0,
        0.0,
        np.where(margin >= -1.0, (1.0 - margin) ** 2, -4.0 * margin),
    )
# Plot each convex loss as a function of the decision value over [-4, 4].
xmin, xmax = -4, 4
xx = np.linspace(xmin, xmax, 100)
# Zero-one loss is a step function, drawn from its explicit corner points.
plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
         label="Zero-one loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
         label="Hinge loss")
plt.plot(xx, -np.minimum(xx, 0), 'm-',
         label="Perceptron loss")
plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
         label="Log loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
         label="Squared hinge loss")
# y_true fixed to 1, so the x-axis is directly the margin.
plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
         label="Modified Huber loss")
plt.ylim((0, 8))
plt.legend(loc="upper right")
plt.xlabel(r"Decision function $f(x)$")
plt.ylabel("$L(y, f(x))$")
plt.show()
|
dpetker/adventofcode
|
refs/heads/master
|
2018/src/__init__.py
|
12133432
| |
jameswatt2008/jameswatt2008.github.io
|
refs/heads/master
|
python/Python核心编程/网络编程/截图和代码/概述、SOCKET/多进程copy文件/test-复件/_threading_local.py
|
923
|
"""Thread-local objects.
(Note that this module provides a Python version of the threading.local
class. Depending on the version of Python you're using, there may be a
faster one available. You should always import the `local` class from
`threading`.)
Thread-local objects support the management of thread-local data.
If you have data that you want to be local to a thread, simply create
a thread-local object and use its attributes:
>>> mydata = local()
>>> mydata.number = 42
>>> mydata.number
42
You can also access the local-object's dictionary:
>>> mydata.__dict__
{'number': 42}
>>> mydata.__dict__.setdefault('widgets', [])
[]
>>> mydata.widgets
[]
What's important about thread-local objects is that their data are
local to a thread. If we access the data in a different thread:
>>> log = []
>>> def f():
... items = sorted(mydata.__dict__.items())
... log.append(items)
... mydata.number = 11
... log.append(mydata.number)
>>> import threading
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[], 11]
we get different data. Furthermore, changes made in the other thread
don't affect data seen in this thread:
>>> mydata.number
42
Of course, values you get from a local object, including a __dict__
attribute, are for whatever thread was current at the time the
attribute was read. For that reason, you generally don't want to save
these values across threads, as they apply only to the thread they
came from.
You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
... initialized = False
... def __init__(self, **kw):
... if self.initialized:
... raise SystemError('__init__ called too many times')
... self.initialized = True
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
This can be useful to support default values, methods and
initialization. Note that if you define an __init__ method, it will be
called each time the local object is used in a separate thread. This
is necessary to initialize each thread's dictionary.
Now if we create a local object:
>>> mydata = MyLocal(color='red')
Now we have a default number:
>>> mydata.number
2
an initial color:
>>> mydata.color
'red'
>>> del mydata.color
And a method that operates on the data:
>>> mydata.squared()
4
As before, we can access the data in a separate thread:
>>> log = []
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[('color', 'red'), ('initialized', True)], 11]
without affecting this thread's data:
>>> mydata.number
2
>>> mydata.color
Traceback (most recent call last):
...
AttributeError: 'MyLocal' object has no attribute 'color'
Note that subclasses can define slots, but they are not thread
local. They are shared across threads:
>>> class MyLocal(local):
... __slots__ = 'number'
>>> mydata = MyLocal()
>>> mydata.number = 42
>>> mydata.color = 'red'
So, the separate thread:
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
affects what we see:
>>> mydata.number
11
>>> del mydata
"""
from weakref import ref
from contextlib import contextmanager
__all__ = ["local"]
# We need to use objects from the threading module, but the threading
# module may also want to use our `local` class, if support for locals
# isn't compiled in to the `thread` module. This creates potential problems
# with circular imports. For that reason, we don't import `threading`
# until the bottom of this file (a hack sufficient to worm around the
# potential problems). Note that all platforms on CPython do have support
# for locals in the `thread` module, and there is no circular import problem
# then, so problems introduced by fiddling the order of imports here won't
# manifest.
class _localimpl:
    """A class managing thread-local dicts"""
    __slots__ = 'key', 'dicts', 'localargs', 'locallock', '__weakref__'

    def __init__(self):
        # The key used in the Thread objects' attribute dicts.
        # We keep it a string for speed but make it unlikely to clash with
        # a "real" attribute.
        self.key = '_threading_local._localimpl.' + str(id(self))
        # { id(Thread) -> (ref(Thread), thread-local dict) }
        self.dicts = {}

    def get_dict(self):
        """Return the dict for the current thread. Raises KeyError if none
        defined."""
        thread = current_thread()
        return self.dicts[id(thread)][1]

    def create_dict(self):
        """Create a new dict for the current thread, and return it."""
        localdict = {}
        key = self.key
        thread = current_thread()
        idt = id(thread)
        # Two weakref callbacks break the reference cycle between the local
        # object and the thread: whichever dies first cleans up its side.
        def local_deleted(_, key=key):
            # When the localimpl is deleted, remove the thread attribute.
            thread = wrthread()
            if thread is not None:
                del thread.__dict__[key]
        def thread_deleted(_, idt=idt):
            # When the thread is deleted, remove the local dict.
            # Note that this is suboptimal if the thread object gets
            # caught in a reference loop. We would like to be called
            # as soon as the OS-level thread ends instead.
            local = wrlocal()
            if local is not None:
                dct = local.dicts.pop(idt)
        wrlocal = ref(self, local_deleted)
        wrthread = ref(thread, thread_deleted)
        thread.__dict__[key] = wrlocal
        self.dicts[idt] = wrthread, localdict
        return localdict
@contextmanager
def _patch(self):
    # Swap the calling thread's private dict into self.__dict__ for the
    # duration of one attribute access on a `local` instance.  On first use
    # from a new thread the dict is created and __init__ re-run with the
    # arguments originally passed to the constructor.
    impl = object.__getattribute__(self, '_local__impl')
    try:
        dct = impl.get_dict()
    except KeyError:
        dct = impl.create_dict()
        args, kw = impl.localargs
        self.__init__(*args, **kw)
    with impl.locallock:
        object.__setattr__(self, '__dict__', dct)
        yield
class local:
    """A pure-Python thread-local object; see the module docstring."""
    __slots__ = '_local__impl', '__dict__'

    def __new__(cls, *args, **kw):
        # Constructor args only make sense if a subclass defines __init__
        # (it is re-invoked per thread by _patch).
        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")
        self = object.__new__(cls)
        impl = _localimpl()
        impl.localargs = (args, kw)
        impl.locallock = RLock()
        object.__setattr__(self, '_local__impl', impl)
        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        impl.create_dict()
        return self

    def __getattribute__(self, name):
        with _patch(self):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__delattr__(self, name)
from threading import current_thread, RLock
|
gauravbose/digital-menu
|
refs/heads/master
|
digimenu2/django/contrib/syndication/__init__.py
|
808
|
default_app_config = 'django.contrib.syndication.apps.SyndicationConfig'
|
jmchilton/galaxy-central
|
refs/heads/master
|
modules/elementtree/HTMLTreeBuilder.py
|
103
|
#
# ElementTree
# $Id: HTMLTreeBuilder.py 2325 2005-03-16 15:50:43Z fredrik $
#
# a simple tree builder, for HTML input
#
# history:
# 2002-04-06 fl created
# 2002-04-07 fl ignore IMG and HR end tags
# 2002-04-07 fl added support for 1.5.2 and later
# 2003-04-13 fl added HTMLTreeBuilder alias
# 2004-12-02 fl don't feed non-ASCII charrefs/entities as 8-bit strings
# 2004-12-05 fl don't feed non-ASCII CDATA as 8-bit strings
#
# Copyright (c) 1999-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to build element trees from HTML files.
##
import htmlentitydefs
import re, string, sys
import mimetools, StringIO
import ElementTree
AUTOCLOSE = "p", "li", "tr", "th", "td", "head", "body"
IGNOREEND = "img", "hr", "meta", "link", "br"
if sys.version[:3] == "1.5":
is_not_ascii = re.compile(r"[\x80-\xff]").search # 1.5.2
else:
is_not_ascii = re.compile(eval(r'u"[\u0080-\uffff]"')).search
try:
from HTMLParser import HTMLParser
except ImportError:
from sgmllib import SGMLParser
# hack to use sgmllib's SGMLParser to emulate 2.2's HTMLParser
class HTMLParser(SGMLParser):
# the following only works as long as this class doesn't
# provide any do, start, or end handlers
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, attrs)
def unknown_endtag(self, tag):
self.handle_endtag(tag)
##
# ElementTree builder for HTML source code. This builder converts an
# HTML document or fragment to an ElementTree.
# <p>
# The parser is relatively picky, and requires balanced tags for most
# elements. However, elements belonging to the following group are
# automatically closed: P, LI, TR, TH, and TD. In addition, the
# parser automatically inserts end tags immediately after the start
# tag, and ignores any end tags for the following group: IMG, HR,
# META, and LINK.
#
# @keyparam builder Optional builder object. If omitted, the parser
#     uses the standard <b>elementtree</b> builder.
# @keyparam encoding Optional character encoding, if known. If omitted,
#     the parser looks for META tags inside the document. If no tags
#     are found, the parser defaults to ISO-8859-1. Note that if your
#     document uses a non-ASCII compatible encoding, you must decode
#     the document before parsing.
#
# @see elementtree.ElementTree

class HTMLTreeBuilder(HTMLParser):

    # FIXME: shouldn't this class be named Parser, not Builder?

    def __init__(self, builder=None, encoding=None):
        self.__stack = []        # currently open tags, for autoclose handling
        if builder is None:
            builder = ElementTree.TreeBuilder()
        self.__builder = builder  # target tree builder
        self.encoding = encoding or "iso-8859-1"
        HTMLParser.__init__(self)

    ##
    # Flushes parser buffers, and return the root element.
    #
    # @return An Element instance.

    def close(self):
        HTMLParser.close(self)
        return self.__builder.close()

    ##
    # (Internal) Handles start tags.

    def handle_starttag(self, tag, attrs):
        if tag == "meta":
            # look for encoding directives
            http_equiv = content = None
            for k, v in attrs:
                if k == "http-equiv":
                    http_equiv = string.lower(v)
                elif k == "content":
                    content = v
            if http_equiv == "content-type" and content:
                # use mimetools to parse the http header
                header = mimetools.Message(
                    StringIO.StringIO("%s: %s\n\n" % (http_equiv, content))
                    )
                encoding = header.getparam("charset")
                if encoding:
                    self.encoding = encoding
        if tag in AUTOCLOSE:
            # an open element of the same kind is implicitly closed first
            if self.__stack and self.__stack[-1] == tag:
                self.handle_endtag(tag)
        self.__stack.append(tag)
        attrib = {}
        if attrs:
            for k, v in attrs:
                attrib[string.lower(k)] = v
        self.__builder.start(tag, attrib)
        if tag in IGNOREEND:
            # close immediately; any later end tag is dropped in handle_endtag
            self.__stack.pop()
            self.__builder.end(tag)

    ##
    # (Internal) Handles end tags.

    def handle_endtag(self, tag):
        if tag in IGNOREEND:
            return
        lasttag = self.__stack.pop()
        if tag != lasttag and lasttag in AUTOCLOSE:
            self.handle_endtag(lasttag)
        self.__builder.end(tag)

    ##
    # (Internal) Handles character references.

    def handle_charref(self, char):
        if char[:1] == "x":
            char = int(char[1:], 16)   # hexadecimal reference (&#xNN;)
        else:
            char = int(char)
        if 0 <= char < 128:
            self.__builder.data(chr(char))
        else:
            self.__builder.data(unichr(char))

    ##
    # (Internal) Handles entity references.

    def handle_entityref(self, name):
        entity = htmlentitydefs.entitydefs.get(name)
        if entity:
            if len(entity) == 1:
                entity = ord(entity)
            else:
                entity = int(entity[2:-1])   # stored as "&#NNN;" -> NNN
            if 0 <= entity < 128:
                self.__builder.data(chr(entity))
            else:
                self.__builder.data(unichr(entity))
        else:
            self.unknown_entityref(name)

    ##
    # (Internal) Handles character data.

    def handle_data(self, data):
        if isinstance(data, type('')) and is_not_ascii(data):
            # convert to unicode, but only if necessary
            data = unicode(data, self.encoding, "ignore")
        self.__builder.data(data)

    ##
    # (Hook) Handles unknown entity references. The default action
    # is to ignore unknown entities.

    def unknown_entityref(self, name):
        pass # ignore by default; override if necessary
##
# An alias for the <b>HTMLTreeBuilder</b> class.

TreeBuilder = HTMLTreeBuilder

##
# Parse an HTML document or document fragment.
#
# @param source A filename or file object containing HTML data.
# @param encoding Optional character encoding, if known. If omitted,
#     the parser looks for META tags inside the document. If no tags
#     are found, the parser defaults to ISO-8859-1.
# @return An ElementTree instance

def parse(source, encoding=None):
    return ElementTree.parse(source, HTMLTreeBuilder(encoding=encoding))

if __name__ == "__main__":
    # command-line use: dump the parsed tree of the file named in argv[1]
    import sys
    ElementTree.dump(parse(open(sys.argv[1])))
|
yuzhangcmu/Python-Study
|
refs/heads/master
|
Leetcode/Search_in_Rotated_Sorted_Array_II.py
|
2
|
"""
Follow up for "Search in Rotated Sorted Array":
What if duplicates are allowed?
Would this affect the run-time complexity? How and why?
Write a function to determine if a given target is in the array.
"""
class Solution:
    # @param A a list of integers
    # @param target an integer
    # @return a boolean
    def search(self, A, target):
        """Return True if *target* occurs in the rotated sorted list *A*
        (duplicates allowed).

        Binary search: whichever half of the window is sorted tells us where
        target can be.  When A[start] == A[mid] the duplicates hide which half
        is sorted, so the window shrinks by one -- worst case O(n), otherwise
        O(log n).
        """
        start = 0
        end = len(A) - 1
        while start <= end:
            # // keeps mid an int on Python 3 (plain / returned a float there)
            mid = (start + end) // 2
            if A[mid] == target:
                return True
            elif A[start] < A[mid]:  # First half sorted
                if A[start] <= target and target < A[mid]:
                    end = mid - 1
                else:
                    start = mid + 1
            elif A[start] > A[mid]:  # Second half sorted
                if A[mid] < target and target <= A[end]:
                    start = mid + 1
                else:
                    end = mid - 1
            else:
                # A[start] == A[mid] but A[mid] != target: skip the duplicate
                start += 1
        return False
|
ojengwa/oh-mainline
|
refs/heads/master
|
vendor/packages/django-authopenid/example/urls.py
|
17
|
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
# Register ModelAdmin classes from each installed app.
admin.autodiscover()

# Routes for the example app: home page, django-authopenid account flows,
# and the admin site (legacy Django 1.x `patterns` API).
urlpatterns = patterns('',
    (r'^$', 'example.views.home'),
    (r'^account/', include('django_authopenid.urls')),
    (r'^admin/(.*)', admin.site.root),
)

# Serve uploaded media through Django itself in development only.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
    )
|
akhilerm/ZeroCut
|
refs/heads/master
|
python/zerocut.py
|
1
|
import sys
import pygame
from pygame.locals import *
FPS = 30  # frames per second for the render loop
SCREENWIDTH = SCREENHEIGHT = 512  # square window, pixels
#The game matrix initialized with None, False for O and True for X, Posession_Matrix stores which all submatrices is possessed by O and X
Game_Matrix = [[None for x in range(9)] for y in range(9)]
Posession_MatrixO = [[None for x in range(3)] for y in range (3)]
Posession_MatrixX = [[None for x in range(3)] for y in range (3)]
# Which 3x3 sub-boards may currently be played in (all open at start).
Active_Matrix = [[True for x in range(3)] for y in range(3)]
IMAGES, SOUNDS = {},{}  # asset caches, loaded elsewhere
# Whose turn it is: True -> X, False -> O.
player = True
#Function to convert pixel coordinates into game coordinates -- matrix indices
def get_index(value):
    """Map a pixel position to board indices.

    *value* is an (x, y) pixel pair.  Returns ``[big_row, big_col,
    small_row, small_col]`` identifying the 3x3 sub-board and the cell
    inside it, or None when the position falls on a grid line or outside
    the 512x512 board.
    """
    def cell(pixel):
        # The three large bands start at 0/174/348; inside each band the
        # three cells start at offsets 0/56/112 and are 52 pixels wide
        # (the gaps between those spans are the grid lines).
        for big, band in enumerate((0, 174, 348)):
            for small, offset in enumerate((0, 56, 112)):
                if band + offset <= pixel <= band + offset + 51:
                    return big, small
        return None

    row = cell(value[1])   # y-coordinate selects the row
    col = cell(value[0])   # x-coordinate selects the column
    if row is None or col is None:
        return None
    return [row[0], col[0], row[1], col[1]]
#Function to check whether the given matrix is clickable
def is_active(value):
    """Return True when *value* (an (x, y) pixel pair) lies inside a cell of
    a sub-board that is still open for play."""
    idx = get_index(value)
    if idx is None:
        return False
    # idx[0]/idx[1] pick the big sub-board; `!= False` mirrors the original
    # `== False` test exactly.
    return Active_Matrix[idx[0]][idx[1]] != False
#Function to check whether a 3x3 matrix has got 3 in a row
def check(matrix):
    """Return ``{'O': bool, 'X': bool}``: which players have three in a row.

    Cells hold None (empty), False (O) or True (X).  All eight winning
    lines are examined; both flags can be set at once.
    """
    triples = (
        [(matrix[r][0], matrix[r][1], matrix[r][2]) for r in range(3)] +   # rows
        [(matrix[0][c], matrix[1][c], matrix[2][c]) for c in range(3)] +   # columns
        [(matrix[0][0], matrix[1][1], matrix[2][2]),                       # diagonal
         (matrix[0][2], matrix[1][1], matrix[2][0])]                       # anti-diagonal
    )
    result = {'O': False, 'X': False}
    for a, b, c in triples:
        if a == b == c:
            if a is False:      # O is stored as False
                result['O'] = True
            elif a is True:     # X is stored as True
                result['X'] = True
    return result
#Function to display the winner
def won(player):
    """Show the end-of-game screen until the player clicks the banner area.

    player: True when X won, False when O won.  Plays the win sound once,
    then keeps redrawing the final board with the winner banner until the
    clickable region (y in 310..328, x in 185..339) is hit or the window
    is closed.
    """
    # Cell index (0..8) -> top-left pixel offset of that cell.
    PIXEL = (0, 56, 112, 174, 230, 286, 348, 404, 460)
    SOUNDS['win'].play()
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN or event.type == MOUSEBUTTONUP:
                pos = pygame.mouse.get_pos()
                if 309 < pos[1] < 329 and 184 < pos[0] < 340:
                    return
        # Redraw the finished board under the banner.
        SCREEN.blit(IMAGES['gamebg'], (0, 0))
        for row in range(9):
            for col in range(9):
                mark = Game_Matrix[row][col]
                if mark is None:
                    continue
                dest = (PIXEL[col], PIXEL[row])
                SCREEN.blit(IMAGES['X'] if mark == True else IMAGES['O'], dest)
        SCREEN.blit(IMAGES['X_win'] if player == True else IMAGES['O_win'], (0, 162))
        pygame.display.update()
        FPSCLOCK.tick(FPS)
def check_full(matrix):
    """Return True when every cell of the 3x3 *matrix* is occupied
    (i.e. no cell is still None)."""
    return all(cell is not None for row in matrix for cell in row)
def playgame():
    """Run one ultimate-tic-tac-toe match until ESC, a win, or window close.

    Reads/writes the module globals: Game_Matrix (9x9 cell marks,
    True=X / False=O / None=empty), the two possession matrices
    (sub-boards captured per player), Active_Matrix (which sub-boards
    accept the next move) and player (True when it is X's turn).
    """
    global Active_Matrix
    global player
    # game loop
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN and event.key == K_ESCAPE:
                # Abandon the match; reset the turn so X starts next time.
                player = True
                return
            if event.type == MOUSEBUTTONUP:
                pos = pygame.mouse.get_pos()
                index1 = get_index(pos)
                # Reject clicks outside an active sub-board, or on an
                # already-occupied cell of an active one.
                if is_active(pos) == False or (is_active(pos) == True and Game_Matrix[index1[0]*3+index1[2]][index1[1]*3+index1[3]] is not None):
                    SOUNDS['error'].play()
                else:
                    # Valid move: place the current player's mark.
                    index = index1
                    Game_Matrix[index[0] * 3 + index[2]][index[1] * 3 + index[3]] = player
                    # Did this move complete a line in the sub-board played in?
                    status = check([row[index[1]*3:index[1]*3+3] for row in Game_Matrix[index[0]*3:index[0]*3+3]])  # 'O','X'
                    # The cell position (small coords) selects the opponent's
                    # target sub-board for the next move.
                    Active_Matrix = [[False for x in range(3)] for y in range(3)]
                    Active_Matrix[index[2]][index[3]] = True
                    if check_full([row[index[3]*3:index[3]*3+3] for row in Game_Matrix[index[2]*3:index[2]*3+3]]) is True:
                        # Target sub-board is full: every sub-board is playable.
                        Active_Matrix = [[True for x in range(3)] for y in range(3)]
                    if status['O'] == True and player == False:
                        # O captured this sub-board; check the macro board.
                        Posession_MatrixO[index[0]][index[1]] = True
                        # Both possession matrices mark captures with True,
                        # so check()'s 'X' key is the right one to read here.
                        win_status = check(Posession_MatrixO)
                        if win_status['X'] == True:
                            won(False)
                            return
                        player = not player
                        continue
                    if status['X'] == True and player == True:
                        # X captured this sub-board; check the macro board.
                        Posession_MatrixX[index[0]][index[1]] = True
                        win_status = check(Posession_MatrixX)
                        if win_status['X'] == True:
                            won(True)
                            return
                        player = not player
                        continue
                    # No capture: just change player.
                    player = not player
        # drawing the screen
        SCREEN.blit(IMAGES['gamebg'], (0, 0))
        coord = [None, None]
        for i in range(9):
            for j in range(9):
                if Game_Matrix[i][j] is not None:
                    # Map the cell row/column index to its top-left pixel
                    # offset on the 512px board.
                    if i == 0:
                        coord[0] = 0
                    elif i == 1:
                        coord[0] = 56
                    elif i == 2:
                        coord[0] = 112
                    elif i == 3:
                        coord[0] = 174
                    elif i == 4:
                        coord[0] = 230
                    elif i == 5:
                        coord[0] = 286
                    elif i == 6:
                        coord[0] = 348
                    elif i == 7:
                        coord[0] = 404
                    else:
                        coord[0] = 460
                    if j == 0:
                        coord[1] = 0
                    elif j == 1:
                        coord[1] = 56
                    elif j == 2:
                        coord[1] = 112
                    elif j == 3:
                        coord[1] = 174
                    elif j == 4:
                        coord[1] = 230
                    elif j == 5:
                        coord[1] = 286
                    elif j == 6:
                        coord[1] = 348
                    elif j == 7:
                        coord[1] = 404
                    else:
                        coord[1] = 460
                    if Game_Matrix[i][j] == True:
                        SCREEN.blit(IMAGES['X'], (coord[1], coord[0]))
                    else:
                        SCREEN.blit(IMAGES['O'], (coord[1], coord[0]))
        # Setting the shadow for inactive matrices.  Each all() call is
        # False exactly when play is restricted to a single sub-board, so
        # the chained == comparison is True only after some move has been
        # made - by then `index` (leaked from the event handler above) is
        # guaranteed to be defined.
        if all(Active_Matrix[0]) == all(Active_Matrix[1]) == all(Active_Matrix[2]) == False:
            if (index[2] == 1 and index[3] == 1):
                # Centre sub-board active: full-surround shadow.
                if player is True:
                    SCREEN.blit(IMAGES['wc_shad'], (0, 0))
                else:
                    SCREEN.blit(IMAGES['bc_shad'], (0, 0))
            elif index[2] == 1 or index[3] == 1:
                # Edge (middle of a side) sub-board: rotate the "m" shadow
                # image to face the active board.
                if player is True:
                    shad = 'wm_shad'
                else:
                    shad = 'bm_shad'
                if index[2] == 1 and index[3] == 2:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 270), (0, 0))
                elif index[2] == 2 and index[3] == 1:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 180), (0, 0))
                elif index[2] == 1 and index[3] == 0:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 90), (0, 0))
                else:
                    SCREEN.blit(IMAGES[shad], (0, 0))
            else:
                # Corner sub-board: rotate the "s" shadow image.
                if player is True:
                    shad = 'ws_shad'
                else:
                    shad = 'bs_shad'
                if index[2] == 0 and index[3] == 2:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 270), (0, 0))
                elif index[2] == 2 and index[3] == 2:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 180), (0, 0))
                elif index[2] == 2 and index[3] == 0:
                    SCREEN.blit(pygame.transform.rotate(IMAGES[shad], 90), (0, 0))
                else:
                    SCREEN.blit(IMAGES[shad], (0, 0))
        pygame.display.update()
        FPSCLOCK.tick(FPS)
def main():
    """Initialise pygame, load all assets, and run the welcome/menu loop.

    Clicking the "play" area of the welcome screen resets all game state
    and starts a match via playgame().
    """
    global SCREEN, FPSCLOCK, Game_Matrix, Posession_MatrixO, Posession_MatrixX, Active_Matrix
    pygame.init()
    FPSCLOCK = pygame.time.Clock()
    logo = pygame.image.load('assets/images/ic_launcher_32.png')
    SCREEN = pygame.display.set_mode((SCREENWIDTH, SCREENHEIGHT))  # ,pygame.NOFRAME)
    pygame.display.set_caption('ZeroCut')
    pygame.display.set_icon(logo)
    # Backgrounds, marks, active-board shadows and win banners.
    IMAGES['gamebg'] = pygame.image.load('assets/images/game.png').convert()
    IMAGES['welcome'] = pygame.image.load('assets/images/bg.png').convert()
    IMAGES['bc_shad'] = pygame.image.load('assets/images/bshadowc.png').convert_alpha()
    IMAGES['bm_shad'] = pygame.image.load('assets/images/bshadowm.png').convert_alpha()
    IMAGES['bs_shad'] = pygame.image.load('assets/images/bshadows.png').convert_alpha()
    IMAGES['wc_shad'] = pygame.image.load('assets/images/wshadowc.png').convert_alpha()
    IMAGES['wm_shad'] = pygame.image.load('assets/images/wshadowm.png').convert_alpha()
    IMAGES['ws_shad'] = pygame.image.load('assets/images/wshadows.png').convert_alpha()
    IMAGES['O'] = pygame.image.load('assets/images/O.png').convert_alpha()
    IMAGES['X'] = pygame.image.load('assets/images/X.png').convert_alpha()
    IMAGES['X_win'] = pygame.image.load('assets/images/winx.png').convert_alpha()
    IMAGES['O_win'] = pygame.image.load('assets/images/wino.png').convert_alpha()
    # Sounds: .wav on Windows, .ogg elsewhere.
    # BUG FIX: the previous test `'win' in sys.platform` also matched
    # macOS, whose sys.platform is 'darwin' (contains 'win'); startswith
    # limits the .wav branch to real Windows platforms ('win32').
    if sys.platform.startswith('win'):
        soundExt = '.wav'
    else:
        soundExt = '.ogg'
    SOUNDS['error'] = pygame.mixer.Sound('assets/audio/err' + soundExt)
    SOUNDS['win'] = pygame.mixer.Sound('assets/audio/win' + soundExt)
    # Overall game loop (welcome screen).
    while True:
        for event in pygame.event.get():
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                pygame.quit()
                sys.exit()
            if event.type == MOUSEBUTTONUP:
                # Fresh state for a potential new match.
                Game_Matrix = [[None for x in range(9)] for y in range(9)]
                Posession_MatrixO = [[None for x in range(3)] for y in range(3)]
                Posession_MatrixX = [[None for x in range(3)] for y in range(3)]
                Active_Matrix = [[True for x in range(3)] for y in range(3)]
                pos = pygame.mouse.get_pos()
                # The welcome screen's "play" button area.
                if 147 < pos[0] < 366 and 368 < pos[1] < 420:
                    playgame()
        SCREEN.blit(IMAGES['welcome'], (0, 0))
        pygame.display.update()
        FPSCLOCK.tick(FPS)
# Script entry point.
if __name__ == '__main__':
    main()
|
mmnelemane/nova
|
refs/heads/master
|
nova/api/openstack/compute/schemas/availability_zone.py
|
73
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
# JSON-schema fragment merged into the server-create request body:
# validates the optional availability_zone field as a "name" string.
server_create = {
    'availability_zone': parameter_types.name,
}
|
tuomas2/sword_studybibles
|
refs/heads/develop
|
setup.py
|
1
|
#!/usr/bin/env python
import setuptools
# Package metadata for study2osis; src-layout, so code lives under src/.
setuptools.setup(name='study2osis', version='0.0.5',
                 package_dir={"": "src"},
                 packages=setuptools.find_packages('src'),
                 include_package_data=True,
                 zip_safe=False)
|
Huskynarr/gnome15
|
refs/heads/master
|
src/plugins/indicator-messages/default/indicator_messages_default_default.py
|
8
|
import indicator_messages_default_common
class Theme(indicator_messages_default_common.Theme):
    """Default-size variant of the indicator-messages theme; all drawing
    is delegated to the shared common theme implementation."""

    def __init__(self, screen, theme):
        indicator_messages_default_common.Theme.__init__(self, screen, theme)

    def paint_foreground(self, canvas, properties, attributes, args):
        # Delegate with this variant's fixed geometry arguments (1, 12).
        indicator_messages_default_common.Theme.paint_foreground(self, canvas, properties, attributes, args, 1, 12)
|
artreven/pp_api
|
refs/heads/master
|
pp_api/sparql_calls.py
|
1
|
import requests
import numpy as np
import rdflib
def get_corpus_analysis_graphs(corpus_id):
    """Derive the four named-graph ids used by a PoolParty corpus analysis.

    corpus_id is expected to look like 'corpus:<id>'; the 7-character
    'corpus:' prefix is replaced by 'corpusgraph:'.  Returns the base
    corpus graph id plus its extractedTerms, conceptOccurrences and
    cooccurrence sub-graph ids, in that order.
    """
    base = 'corpusgraph:{}'.format(corpus_id[7:])
    return (
        base,
        base + ':extractedTerms',
        base + ':conceptOccurrences',
        base + ':cooccurrence',
    )
def get_corpus_zscores(term_uris, cooc_corpus_graph):
    """Build a term-similarity function from term-term cooccurrence zscores.

    Queries the Virtuoso endpoint for all term cooccurrence zscores in
    *cooc_corpus_graph*, keeps only pairs where both URIs are in
    *term_uris*, log2-transforms the scores and normalises them by the
    maximum.

    :param term_uris: collection of term URIs to restrict the matrix to
    :param cooc_corpus_graph: named graph holding the cooccurrence data
    :return: callable similarity(term1_uri, term2_uri) -> score;
        1 for identical URIs, 0 for pairs not in the matrix.
        (The original docstring said "float [0, 1]"; note log2 of a
        zscore < 1 is negative, so negative scores are possible - the
        closure, not a float, is what is actually returned.)
    """
    def similarity(term1_uri, term2_uri):
        # Closure over sim_matrix, which is populated below before this
        # function can be called (late binding).
        if term1_uri == term2_uri:
            return 1
        elif (term1_uri, term2_uri) in sim_matrix:
            return sim_matrix[(term1_uri, term2_uri)]
        elif (term2_uri, term1_uri) in sim_matrix:
            return sim_matrix[(term2_uri, term1_uri)]
        else:
            return 0
    # The doubled braces form a nested SPARQL group pattern (the string
    # is never .format()-ed).
    query_text = """
    select ?uri1 ?uri2 ?score where {{
    ?uri1 <http://schema.semantic-web.at/ppcm/2013/5/hasTermCooccurrence> ?co.
    ?co <http://schema.semantic-web.at/ppcm/2013/5/cooccurringExtractedTerm> ?uri2.
    ?co <http://schema.semantic-web.at/ppcm/2013/5/zscore> ?score.
    }}"""
    params = {
        'default-graph-uri': '{}'.format(cooc_corpus_graph),
        'query': query_text,
        'format': 'json',
    }
    r = requests.get('https://aligned-virtuoso.poolparty.biz/sparql',
                     params=params)
    assert r.status_code == 200
    sim_matrix = dict()
    for binding in r.json()['results']['bindings']:
        uri1 = binding['uri1']['value']
        uri2 = binding['uri2']['value']
        if uri1 in term_uris and uri2 in term_uris:
            # log2 of the raw zscore; normalised by the max just below.
            sim_matrix[(uri1, uri2)] = np.log2(float(binding['score']['value']))
    # NOTE(review): raises ValueError if no pair matched (empty matrix) -
    # presumably callers guarantee at least one pair; confirm.
    max_score = max(sim_matrix.values())
    for k in sim_matrix:
        sim_matrix[k] /= max_score
    return similarity
def get_pp_terms(corpus_graph_terms, crs_threshold=5):
    """
    Load all terms whose combinedRelevanceScore is greater than
    crs_threshold from the graph corpus_graph_terms.

    :param corpus_graph_terms: uri of the graph
    :param crs_threshold: min combinedRelevanceScore of term to be returned
    :return: (dict name -> score, dict name -> term uri)
    """
    params = {
        'default-graph-uri': '{}'.format(corpus_graph_terms),
        'query': """
        select ?termUri ?name ?score where {{
        ?termUri <http://schema.semantic-web.at/ppcm/2013/5/combinedRelevanceScore> ?score .
        ?termUri <http://schema.semantic-web.at/ppcm/2013/5/name> ?name .
        filter (?score > {})
        }} order by desc(?score)""".format(crs_threshold),
        'format': 'json',
    }
    # NOTE(review): unlike get_corpus_zscores, the HTTP status is not
    # checked here - a failed request surfaces as an error in r.json().
    r = requests.get('https://aligned-virtuoso.poolparty.biz/sparql',
                     params=params)
    top_terms_scores = dict()
    top_terms_uris = dict()
    for new_term in r.json()['results']['bindings']:
        name = new_term['name']['value']
        score = float(new_term['score']['value'])
        term_uri = new_term['termUri']['value']
        top_terms_scores[name] = score
        top_terms_uris[name] = term_uri
    return top_terms_scores, top_terms_uris
all_data_q = """
select distinct * where {{
?s ?p ?o
}}
"""
q_get_doc_text_by_doc_id = """
select distinct * where {{
<{doc_id}> <http://schema.semantic-web.at/ppcm/2013/5/htmlText> ?o
}}
"""
def query_sparql_endpoint(sparql_endpoint, query=all_data_q):
    """Run *query* against a remote endpoint via rdflib's SPARQLStore and
    return the rdflib result set."""
    graph = rdflib.ConjunctiveGraph('SPARQLStore')
    rt = graph.open(sparql_endpoint)  # return value unused; open() just configures the store
    rs = graph.query(query)
    return rs
def get_ridfs(sparql_endpoint, termsgraph):
    """Return a {term lemma: score} dict for all extracted terms.

    NOTE(review): despite the function name, the value stored is r[2] -
    the combinedRelevanceScore (?crs) - not the ridf score (?ridf, which
    would be r[1]).  Confirm which score downstream callers expect.
    """
    q_term_scores = """
    select distinct ?lemma ?ridf ?crs where {{
    GRAPH <{}> {{
    ?s <http://schema.semantic-web.at/ppcm/2013/5/textValue> ?lemma .
    ?s <http://schema.semantic-web.at/ppcm/2013/5/ridfTermScore> ?ridf .
    ?s <http://schema.semantic-web.at/ppcm/2013/5/combinedRelevanceScore> ?crs .
    }}
    }}
    """.format(termsgraph)
    rs = query_sparql_endpoint(sparql_endpoint, q_term_scores)
    results = dict()
    for r in rs:
        results[str(r[0])] = float(r[2])
    return results
def query_cpt_cooc_scores(sparql_endpoint, cpt_cooc_graph):
    """Fetch concept-concept cooccurrence scores.

    :param sparql_endpoint: endpoint URL passed to query_sparql_endpoint
    :param cpt_cooc_graph: named graph holding the concept cooccurrences
    :return: symmetric nested dict {cpt_uri: {other_cpt_uri: score}}
    """
    q_cooc_score = """
    select distinct ?cpt1 ?cpt2 ?score where {{
    GRAPH <{}> {{
    ?cpt1 <http://schema.semantic-web.at/ppcm/2013/5/hasConceptCooccurrence> ?o .
    ?o <http://schema.semantic-web.at/ppcm/2013/5/cooccurringExtractedConcept> ?cpt2 .
    ?o <http://schema.semantic-web.at/ppcm/2013/5/score> ?score
    }}
    }}
    """.format(cpt_cooc_graph)
    rs = query_sparql_endpoint(sparql_endpoint, q_cooc_score)
    dist_mx = dict()
    for r in rs:
        cpt1 = str(r[0])
        cpt2 = str(r[1])
        score = float(r[2])
        # Record the score in both directions so lookups are symmetric
        # (setdefault replaces the previous try/except KeyError idiom).
        dist_mx.setdefault(cpt1, {})[cpt2] = score
        dist_mx.setdefault(cpt2, {})[cpt1] = score
    return dist_mx
def query_terms2cpts_cooc_scores(sparql_endpoint, cpt_cooc_graph, terms_graph):
    """Map each extracted term's text value to its cooccurring concepts.

    :param sparql_endpoint: endpoint URL passed to query_sparql_endpoint
    :param cpt_cooc_graph: named graph holding concept cooccurrences
    :param terms_graph: named graph holding the extracted terms
    :return: {term_text: {concept_uri: score}}

    Note: two dead local query strings were removed - an unscoped variant
    of this query (immediately shadowed by the GRAPH-scoped one below)
    and an unused term-term cooccurrence query.
    """
    q_cooc_cpt_score = """
    select distinct ?tv (group_concat(?cpt;separator="|") as ?cpts) (group_concat(?c_score;separator="|") as ?c_scores) where {{
    GRAPH <{cooc_graph}> {{
    ?s <http://schema.semantic-web.at/ppcm/2013/5/hasConceptCooccurrence> ?co_cpt .
    ?co_cpt <http://schema.semantic-web.at/ppcm/2013/5/cooccurringExtractedConcept> ?cpt .
    ?co_cpt <http://schema.semantic-web.at/ppcm/2013/5/score> ?c_score .
    }} .
    GRAPH <{terms_graph}>
    {{
    ?s <http://schema.semantic-web.at/ppcm/2013/5/textValue> ?tv .
    }}
    }}
    """.format(cooc_graph=cpt_cooc_graph, terms_graph=terms_graph)
    cpt_rs = query_sparql_endpoint(sparql_endpoint, query=q_cooc_cpt_score)
    cooc_dict = dict()
    for r in cpt_rs:
        text_value, cooc_cpts, t_scores = r
        # Concept URIs and scores arrive as "|"-joined aggregate strings.
        cooc_cpts = cooc_cpts.split('|')
        t_scores = list(map(float, t_scores.split('|')))
        cooc_dict[text_value.toPython()] = dict(zip(cooc_cpts, t_scores))
    return cooc_dict
# Module is import-only; nothing runs when executed directly.
if __name__ == '__main__':
    pass
|
cysnake4713/account-financial-tools
|
refs/heads/8.0
|
account_invoice_tax_required/models/account_invoice.py
|
23
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville. Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountInvoice(models.Model):
    """Block invoice validation when any invoice line has no tax set."""
    _inherit = "account.invoice"

    @api.multi
    def test_invoice_line_tax(self):
        """Check every line of every invoice in the recordset for taxes.

        Raises exceptions.Warning listing each offending line; returns
        True when all lines carry at least one tax.
        """
        errors = []
        error_template = _("Invoice has a line with product %s with no taxes")
        for invoice in self:
            for invoice_line in invoice.invoice_line:
                if not invoice_line.invoice_line_tax_id:
                    error_string = error_template % (invoice_line.name)
                    errors.append(error_string)
        if errors:
            errors_full_string = ','.join(x for x in errors)
            raise exceptions.Warning(_('No Taxes Defined!'),
                                     errors_full_string)
        else:
            return True

    @api.multi
    def invoice_validate(self):
        # Enforce the tax check before the standard validation workflow.
        self.test_invoice_line_tax()
        res = super(AccountInvoice, self).invoice_validate()
        return res
|
ProfessionalIT/maxigenios-website
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.3/django/contrib/localflavor/tr/tr_provinces.py
|
316
|
# -*- coding: utf-8 -*-
"""
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
# The 81 Turkish provinces as (licence-plate code, name) choice pairs,
# ordered alphabetically by name (codes are therefore non-sequential).
# The redundant parentheses around each name string were dropped - they
# were grouping parens, not tuples, and had no effect.
PROVINCE_CHOICES = (
    ('01', 'Adana'),
    ('02', 'Adıyaman'),
    ('03', 'Afyonkarahisar'),
    ('04', 'Ağrı'),
    ('68', 'Aksaray'),
    ('05', 'Amasya'),
    ('06', 'Ankara'),
    ('07', 'Antalya'),
    ('75', 'Ardahan'),
    ('08', 'Artvin'),
    ('09', 'Aydın'),
    ('10', 'Balıkesir'),
    ('74', 'Bartın'),
    ('72', 'Batman'),
    ('69', 'Bayburt'),
    ('11', 'Bilecik'),
    ('12', 'Bingöl'),
    ('13', 'Bitlis'),
    ('14', 'Bolu'),
    ('15', 'Burdur'),
    ('16', 'Bursa'),
    ('17', 'Çanakkale'),
    ('18', 'Çankırı'),
    ('19', 'Çorum'),
    ('20', 'Denizli'),
    ('21', 'Diyarbakır'),
    ('81', 'Düzce'),
    ('22', 'Edirne'),
    ('23', 'Elazığ'),
    ('24', 'Erzincan'),
    ('25', 'Erzurum'),
    ('26', 'Eskişehir'),
    ('27', 'Gaziantep'),
    ('28', 'Giresun'),
    ('29', 'Gümüşhane'),
    ('30', 'Hakkari'),
    ('31', 'Hatay'),
    ('76', 'Iğdır'),
    ('32', 'Isparta'),
    ('33', 'Mersin'),
    ('34', 'İstanbul'),
    ('35', 'İzmir'),
    ('78', 'Karabük'),
    ('36', 'Kars'),
    ('37', 'Kastamonu'),
    ('38', 'Kayseri'),
    ('39', 'Kırklareli'),
    ('40', 'Kırşehir'),
    ('41', 'Kocaeli'),
    ('42', 'Konya'),
    ('43', 'Kütahya'),
    ('44', 'Malatya'),
    ('45', 'Manisa'),
    ('46', 'Kahramanmaraş'),
    ('70', 'Karaman'),
    ('71', 'Kırıkkale'),
    ('79', 'Kilis'),
    ('47', 'Mardin'),
    ('48', 'Muğla'),
    ('49', 'Muş'),
    ('50', 'Nevşehir'),
    ('51', 'Niğde'),
    ('52', 'Ordu'),
    ('80', 'Osmaniye'),
    ('53', 'Rize'),
    ('54', 'Sakarya'),
    ('55', 'Samsun'),
    ('56', 'Siirt'),
    ('57', 'Sinop'),
    ('58', 'Sivas'),
    ('73', 'Şırnak'),
    ('59', 'Tekirdağ'),
    ('60', 'Tokat'),
    ('61', 'Trabzon'),
    ('62', 'Tunceli'),
    ('63', 'Şanlıurfa'),
    ('64', 'Uşak'),
    ('65', 'Van'),
    ('77', 'Yalova'),
    ('66', 'Yozgat'),
    ('67', 'Zonguldak'),
)
|
Lujeni/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/google/gcp_compute_network.py
|
10
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_network
description:
- Manages a VPC network or legacy network resource on GCP.
short_description: Creates a GCP Network
version_added: '2.6'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
type: str
description:
description:
- An optional description of this resource. The resource must be recreated to
modify this field.
required: false
type: str
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
type: str
auto_create_subnetworks:
description:
- When set to `true`, the network is created in "auto subnet mode" and it will
create a subnet for each region automatically across the `10.128.0.0/9` address
range.
- When set to `false`, the network is created in "custom subnet mode" so the user
can explicitly connect subnetwork resources.
required: false
type: bool
routing_config:
description:
- The network-level routing configuration for this network. Used by Cloud Router
to determine what type of network-wide routing behavior to enforce.
required: false
type: dict
version_added: '2.8'
suboptions:
routing_mode:
description:
- The network-wide routing mode to use. If set to `REGIONAL`, this network's
cloud routers will only advertise routes with subnetworks of this network
in the same region as the router. If set to `GLOBAL`, this network's cloud
routers will advertise routes with all subnetworks of this network, across
regions.
- 'Some valid choices include: "REGIONAL", "GLOBAL"'
required: true
type: str
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/rest/v1/networks)'
- 'Official Documentation: U(https://cloud.google.com/vpc/docs/vpc)'
- for authentication, you can set service_account_file using the C(gcp_service_account_file)
env variable.
- for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: create a network
gcp_compute_network:
name: test_object
auto_create_subnetworks: 'true'
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
description:
description:
- An optional description of this resource. The resource must be recreated to modify
this field.
returned: success
type: str
gateway_ipv4:
description:
- The gateway address for default routing out of the network. This value is selected
by GCP.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
subnetworks:
description:
- Server-defined fully-qualified URLs for all subnetworks in this network.
returned: success
type: list
autoCreateSubnetworks:
description:
- When set to `true`, the network is created in "auto subnet mode" and it will create
a subnet for each region automatically across the `10.128.0.0/9` address range.
- When set to `false`, the network is created in "custom subnet mode" so the user
can explicitly connect subnetwork resources.
returned: success
type: bool
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
routingConfig:
description:
- The network-level routing configuration for this network. Used by Cloud Router
to determine what type of network-wide routing behavior to enforce.
returned: success
type: complex
contains:
routingMode:
description:
- The network-wide routing mode to use. If set to `REGIONAL`, this network's
cloud routers will only advertise routes with subnetworks of this network
in the same region as the router. If set to `GLOBAL`, this network's cloud
routers will advertise routes with all subnetworks of this network, across
regions.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
    """Main function: declare the argument spec and converge the GCP
    network resource to the requested state (present/absent)."""
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            description=dict(type='str'),
            name=dict(required=True, type='str'),
            auto_create_subnetworks=dict(type='bool'),
            routing_config=dict(type='dict', options=dict(routing_mode=dict(required=True, type='str'))),
        )
    )

    # Default OAuth scope when the user supplies none.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    state = module.params['state']
    kind = 'compute#network'

    fetch = fetch_resource(module, self_link(module), kind)
    changed = False

    if fetch:
        if state == 'present':
            # Resource exists: update only when the desired spec differs.
            if is_different(module, fetch):
                update(module, self_link(module), kind, fetch)
                fetch = fetch_resource(module, self_link(module), kind)
                changed = True
        else:
            # Resource exists but should not: delete it.
            delete(module, self_link(module), kind)
            fetch = {}
            changed = True
    else:
        if state == 'present':
            # Resource missing: create it.
            fetch = create(module, collection(module), kind)
            changed = True
        else:
            fetch = {}

    fetch.update({'changed': changed})

    module.exit_json(**fetch)
def create(module, link, kind):
    """POST the resource and block until the async operation finishes."""
    auth = GcpSession(module, 'compute')
    return wait_for_operation(module, auth.post(link, resource_to_request(module)))


def update(module, link, kind, fetch):
    """Apply in-place field updates, then re-fetch the current state."""
    update_fields(module, resource_to_request(module), response_to_hash(module, fetch))
    return fetch_resource(module, self_link(module), kind)


def update_fields(module, request, response):
    # routingConfig is the only field patched in place on a network.
    if response.get('routingConfig') != request.get('routingConfig'):
        routing_config_update(module, request, response)


def routing_config_update(module, request, response):
    """PATCH just the routingConfig sub-object of the network."""
    auth = GcpSession(module, 'compute')
    auth.patch(
        ''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/global/networks/{name}"]).format(**module.params),
        {u'routingConfig': NetworkRoutingconfig(module.params.get('routing_config', {}), module).to_request()},
    )


def delete(module, link, kind):
    """DELETE the resource and block until the async operation finishes."""
    auth = GcpSession(module, 'compute')
    return wait_for_operation(module, auth.delete(link))
def resource_to_request(module):
    """Build the GCP API request body from the module parameters,
    dropping empty/None values (an explicit False is kept - it is
    meaningful for autoCreateSubnetworks)."""
    request = {
        u'kind': 'compute#network',
        u'description': module.params.get('description'),
        u'name': module.params.get('name'),
        u'autoCreateSubnetworks': module.params.get('auto_create_subnetworks'),
        u'routingConfig': NetworkRoutingconfig(module.params.get('routing_config', {}), module).to_request(),
    }
    return_vals = {}
    for k, v in request.items():
        # Keep truthy values and explicit False; drop None/empty.
        if v or v is False:
            return_vals[k] = v

    return return_vals
def fetch_resource(module, link, kind, allow_not_found=True):
    """GET the resource at *link*; returns None on 404 when allowed."""
    auth = GcpSession(module, 'compute')
    return return_if_object(module, auth.get(link), kind, allow_not_found)
def self_link(module):
    """Return the canonical URL of this specific network resource."""
    base = "https://www.googleapis.com/compute/v1"
    path = "projects/{project}/global/networks/{name}".format(**module.params)
    return "%s/%s" % (base, path)
def collection(module):
    """Return the URL of the networks collection for the configured project."""
    template = "https://www.googleapis.com/compute/v1/projects/%(project)s/global/networks"
    return template % module.params
def return_if_object(module, response, kind, allow_not_found=False):
    """Decode an API response to a dict, failing the module on errors.

    Returns None for an allowed 404 or an empty 204 response.
    """
    # If not found, return nothing.
    if allow_not_found and response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        # Non-JSON body: surface the raw text for diagnosis.
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)

    # API-level error payloads also fail the module.
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
def is_different(module, response):
    """Return True when the desired request differs from the live state,
    comparing only the keys present on both sides (output-only response
    fields and unset request fields are ignored)."""
    request = resource_to_request(module)
    response = response_to_hash(module, response)

    # Remove all output-only from response.
    response_vals = {}
    for k, v in response.items():
        if k in request:
            response_vals[k] = v

    request_vals = {}
    for k, v in request.items():
        if k in response:
            request_vals[k] = v

    return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    """Normalise an API response for diffing: immutable fields are taken
    from the module parameters, server-populated fields from the response."""
    return {
        u'description': module.params.get('description'),
        u'gatewayIPv4': response.get(u'gatewayIPv4'),
        u'id': response.get(u'id'),
        u'name': module.params.get('name'),
        u'subnetworks': response.get(u'subnetworks'),
        u'autoCreateSubnetworks': module.params.get('auto_create_subnetworks'),
        u'creationTimestamp': response.get(u'creationTimestamp'),
        u'routingConfig': NetworkRoutingconfig(response.get(u'routingConfig', {}), module).from_response(),
    }
def async_op_url(module, extra_data=None):
    """Build the URL used to poll a global compute operation.

    Keys from module.params override duplicates supplied in extra_data
    (which normally only carries op_id).
    """
    merged = dict(extra_data or {})
    merged.update(module.params)
    return ("https://www.googleapis.com/compute/v1/"
            "projects/{project}/global/operations/{op_id}").format(**merged)
def wait_for_operation(module, response):
op_result = return_if_object(module, response, 'compute#operation')
if op_result is None:
return {}
status = navigate_hash(op_result, ['status'])
wait_done = wait_for_completion(status, op_result, module)
return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#network')
def wait_for_completion(status, op_result, module):
    """Poll the operation endpoint until it reports DONE; fail fast on errors."""
    poll_uri = async_op_url(module, {'op_id': navigate_hash(op_result, ['name'])})
    while status != 'DONE':
        # Abort immediately if the operation has already recorded errors.
        raise_if_errors(op_result, ['error', 'errors'], module)
        time.sleep(1.0)
        op_result = fetch_resource(module, poll_uri, 'compute#operation', False)
        status = navigate_hash(op_result, ['status'])
    return op_result
def raise_if_errors(response, err_path, module):
    """Fail the module if the given path in `response` holds error entries."""
    found = navigate_hash(response, err_path)
    if found is not None:
        module.fail_json(msg=found)
class NetworkRoutingconfig(object):
    """Translate the network routingConfig between Ansible and API forms."""

    def __init__(self, request, module):
        self.module = module
        # Normalize a falsy request (None/{} from a missing field) to {}.
        self.request = request or {}

    def to_request(self):
        # Ansible param name (snake_case) -> API field name (camelCase).
        return remove_nones_from_dict({u'routingMode': self.request.get('routing_mode')})

    def from_response(self):
        # API responses already use the camelCase key.
        return remove_nones_from_dict({u'routingMode': self.request.get(u'routingMode')})
# Entry point when the module file is executed directly by Ansible.
if __name__ == '__main__':
    main()
|
safwanrahman/linuxdesh
|
refs/heads/master
|
kitsune/users/admin.py
|
15
|
from django import forms
from django.contrib import admin
from kitsune.users import monkeypatch
from kitsune.users.models import Profile
class ProfileAdminForm(forms.ModelForm):
    """Admin form for Profile adding a checkbox to clear the stored avatar."""
    # Extra non-model field; consumed by ProfileAdmin.save_model below.
    delete_avatar = forms.BooleanField(required=False, help_text=(
        "Check to remove the user's avatar."))

    class Meta(object):
        # NOTE(review): no `fields`/`exclude` declared — presumably all model
        # fields are intended; newer Django versions require this explicitly.
        model = Profile
class ProfileAdmin(admin.ModelAdmin):
    """Read-mostly admin for user profiles: no add/delete, avatar clearing."""
    # Disable bulk actions (e.g. the default delete-selected).
    actions = None
    fieldsets = (
        (None, {
            'fields': ['user', 'name', 'public_email',
                       ('avatar', 'delete_avatar'), 'bio'],
        }),
        ('Contact Info', {
            'fields': ['website', 'twitter', 'facebook', 'mozillians', 'irc_handle'],
            'classes': ['collapse'],
        }),
        ('Location', {
            'fields': ['timezone', ('country', 'city'), 'locale'],
            'classes': ['collapse'],
        }),
    )
    form = ProfileAdminForm
    list_display = ['full_user']
    list_select_related = True
    # The owning user can never be changed from the admin.
    readonly_fields = ['user']
    search_fields = ['user__username', 'user__email', 'name']

    def has_add_permission(self, request):
        # Profiles are created by the application, never via the admin.
        return False

    def has_delete_permission(self, request, obj=None):
        # Deleting profiles from the admin is likewise disallowed.
        return False

    def full_user(self, obj):
        # Show "username <display name>" when a display name exists.
        if obj.name:
            return u'%s <%s>' % (obj.user.username, obj.name)
        else:
            return obj.user.username
    full_user.short_description = 'User'

    def save_model(self, request, obj, form, change):
        # Honor the extra "delete_avatar" checkbox (see ProfileAdminForm)
        # before persisting the profile.
        delete_avatar = form.cleaned_data.pop('delete_avatar', False)
        if delete_avatar and obj.avatar:
            obj.avatar.delete()
        obj.save()
# Register the customized Profile admin and apply the user-admin monkeypatches.
admin.site.register(Profile, ProfileAdmin)
monkeypatch.patch_all()
|
DecipherOne/Troglodyte
|
refs/heads/master
|
Trog Build Dependencies/Python26/Lib/encodings/hex_codec.py
|
528
|
""" Python 'hex_codec' Codec - 2-digit hex content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs, binascii
### Codec APIs
def hex_encode(input, errors='strict'):
    """Hex-encode *input* and return ``(encoded_object, length_consumed)``.

    Only 'strict' error handling is supported by this codec; any other
    value trips the assertion below.
    """
    assert errors == 'strict'
    encoded = binascii.b2a_hex(input)
    return (encoded, len(input))
def hex_decode(input, errors='strict'):
    """Decode hex-encoded *input*; return ``(decoded_object, length_consumed)``.

    *input* must expose the read-buffer interface (bytes, buffers,
    memory-mapped files). Only 'strict' error handling is supported.
    """
    assert errors == 'strict'
    decoded = binascii.a2b_hex(input)
    return (decoded, len(input))
class Codec(codecs.Codec):
    # Stateless codec that simply delegates to the module-level functions.
    def encode(self, input, errors='strict'):
        return hex_encode(input, errors)
    def decode(self, input, errors='strict'):
        return hex_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental hex encoder; stateless, so each chunk encodes directly."""

    def encode(self, input, final=False):
        assert self.errors == 'strict'
        return binascii.b2a_hex(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental hex decoder; stateless, so each chunk decodes directly."""

    def decode(self, input, final=False):
        assert self.errors == 'strict'
        return binascii.a2b_hex(input)
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream machinery comes from codecs.StreamWriter; encoding from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream machinery comes from codecs.StreamReader; decoding from Codec.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo the encodings package uses to register 'hex'."""
    return codecs.CodecInfo(
        name='hex',
        encode=hex_encode,
        decode=hex_decode,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
    )
|
azureplus/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/defaultfilters/tests.py
|
48
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import decimal
from django.template.defaultfilters import *
from django.test import TestCase
from django.test.utils import TransRealMixin
from django.utils import six
from django.utils import unittest, translation
from django.utils.safestring import SafeData
from django.utils.encoding import python_2_unicode_compatible
class DefaultFiltersTests(TestCase):
    """Unit tests for Django's built-in template filters (defaultfilters).

    Each test method exercises one filter (or a small family) with plain
    Python values; real date/time formatting is covered in dateformat.py
    and timesince.py, as noted inline.
    """

    def test_floatformat(self):
        self.assertEqual(floatformat(7.7), '7.7')
        self.assertEqual(floatformat(7.0), '7')
        self.assertEqual(floatformat(0.7), '0.7')
        self.assertEqual(floatformat(0.07), '0.1')
        self.assertEqual(floatformat(0.007), '0.0')
        self.assertEqual(floatformat(0.0), '0')
        self.assertEqual(floatformat(7.7, 3), '7.700')
        self.assertEqual(floatformat(6.000000, 3), '6.000')
        self.assertEqual(floatformat(6.200000, 3), '6.200')
        self.assertEqual(floatformat(6.200000, -3), '6.200')
        self.assertEqual(floatformat(13.1031, -3), '13.103')
        self.assertEqual(floatformat(11.1197, -2), '11.12')
        self.assertEqual(floatformat(11.0000, -2), '11')
        self.assertEqual(floatformat(11.000001, -2), '11.00')
        self.assertEqual(floatformat(8.2798, 3), '8.280')
        self.assertEqual(floatformat(5555.555, 2), '5555.56')
        self.assertEqual(floatformat(001.3000, 2), '1.30')
        self.assertEqual(floatformat(0.12345, 2), '0.12')
        self.assertEqual(floatformat(decimal.Decimal('555.555'), 2), '555.56')
        self.assertEqual(floatformat(decimal.Decimal('09.000')), '9')
        self.assertEqual(floatformat('foo'), '')
        self.assertEqual(floatformat(13.1031, 'bar'), '13.1031')
        self.assertEqual(floatformat(18.125, 2), '18.13')
        self.assertEqual(floatformat('foo', 'bar'), '')
        self.assertEqual(floatformat('¿Cómo esta usted?'), '')
        self.assertEqual(floatformat(None), '')
        # Check that we're not converting to scientific notation.
        self.assertEqual(floatformat(0, 6), '0.000000')
        self.assertEqual(floatformat(0, 7), '0.0000000')
        self.assertEqual(floatformat(0, 10), '0.0000000000')
        self.assertEqual(floatformat(0.000000000000000000015, 20),
                         '0.00000000000000000002')
        pos_inf = float(1e30000)
        self.assertEqual(floatformat(pos_inf), six.text_type(pos_inf))
        neg_inf = float(-1e30000)
        self.assertEqual(floatformat(neg_inf), six.text_type(neg_inf))
        nan = pos_inf / pos_inf
        self.assertEqual(floatformat(nan), six.text_type(nan))

        # Objects convertible via __float__ must also be accepted.
        class FloatWrapper(object):
            def __init__(self, value):
                self.value = value
            def __float__(self):
                return self.value
        self.assertEqual(floatformat(FloatWrapper(11.000001), -2), '11.00')

        # Regression for #15789
        decimal_ctx = decimal.getcontext()
        old_prec, decimal_ctx.prec = decimal_ctx.prec, 2
        try:
            self.assertEqual(floatformat(1.2345, 2), '1.23')
            self.assertEqual(floatformat(15.2042, -3), '15.204')
            self.assertEqual(floatformat(1.2345, '2'), '1.23')
            self.assertEqual(floatformat(15.2042, '-3'), '15.204')
            self.assertEqual(floatformat(decimal.Decimal('1.2345'), 2), '1.23')
            self.assertEqual(floatformat(decimal.Decimal('15.2042'), -3), '15.204')
        finally:
            decimal_ctx.prec = old_prec

    def test_floatformat_py2_fail(self):
        self.assertEqual(floatformat(1.00000000000000015, 16), '1.0000000000000002')

    # The test above fails because of Python 2's float handling. Floats with
    # many zeroes after the decimal point should be passed in as another type
    # such as unicode or Decimal.
    if six.PY2:
        test_floatformat_py2_fail = unittest.expectedFailure(test_floatformat_py2_fail)

    def test_addslashes(self):
        self.assertEqual(addslashes('"double quotes" and \'single quotes\''),
                         '\\"double quotes\\" and \\\'single quotes\\\'')
        self.assertEqual(addslashes(r'\ : backslashes, too'),
                         '\\\\ : backslashes, too')

    def test_capfirst(self):
        self.assertEqual(capfirst('hello world'), 'Hello world')

    def test_escapejs(self):
        self.assertEqual(escapejs_filter('"double quotes" and \'single quotes\''),
                         '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027')
        self.assertEqual(escapejs_filter(r'\ : backslashes, too'),
                         '\\u005C : backslashes, too')
        self.assertEqual(escapejs_filter('and lots of whitespace: \r\n\t\v\f\b'),
                         'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008')
        self.assertEqual(escapejs_filter(r'<script>and this</script>'),
                         '\\u003Cscript\\u003Eand this\\u003C/script\\u003E')
        self.assertEqual(
            escapejs_filter('paragraph separator:\u2029and line separator:\u2028'),
            'paragraph separator:\\u2029and line separator:\\u2028')

    def test_fix_ampersands(self):
        self.assertEqual(fix_ampersands_filter('Jack & Jill & Jeroboam'),
                         'Jack &amp; Jill &amp; Jeroboam')

    def test_linenumbers(self):
        self.assertEqual(linenumbers('line 1\nline 2'),
                         '1. line 1\n2. line 2')
        self.assertEqual(linenumbers('\n'.join(['x'] * 10)),
                         '01. x\n02. x\n03. x\n04. x\n05. x\n06. x\n07. '\
                         'x\n08. x\n09. x\n10. x')

    def test_lower(self):
        self.assertEqual(lower('TEST'), 'test')
        # uppercase E umlaut
        self.assertEqual(lower('\xcb'), '\xeb')

    def test_make_list(self):
        self.assertEqual(make_list('abc'), ['a', 'b', 'c'])
        self.assertEqual(make_list(1234), ['1', '2', '3', '4'])

    def test_slugify(self):
        self.assertEqual(slugify(' Jack & Jill like numbers 1,2,3 and 4 and'\
            ' silly characters ?%.$!/'),
            'jack-jill-like-numbers-123-and-4-and-silly-characters')
        self.assertEqual(slugify("Un \xe9l\xe9phant \xe0 l'or\xe9e du bois"),
                         'un-elephant-a-loree-du-bois')

    def test_stringformat(self):
        self.assertEqual(stringformat(1, '03d'), '001')
        self.assertEqual(stringformat(1, 'z'), '')

    def test_title(self):
        self.assertEqual(title('a nice title, isn\'t it?'),
                         "A Nice Title, Isn't It?")
        self.assertEqual(title('discoth\xe8que'), 'Discoth\xe8que')

    def test_truncatewords(self):
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 1), 'A ...')
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 5),
            'A sentence with a few ...')
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 100),
            'A sentence with a few words in it')
        self.assertEqual(
            truncatewords('A sentence with a few words in it',
            'not a number'), 'A sentence with a few words in it')

    def test_truncatewords_html(self):
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 0), '')
        self.assertEqual(truncatewords_html('<p>one <a href="#">two - '\
            'three <br>four</a> five</p>', 2),
            '<p>one <a href="#">two ...</a></p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 4),
            '<p>one <a href="#">two - three <br>four ...</a></p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 5),
            '<p>one <a href="#">two - three <br>four</a> five</p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 100),
            '<p>one <a href="#">two - three <br>four</a> five</p>')
        self.assertEqual(truncatewords_html(
            '\xc5ngstr\xf6m was here', 1), '\xc5ngstr\xf6m ...')

    def test_upper(self):
        self.assertEqual(upper('Mixed case input'), 'MIXED CASE INPUT')
        # lowercase e umlaut
        self.assertEqual(upper('\xeb'), '\xcb')

    def test_urlencode(self):
        self.assertEqual(urlencode('fran\xe7ois & jill'),
                         'fran%C3%A7ois%20%26%20jill')
        self.assertEqual(urlencode(1), '1')

    def test_iriencode(self):
        self.assertEqual(iriencode('S\xf8r-Tr\xf8ndelag'),
                         'S%C3%B8r-Tr%C3%B8ndelag')
        self.assertEqual(iriencode(urlencode('fran\xe7ois & jill')),
                         'fran%C3%A7ois%20%26%20jill')

    def test_urlizetrunc(self):
        self.assertEqual(urlizetrunc('http://short.com/', 20), '<a href='\
            '"http://short.com/" rel="nofollow">http://short.com/</a>')
        self.assertEqual(urlizetrunc('http://www.google.co.uk/search?hl=en'\
            '&q=some+long+url&btnG=Search&meta=', 20), '<a href="http://'\
            'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search&'\
            'meta=" rel="nofollow">http://www.google...</a>')
        self.assertEqual(urlizetrunc('http://www.google.co.uk/search?hl=en'\
            '&q=some+long+url&btnG=Search&meta=', 20), '<a href="http://'\
            'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search'\
            '&meta=" rel="nofollow">http://www.google...</a>')

        # Check truncating of URIs which are the exact length
        uri = 'http://31characteruri.com/test/'
        self.assertEqual(len(uri), 31)
        self.assertEqual(urlizetrunc(uri, 31),
            '<a href="http://31characteruri.com/test/" rel="nofollow">'\
            'http://31characteruri.com/test/</a>')
        self.assertEqual(urlizetrunc(uri, 30),
            '<a href="http://31characteruri.com/test/" rel="nofollow">'\
            'http://31characteruri.com/t...</a>')
        self.assertEqual(urlizetrunc(uri, 2),
            '<a href="http://31characteruri.com/test/"'\
            ' rel="nofollow">...</a>')

    def test_urlize(self):
        # Check normal urlize
        self.assertEqual(urlize('http://google.com'),
            '<a href="http://google.com" rel="nofollow">http://google.com</a>')
        self.assertEqual(urlize('http://google.com/'),
            '<a href="http://google.com/" rel="nofollow">http://google.com/</a>')
        self.assertEqual(urlize('www.google.com'),
            '<a href="http://www.google.com" rel="nofollow">www.google.com</a>')
        self.assertEqual(urlize('djangoproject.org'),
            '<a href="http://djangoproject.org" rel="nofollow">djangoproject.org</a>')
        self.assertEqual(urlize('info@djangoproject.org'),
            '<a href="mailto:info@djangoproject.org">info@djangoproject.org</a>')

        # Check urlize with https addresses
        self.assertEqual(urlize('https://google.com'),
            '<a href="https://google.com" rel="nofollow">https://google.com</a>')

        # Check urlize doesn't overquote already quoted urls - see #9655
        # The teststring is the urlquoted version of 'http://hi.baidu.com/重新开始'
        self.assertEqual(urlize('http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B'),
            '<a href="http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B" rel="nofollow">'
            'http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B</a>')
        self.assertEqual(urlize('www.mystore.com/30%OffCoupons!'),
            '<a href="http://www.mystore.com/30%25OffCoupons!" rel="nofollow">'
            'www.mystore.com/30%OffCoupons!</a>')
        self.assertEqual(urlize('http://en.wikipedia.org/wiki/Caf%C3%A9'),
            '<a href="http://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
            'http://en.wikipedia.org/wiki/Caf%C3%A9</a>')
        self.assertEqual(urlize('http://en.wikipedia.org/wiki/Café'),
            '<a href="http://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
            'http://en.wikipedia.org/wiki/Café</a>')

        # Check urlize keeps balanced parentheses - see #11911
        self.assertEqual(urlize('http://en.wikipedia.org/wiki/Django_(web_framework)'),
            '<a href="http://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
            'http://en.wikipedia.org/wiki/Django_(web_framework)</a>')
        self.assertEqual(urlize('(see http://en.wikipedia.org/wiki/Django_(web_framework))'),
            '(see <a href="http://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
            'http://en.wikipedia.org/wiki/Django_(web_framework)</a>)')

        # Check urlize adds nofollow properly - see #12183
        self.assertEqual(urlize('foo@bar.com or www.bar.com'),
            '<a href="mailto:foo@bar.com">foo@bar.com</a> or '
            '<a href="http://www.bar.com" rel="nofollow">www.bar.com</a>')

        # Check urlize handles IDN correctly - see #13704
        self.assertEqual(urlize('http://c✶.ws'),
            '<a href="http://xn--c-lgq.ws" rel="nofollow">http://c✶.ws</a>')
        self.assertEqual(urlize('www.c✶.ws'),
            '<a href="http://www.xn--c-lgq.ws" rel="nofollow">www.c✶.ws</a>')
        self.assertEqual(urlize('c✶.org'),
            '<a href="http://xn--c-lgq.org" rel="nofollow">c✶.org</a>')
        self.assertEqual(urlize('info@c✶.org'),
            '<a href="mailto:info@xn--c-lgq.org">info@c✶.org</a>')

        # Check urlize doesn't highlight malformed URIs - see #16395
        self.assertEqual(urlize('http:///www.google.com'),
            'http:///www.google.com')
        self.assertEqual(urlize('http://.google.com'),
            'http://.google.com')
        self.assertEqual(urlize('http://@foo.com'),
            'http://@foo.com')

        # Check urlize accepts more TLDs - see #16656
        self.assertEqual(urlize('usa.gov'),
            '<a href="http://usa.gov" rel="nofollow">usa.gov</a>')

        # Check urlize don't crash on invalid email with dot-starting domain - see #17592
        self.assertEqual(urlize('email@.stream.ru'),
            'email@.stream.ru')

        # Check urlize accepts uppercased URL schemes - see #18071
        self.assertEqual(urlize('HTTPS://github.com/'),
            '<a href="https://github.com/" rel="nofollow">HTTPS://github.com/</a>')

        # Check urlize trims trailing period when followed by parenthesis - see #18644
        self.assertEqual(urlize('(Go to http://www.example.com/foo.)'),
            '(Go to <a href="http://www.example.com/foo" rel="nofollow">http://www.example.com/foo</a>.)')

        # Check urlize handles brackets properly (#19070)
        self.assertEqual(urlize('[see www.example.com]'),
            '[see <a href="http://www.example.com" rel="nofollow">www.example.com</a>]' )
        self.assertEqual(urlize('see test[at[example.com'),
            'see <a href="http://test[at[example.com" rel="nofollow">test[at[example.com</a>' )
        self.assertEqual(urlize('[http://168.192.0.1](http://168.192.0.1)'),
            '[<a href="http://168.192.0.1](http://168.192.0.1)" rel="nofollow">http://168.192.0.1](http://168.192.0.1)</a>')

        # Check urlize works with IPv4/IPv6 addresses
        self.assertEqual(urlize('http://192.168.0.15/api/9'),
            '<a href="http://192.168.0.15/api/9" rel="nofollow">http://192.168.0.15/api/9</a>')
        self.assertEqual(urlize('http://[2001:db8:cafe::2]/api/9'),
            '<a href="http://[2001:db8:cafe::2]/api/9" rel="nofollow">http://[2001:db8:cafe::2]/api/9</a>')

    def test_wordcount(self):
        self.assertEqual(wordcount(''), 0)
        self.assertEqual(wordcount('oneword'), 1)
        self.assertEqual(wordcount('lots of words'), 3)

        # NOTE(review): the remaining assertions exercise wordwrap, not
        # wordcount — presumably they belong in a separate test method.
        self.assertEqual(wordwrap('this is a long paragraph of text that '\
            'really needs to be wrapped I\'m afraid', 14),
            "this is a long\nparagraph of\ntext that\nreally needs\nto be "\
            "wrapped\nI'm afraid")
        self.assertEqual(wordwrap('this is a short paragraph of text.\n  '\
            'But this line should be indented', 14),
            'this is a\nshort\nparagraph of\ntext.\n  But this\nline '\
            'should be\nindented')
        self.assertEqual(wordwrap('this is a short paragraph of text.\n  '\
            'But this line should be indented',15), 'this is a short\n'\
            'paragraph of\ntext.\n  But this line\nshould be\nindented')

    def test_rjust(self):
        self.assertEqual(ljust('test', 10), 'test      ')
        self.assertEqual(ljust('test', 3), 'test')
        self.assertEqual(rjust('test', 10), '      test')
        self.assertEqual(rjust('test', 3), 'test')

    def test_center(self):
        self.assertEqual(center('test', 6), ' test ')

    def test_cut(self):
        self.assertEqual(cut('a string to be mangled', 'a'),
                         ' string to be mngled')
        self.assertEqual(cut('a string to be mangled', 'ng'),
                         'a stri to be maled')
        self.assertEqual(cut('a string to be mangled', 'strings'),
                         'a string to be mangled')

    def test_force_escape(self):
        escaped = force_escape('<some html & special characters > here')
        self.assertEqual(
            escaped, '&lt;some html &amp; special characters &gt; here')
        self.assertIsInstance(escaped, SafeData)
        self.assertEqual(
            force_escape('<some html & special characters > here ĐÅ€£'),
            '&lt;some html &amp; special characters &gt; here'\
            ' \u0110\xc5\u20ac\xa3')

    def test_linebreaks(self):
        self.assertEqual(linebreaks_filter('line 1'), '<p>line 1</p>')
        self.assertEqual(linebreaks_filter('line 1\nline 2'),
                         '<p>line 1<br />line 2</p>')
        self.assertEqual(linebreaks_filter('line 1\rline 2'),
                         '<p>line 1<br />line 2</p>')
        self.assertEqual(linebreaks_filter('line 1\r\nline 2'),
                         '<p>line 1<br />line 2</p>')

    def test_linebreaksbr(self):
        self.assertEqual(linebreaksbr('line 1\nline 2'),
                         'line 1<br />line 2')
        self.assertEqual(linebreaksbr('line 1\rline 2'),
                         'line 1<br />line 2')
        self.assertEqual(linebreaksbr('line 1\r\nline 2'),
                         'line 1<br />line 2')

    def test_removetags(self):
        self.assertEqual(removetags('some <b>html</b> with <script>alert'\
            '("You smell")</script> disallowed <img /> tags', 'script img'),
            'some <b>html</b> with alert("You smell") disallowed  tags')
        self.assertEqual(striptags('some <b>html</b> with <script>alert'\
            '("You smell")</script> disallowed <img /> tags'),
            'some html with alert("You smell") disallowed  tags')

    def test_dictsort(self):
        sorted_dicts = dictsort([{'age': 23, 'name': 'Barbara-Ann'},
                                 {'age': 63, 'name': 'Ra Ra Rasputin'},
                                 {'name': 'Jonny B Goode', 'age': 18}], 'age')

        self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
            [[('age', 18), ('name', 'Jonny B Goode')],
             [('age', 23), ('name', 'Barbara-Ann')],
             [('age', 63), ('name', 'Ra Ra Rasputin')]])

        # If it gets passed a list of something else different from
        # dictionaries it should fail silently
        self.assertEqual(dictsort([1, 2, 3], 'age'), '')
        self.assertEqual(dictsort('Hello!', 'age'), '')
        self.assertEqual(dictsort({'a': 1}, 'age'), '')
        self.assertEqual(dictsort(1, 'age'), '')

    def test_dictsortreversed(self):
        sorted_dicts = dictsortreversed([{'age': 23, 'name': 'Barbara-Ann'},
                                         {'age': 63, 'name': 'Ra Ra Rasputin'},
                                         {'name': 'Jonny B Goode', 'age': 18}],
                                        'age')

        self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
            [[('age', 63), ('name', 'Ra Ra Rasputin')],
             [('age', 23), ('name', 'Barbara-Ann')],
             [('age', 18), ('name', 'Jonny B Goode')]])

        # If it gets passed a list of something else different from
        # dictionaries it should fail silently
        self.assertEqual(dictsortreversed([1, 2, 3], 'age'), '')
        self.assertEqual(dictsortreversed('Hello!', 'age'), '')
        self.assertEqual(dictsortreversed({'a': 1}, 'age'), '')
        self.assertEqual(dictsortreversed(1, 'age'), '')

    def test_first(self):
        self.assertEqual(first([0,1,2]), 0)
        self.assertEqual(first(''), '')
        self.assertEqual(first('test'), 't')

    def test_join(self):
        self.assertEqual(join([0,1,2], 'glue'), '0glue1glue2')

    def test_length(self):
        self.assertEqual(length('1234'), 4)
        self.assertEqual(length([1,2,3,4]), 4)
        self.assertEqual(length_is([], 0), True)
        self.assertEqual(length_is([], 1), False)
        self.assertEqual(length_is('a', 1), True)
        self.assertEqual(length_is('a', 10), False)

    def test_slice(self):
        self.assertEqual(slice_filter('abcdefg', '0'), '')
        self.assertEqual(slice_filter('abcdefg', '1'), 'a')
        self.assertEqual(slice_filter('abcdefg', '-1'), 'abcdef')
        self.assertEqual(slice_filter('abcdefg', '1:2'), 'b')
        self.assertEqual(slice_filter('abcdefg', '1:3'), 'bc')
        self.assertEqual(slice_filter('abcdefg', '0::2'), 'aceg')

    def test_unordered_list(self):
        self.assertEqual(unordered_list(['item 1', 'item 2']),
                         '\t<li>item 1</li>\n\t<li>item 2</li>')
        self.assertEqual(unordered_list(['item 1', ['item 1.1']]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', 'item1.2'], 'item 2']),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item1.2'\
            '</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>')
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', ['item 1.1.1',
                                                    ['item 1.1.1.1']]]]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'\
            'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'\
            '</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>')
        self.assertEqual(unordered_list(
            ['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]),
            '\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>'\
            'Lawrence</li>\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>'\
            '\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>')

        # Items whose str() is used for rendering.
        @python_2_unicode_compatible
        class ULItem(object):
            def __init__(self, title):
                self.title = title
            def __str__(self):
                return 'ulitem-%s' % str(self.title)

        a = ULItem('a')
        b = ULItem('b')
        self.assertEqual(unordered_list([a,b]),
                         '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')

        # Old format for unordered lists should still work
        self.assertEqual(unordered_list(['item 1', []]), '\t<li>item 1</li>')

        self.assertEqual(unordered_list(['item 1', [['item 1.1', []]]]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')

        self.assertEqual(unordered_list(['item 1', [['item 1.1', []],
            ['item 1.2', []]]]), '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1'\
            '</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>')

        self.assertEqual(unordered_list(['States', [['Kansas', [['Lawrence',
            []], ['Topeka', []]]], ['Illinois', []]]]), '\t<li>States\n\t'\
            '<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>Lawrence</li>'\
            '\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>\n\t\t<li>'\
            'Illinois</li>\n\t</ul>\n\t</li>')

    def test_add(self):
        self.assertEqual(add('1', '2'), 3)

    def test_get_digit(self):
        self.assertEqual(get_digit(123, 1), 3)
        self.assertEqual(get_digit(123, 2), 2)
        self.assertEqual(get_digit(123, 3), 1)
        self.assertEqual(get_digit(123, 4), 0)
        self.assertEqual(get_digit(123, 0), 123)
        self.assertEqual(get_digit('xyz', 0), 'xyz')

    def test_date(self):
        # real testing of date() is in dateformat.py
        self.assertEqual(date(datetime.datetime(2005, 12, 29), "d F Y"),
                         '29 December 2005')
        self.assertEqual(date(datetime.datetime(2005, 12, 29), r'jS \o\f F'),
                         '29th of December')

    def test_time(self):
        # real testing of time() is done in dateformat.py
        self.assertEqual(time(datetime.time(13), "h"), '01')
        self.assertEqual(time(datetime.time(0), "h"), '12')

    def test_timesince(self):
        # real testing is done in timesince.py, where we can provide our own 'now'
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(
            timesince_filter(datetime.datetime.now() - datetime.timedelta(1)),
            '1\xa0day')

        self.assertEqual(
            timesince_filter(datetime.datetime(2005, 12, 29),
                             datetime.datetime(2005, 12, 30)),
            '1\xa0day')

    def test_timeuntil(self):
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(
            timeuntil_filter(datetime.datetime.now() + datetime.timedelta(1, 1)),
            '1\xa0day')

        self.assertEqual(
            timeuntil_filter(datetime.datetime(2005, 12, 30),
                             datetime.datetime(2005, 12, 29)),
            '1\xa0day')

    def test_default(self):
        self.assertEqual(default("val", "default"), 'val')
        self.assertEqual(default(None, "default"), 'default')
        self.assertEqual(default('', "default"), 'default')

    def test_if_none(self):
        self.assertEqual(default_if_none("val", "default"), 'val')
        self.assertEqual(default_if_none(None, "default"), 'default')
        self.assertEqual(default_if_none('', "default"), '')

    def test_divisibleby(self):
        self.assertEqual(divisibleby(4, 2), True)
        self.assertEqual(divisibleby(4, 3), False)

    def test_yesno(self):
        self.assertEqual(yesno(True), 'yes')
        self.assertEqual(yesno(False), 'no')
        self.assertEqual(yesno(None), 'maybe')
        self.assertEqual(yesno(True, 'certainly,get out of town,perhaps'),
                         'certainly')
        self.assertEqual(yesno(False, 'certainly,get out of town,perhaps'),
                         'get out of town')
        self.assertEqual(yesno(None, 'certainly,get out of town,perhaps'),
                         'perhaps')
        self.assertEqual(yesno(None, 'certainly,get out of town'),
                         'get out of town')

    def test_filesizeformat(self):
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(filesizeformat(1023), '1023\xa0bytes')
        self.assertEqual(filesizeformat(1024), '1.0\xa0KB')
        self.assertEqual(filesizeformat(10*1024), '10.0\xa0KB')
        self.assertEqual(filesizeformat(1024*1024-1), '1024.0\xa0KB')
        self.assertEqual(filesizeformat(1024*1024), '1.0\xa0MB')
        self.assertEqual(filesizeformat(1024*1024*50), '50.0\xa0MB')
        self.assertEqual(filesizeformat(1024*1024*1024-1), '1024.0\xa0MB')
        self.assertEqual(filesizeformat(1024*1024*1024), '1.0\xa0GB')
        self.assertEqual(filesizeformat(1024*1024*1024*1024), '1.0\xa0TB')
        self.assertEqual(filesizeformat(1024*1024*1024*1024*1024), '1.0\xa0PB')
        self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
                         '2000.0\xa0PB')
        self.assertEqual(filesizeformat(complex(1,-1)), '0\xa0bytes')
        self.assertEqual(filesizeformat(""), '0\xa0bytes')
        self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"),
                         '0\xa0bytes')

    def test_pluralize(self):
        self.assertEqual(pluralize(1), '')
        self.assertEqual(pluralize(0), 's')
        self.assertEqual(pluralize(2), 's')
        self.assertEqual(pluralize([1]), '')
        self.assertEqual(pluralize([]), 's')
        self.assertEqual(pluralize([1,2,3]), 's')
        self.assertEqual(pluralize(1,'es'), '')
        self.assertEqual(pluralize(0,'es'), 'es')
        self.assertEqual(pluralize(2,'es'), 'es')
        self.assertEqual(pluralize(1,'y,ies'), 'y')
        self.assertEqual(pluralize(0,'y,ies'), 'ies')
        self.assertEqual(pluralize(2,'y,ies'), 'ies')
        self.assertEqual(pluralize(0,'y,ies,error'), '')

    def test_phone2numeric(self):
        self.assertEqual(phone2numeric_filter('0800 flowers'), '0800 3569377')

    def test_non_string_input(self):
        # Filters shouldn't break if passed non-strings
        self.assertEqual(addslashes(123), '123')
        self.assertEqual(linenumbers(123), '1. 123')
        self.assertEqual(lower(123), '123')
        self.assertEqual(make_list(123), ['1', '2', '3'])
        self.assertEqual(slugify(123), '123')
        self.assertEqual(title(123), '123')
        self.assertEqual(truncatewords(123, 2), '123')
        self.assertEqual(upper(123), '123')
        self.assertEqual(urlencode(123), '123')
        self.assertEqual(urlize(123), '123')
        self.assertEqual(urlizetrunc(123, 1), '123')
        self.assertEqual(wordcount(123), 1)
        self.assertEqual(wordwrap(123, 2), '123')
        self.assertEqual(ljust('123', 4), '123 ')
        self.assertEqual(rjust('123', 4), ' 123')
        self.assertEqual(center('123', 5), ' 123 ')
        self.assertEqual(center('123', 6), ' 123  ')
        self.assertEqual(cut(123, '2'), '13')
        self.assertEqual(escape(123), '123')
        self.assertEqual(linebreaks_filter(123), '<p>123</p>')
        self.assertEqual(linebreaksbr(123), '123')
        self.assertEqual(removetags(123, 'a'), '123')
        self.assertEqual(striptags(123), '123')
class DefaultFiltersI18NTests(TransRealMixin, TestCase):
    """Locale-aware behaviour of default filters (German number formatting)."""

    def test_localized_filesizeformat(self):
        # NOTE: \xa0 avoids wrapping between value and unit
        with self.settings(USE_L10N=True):
            with translation.override('de', deactivate=True):
                # German locale uses a comma decimal separator and "Bytes".
                self.assertEqual(filesizeformat(1023), '1023\xa0Bytes')
                self.assertEqual(filesizeformat(1024), '1,0\xa0KB')
                self.assertEqual(filesizeformat(10*1024), '10,0\xa0KB')
                self.assertEqual(filesizeformat(1024*1024-1), '1024,0\xa0KB')
                self.assertEqual(filesizeformat(1024*1024), '1,0\xa0MB')
                self.assertEqual(filesizeformat(1024*1024*50), '50,0\xa0MB')
                self.assertEqual(filesizeformat(1024*1024*1024-1), '1024,0\xa0MB')
                self.assertEqual(filesizeformat(1024*1024*1024), '1,0\xa0GB')
                self.assertEqual(filesizeformat(1024*1024*1024*1024), '1,0\xa0TB')
                self.assertEqual(filesizeformat(1024*1024*1024*1024*1024),
                                 '1,0\xa0PB')
                self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
                                 '2000,0\xa0PB')
                self.assertEqual(filesizeformat(complex(1,-1)), '0\xa0Bytes')
                self.assertEqual(filesizeformat(""), '0\xa0Bytes')
                self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"),
                                 '0\xa0Bytes')
|
stonier/ecto
|
refs/heads/devel
|
test/scripts/test_throw.py
|
4
|
#!/usr/bin/env python
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import sys, ecto
import ecto.ecto_test as ecto_test
def makeplasm(N):
    """Build a plasm whose thrower cell raises after N iterations:
    Generate -> ThrowAfter -> Multiply."""
    generator = ecto_test.Generate(start=1, step=1)
    thrower = ecto_test.ThrowAfter(N=N)
    multiplier = ecto_test.Multiply()
    plasm = ecto.Plasm()
    plasm.connect(generator[:] >> thrower[:],
                  thrower[:] >> multiplier[:])
    return plasm
def do_one_impl(Sched, nthreads, niter):
    # Run one scheduler against a plasm that throws after `niter`
    # iterations, and verify the exception surfaces as EctoException.
    print "\n"*5, "*"*80
    print Sched, nthreads, niter
    p = makeplasm(niter)
    s = Sched(p)
    try:
        # Request more iterations than the thrower allows so the
        # exception is guaranteed to fire.
        s.execute(niter=niter+10)
        assert False, "that should have thrown"
    except ecto.EctoException, e:  # Python 2 except syntax
        print "okay:", e
def do_one(nthreads, niter):
    """Exercise every scheduler implementation with the same workload."""
    schedulers = [ecto.Scheduler]
    for sched in schedulers:
        do_one_impl(sched, nthreads, niter)
# Exhaustively exercise the throw path across repeated runs, thread
# counts and iteration counts.
# NOTE(review): `nthreads` is only printed by do_one_impl and never
# passed to the scheduler — confirm whether it is still meaningful.
for j in range(ecto.test.iterations):
    for q in range(10):
        for nthreads in range(1, 10):
            for niter in range(1, 100):
                do_one(nthreads, niter)
|
Anonymous-X6/django
|
refs/heads/master
|
tests/i18n/urls.py
|
205
|
from __future__ import unicode_literals
from django.conf.urls.i18n import i18n_patterns
from django.http import HttpResponse, StreamingHttpResponse
from django.test import ignore_warnings
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.translation import ugettext_lazy as _
# test deprecated version of i18n_patterns() function (with prefix). Remove it
# and convert to list of urls() in Django 1.10
i18n_patterns = ignore_warnings(category=RemovedInDjango110Warning)(i18n_patterns)
# URLconf used by the i18n tests: every pattern is served under a
# language-code prefix (e.g. /en/simple/).
urlpatterns = i18n_patterns('',
    (r'^simple/$', lambda r: HttpResponse()),
    # Streaming response whose parts are lazily translated strings.
    (r'^streaming/$', lambda r: StreamingHttpResponse([_("Yes"), "/", _("No")])),
)
|
bikong2/django
|
refs/heads/master
|
django/contrib/gis/management/commands/ogrinspect.py
|
369
|
import argparse
from django.contrib.gis import gdal
from django.core.management.base import BaseCommand, CommandError
from django.utils.inspect import get_func_args
class LayerOptionAction(argparse.Action):
    """
    Custom argparse action for the `ogrinspect` `layer_key` keyword option
    which may be an integer or a string.
    """
    def __call__(self, parser, namespace, value, option_string=None):
        # Prefer an integer layer index; fall back to the raw string
        # identifier when the value is not numeric.
        try:
            layer = int(value)
        except ValueError:
            layer = value
        setattr(namespace, self.dest, layer)
class ListOptionAction(argparse.Action):
    """
    Custom argparse action for `ogrinspect` keywords that require
    a string list. If the string is 'True'/'true' then the option
    value will be a boolean instead.
    """
    def __call__(self, parser, namespace, value, option_string=None):
        # 'true' (any case) means "apply to all fields"; anything else
        # is treated as a comma separated list of field names.
        parsed = True if value.lower() == 'true' else value.split(',')
        setattr(namespace, self.dest, parsed)
class Command(BaseCommand):
    help = ('Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n'
            'a GeoDjango model with the given model name. For example:\n'
            ' ./manage.py ogrinspect zipcode.shp Zipcode')
    requires_system_checks = False
    def add_arguments(self, parser):
        """Register the positional and optional arguments for ogrinspect."""
        parser.add_argument('data_source', help='Path to the data source.')
        parser.add_argument('model_name', help='Name of the model to create.')
        parser.add_argument('--blank', dest='blank',
                            action=ListOptionAction, default=False,
                            help='Use a comma separated list of OGR field names to add '
                                 'the `blank=True` option to the field definition. Set to `true` '
                                 'to apply to all applicable fields.')
        parser.add_argument('--decimal', dest='decimal',
                            action=ListOptionAction, default=False,
                            help='Use a comma separated list of OGR float fields to '
                                 'generate `DecimalField` instead of the default '
                                 '`FloatField`. Set to `true` to apply to all OGR float fields.')
        parser.add_argument('--geom-name', dest='geom_name', default='geom',
                            help='Specifies the model name for the Geometry Field '
                                 '(defaults to `geom`)')
        parser.add_argument('--layer', dest='layer_key',
                            action=LayerOptionAction, default=0,
                            help='The key for specifying which layer in the OGR data '
                                 'source to use. Defaults to 0 (the first layer). May be '
                                 'an integer or a string identifier for the layer.')
        parser.add_argument('--multi-geom', action='store_true',
                            dest='multi_geom', default=False,
                            help='Treat the geometry in the data source as a geometry collection.')
        parser.add_argument('--name-field', dest='name_field',
                            help='Specifies a field name to return for the `__unicode__`/`__str__` function.')
        parser.add_argument('--no-imports', action='store_false', dest='imports', default=True,
                            help='Do not include `from django.contrib.gis.db import models` statement.')
        parser.add_argument('--null', dest='null', action=ListOptionAction, default=False,
                            help='Use a comma separated list of OGR field names to add '
                                 'the `null=True` option to the field definition. Set to `true` '
                                 'to apply to all applicable fields.')
        parser.add_argument('--srid', dest='srid',
                            help='The SRID to use for the Geometry Field. If it can be '
                                 'determined, the SRID of the data source is used.')
        parser.add_argument('--mapping', action='store_true', dest='mapping',
                            help='Generate mapping dictionary for use with `LayerMapping`.')
    def handle(self, *args, **options):
        """Inspect the OGR data source and emit Python model source code.

        Returns the generated module text (model definition and,
        optionally, a LayerMapping dictionary) as a single string.
        Raises CommandError when GDAL is unavailable or the data source
        cannot be opened.
        """
        data_source, model_name = options.pop('data_source'), options.pop('model_name')
        if not gdal.HAS_GDAL:
            raise CommandError('GDAL is required to inspect geospatial data sources.')
        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.GDALException as msg:
            raise CommandError(msg)
        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        # Filter options to params accepted by `_ogrinspect`
        ogr_options = {k: v for k, v in options.items()
                       if k in get_func_args(_ogrinspect) and v is not None}
        output = [s for s in _ogrinspect(ds, model_name, **ogr_options)]
        if options['mapping']:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {'geom_name': options['geom_name'],
                      'layer_key': options['layer_key'],
                      'multi_geom': options['multi_geom'],
                      }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = {v: k for k, v in mapping_dict.items()}
            output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                           '%s_mapping = {' % model_name.lower()])
            output.extend("    '%s' : '%s'," % (
                rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields
            )
            output.extend(["    '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
        return '\n'.join(output) + '\n'
|
danielhrisca/asammdf
|
refs/heads/master
|
asammdf/gui/widgets/channel_group_info.py
|
1
|
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
from PyQt5 import QtCore, QtGui, QtWidgets
from ...blocks.utils import csv_bytearray2hex
from ..ui import resource_rc as resource_rc
from ..ui.channel_group_info_widget import Ui_ChannelGroupInfo
from ..widgets.list_item import ListItem
class ChannelGroupInfoWidget(Ui_ChannelGroupInfo, QtWidgets.QWidget):
    """Widget that shows a channel group's metadata and a hex dump of its
    raw records, highlighting the bytes of the currently selected channel."""
    def __init__(self, mdf, group, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setupUi(self)
        channel_group = group.channel_group
        self.mdf = mdf
        self.group = group
        self.channel_group_label.setText(channel_group.metadata())
        if hasattr(channel_group, "acq_source") and channel_group.acq_source:
            self.source_label.setText(channel_group.acq_source.metadata())
        # Populate the channel list widget, sorted by channel name; each
        # item remembers the channel's original index via `entry`.
        items = []
        for i, ch in enumerate(group.channels):
            item = ListItem(entry=i, name=ch.name)
            item.setText(item.name)
            items.append(item)
        items.sort(key=lambda x: x.name)
        for item in items:
            self.channels.addItem(item)
        self.scroll.valueChanged.connect(self._display)
        self.channels.currentRowChanged.connect(self.select_channel)
        # Highlight window (byte offset/count) of the selected channel.
        self.byte_count = 0
        self.byte_offset = 0
        self.position = 0
        # Width of the record-index column in the hex dump.
        self.index_size = len(str(channel_group.cycles_nr))
        self.cycles = channel_group.cycles_nr
        if self.mdf.version >= "4.00":
            # MDF4 records may carry trailing invalidation bytes.
            self.record_size = (
                channel_group.samples_byte_nr + channel_group.invalidation_bytes_nr
            )
        else:
            self.record_size = channel_group.samples_byte_nr
        self.wrap.stateChanged.connect(self.wrap_changed)
        self._display(self.position)
    def wrap_changed(self):
        # Toggle word wrapping of the hex dump, then re-render.
        if self.wrap.checkState() == QtCore.Qt.Checked:
            self.display.setWordWrapMode(QtGui.QTextOption.WordWrap)
        else:
            self.display.setWordWrapMode(QtGui.QTextOption.NoWrap)
        self._display(self.position)
    def select_channel(self, row):
        # Compute the byte span occupied by the selected channel so that
        # _display() can highlight it.
        item = self.channels.item(row)
        channel = self.group.channels[item.entry]
        self.byte_offset = channel.byte_offset
        byte_count = channel.bit_count + channel.bit_offset
        if byte_count % 8:
            # Round partial bytes up to a whole byte.
            byte_count += 8 - (byte_count % 8)
        self.byte_count = byte_count // 8
        self._display(self.position)
    def _display(self, position):
        """Render ~100 records starting at the scroll position as HTML."""
        self.display.clear()
        self.position = position
        record_offset = max(0, position * self.cycles // self.scroll.maximum())
        record_end = max(0, position * self.cycles // self.scroll.maximum() + 100)
        record_count = record_end - record_offset
        data = b"".join(
            e[0]
            for e in self.mdf._load_data(
                self.group, record_offset=record_offset, record_count=record_count
            )
        )
        # One row per record; csv_bytearray2hex renders each record as
        # space separated hex bytes (3 characters per byte), which the
        # slicing arithmetic below relies on.
        data = pd.Series(list(np.frombuffer(data, dtype=f"({self.record_size},)u1")))
        data = list(csv_bytearray2hex(data))
        lines = [
            """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">
<html><head><meta name="qrichtext" content="1" /><style type="text/css">
p, li { white-space: pre-wrap; }
</style></head><body style=" font-size:8pt; font-style:normal;">"""
        ]
        if self.byte_count == 0:
            # No channel selected: plain rows without highlighting.
            template = f'<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-weight:600; color:#61b2e2;">{{index: >{self.index_size}}}: </span>{{line}}</p>'
            for i, l in enumerate(data, record_offset):
                lines.append(template.format(index=i, line=l))
        else:
            # Highlight the selected channel's bytes in orange.
            template = f'<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-weight:600; color:#61b2e2;">{{index: >{self.index_size}}}: </span>{{start}}<span style=" font-weight:600; color:#ff5500;">{{middle}}</span>{{end}}</p>'
            for i, l in enumerate(data, record_offset):
                lines.append(
                    template.format(
                        index=i,
                        start=l[: self.byte_offset * 3],
                        middle=l[
                            self.byte_offset * 3 : self.byte_offset * 3
                            + self.byte_count * 3
                        ],
                        end=l[self.byte_offset * 3 + self.byte_count * 3 :],
                    )
                )
        self.display.appendHtml("\n".join(lines))
        # Keep the text-view scrollbar pinned at the extremes.
        if position == 0:
            self.display.verticalScrollBar().setSliderPosition(0)
        elif position == self.scroll.maximum():
            self.display.verticalScrollBar().setSliderPosition(
                self.display.verticalScrollBar().maximum()
            )
|
lexyan/SickBeard
|
refs/heads/master
|
sickbeard/notifiers/tweet.py
|
14
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
from sickbeard import logger, common
from sickbeard.exceptions import ex
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl #@UnusedImport
except:
from cgi import parse_qsl #@Reimport
import lib.oauth2 as oauth
import lib.pythontwitter as twitter
class TwitterNotifier:
    """Posts Sick Beard notifications to Twitter using OAuth.

    The OAuth request/access tokens are stored in the
    sickbeard.TWITTER_USERNAME / TWITTER_PASSWORD settings."""
    consumer_key = "vHHtcB6WzpWDG6KYlBMr8g"
    consumer_secret = "zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E"
    REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
    ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
    AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
    SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
    def notify_snatch(self, ep_name):
        # Tweet on snatch only if the user enabled it.
        if sickbeard.TWITTER_NOTIFY_ONSNATCH:
            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH]+': '+ep_name)
    def notify_download(self, ep_name):
        # Tweet on download only if the user enabled it.
        if sickbeard.TWITTER_NOTIFY_ONDOWNLOAD:
            self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD]+': '+ep_name)
    def test_notify(self):
        # `force=True` bypasses the USE_TWITTER setting.
        return self._notifyTwitter("This is a test notification from Sick Beard", force=True)
    def _get_authorization(self):
        """Request a temporary OAuth token and return the user-facing
        authorization URL (or None when the request fails)."""
        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
        oauth_client = oauth.Client(oauth_consumer)
        logger.log('Requesting temp token from Twitter')
        resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
        if resp['status'] != '200':
            logger.log('Invalid respond from Twitter requesting temp token: %s' % resp['status'])
        else:
            request_token = dict(parse_qsl(content))
            # The temp token is parked in the username/password settings
            # until _get_credentials() exchanges it for an access token.
            sickbeard.TWITTER_USERNAME = request_token['oauth_token']
            sickbeard.TWITTER_PASSWORD = request_token['oauth_token_secret']
            return self.AUTHORIZATION_URL+"?oauth_token="+ request_token['oauth_token']
    def _get_credentials(self, key):
        """Exchange the temp token plus the user-entered verifier `key`
        for a permanent access token. Returns True on success."""
        request_token = {}
        request_token['oauth_token'] = sickbeard.TWITTER_USERNAME
        request_token['oauth_token_secret'] = sickbeard.TWITTER_PASSWORD
        request_token['oauth_callback_confirmed'] = 'true'
        token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
        token.set_verifier(key)
        logger.log('Generating and signing request for an access token using key '+key)
        signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
        oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
        logger.log('oauth_consumer: '+str(oauth_consumer))
        oauth_client = oauth.Client(oauth_consumer, token)
        logger.log('oauth_client: '+str(oauth_client))
        resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
        logger.log('resp, content: '+str(resp)+','+str(content))
        access_token = dict(parse_qsl(content))
        logger.log('access_token: '+str(access_token))
        logger.log('resp[status] = '+str(resp['status']))
        if resp['status'] != '200':
            logger.log('The request for a token with did not succeed: '+str(resp['status']), logger.ERROR)
            return False
        else:
            logger.log('Your Twitter Access Token key: %s' % access_token['oauth_token'])
            logger.log('Access Token secret: %s' % access_token['oauth_token_secret'])
            sickbeard.TWITTER_USERNAME = access_token['oauth_token']
            sickbeard.TWITTER_PASSWORD = access_token['oauth_token_secret']
            return True
    def _send_tweet(self, message=None):
        """Post `message` via the python-twitter API. Returns True on success."""
        username=self.consumer_key
        password=self.consumer_secret
        access_token_key=sickbeard.TWITTER_USERNAME
        access_token_secret=sickbeard.TWITTER_PASSWORD
        logger.log(u"Sending tweet: "+message)
        api = twitter.Api(username, password, access_token_key, access_token_secret)
        try:
            api.PostUpdate(message)
        except Exception, e:  # Python 2 except syntax
            logger.log(u"Error Sending Tweet: "+ex(e), logger.ERROR)
            return False
        return True
    def _notifyTwitter(self, message='', force=False):
        # Prefix every tweet with the configured tag; `force` bypasses
        # the global USE_TWITTER switch (used by test_notify).
        prefix = sickbeard.TWITTER_PREFIX
        if not sickbeard.USE_TWITTER and not force:
            return False
        return self._send_tweet(prefix+": "+message)
notifier = TwitterNotifier  # NOTE(review): exports the class, not an instance — confirm the notifiers framework instantiates it
|
wtud/tsap
|
refs/heads/master
|
tribler-prototype/tsap/service/SettingsManager.py
|
1
|
# coding: utf-8
# Written by Wendo Sabée
# Manages local settings. SETTINGS ARE NOT SAVED LOCALLY BETWEEN SESSIONS (for now)!
import os
import ast
# Setup logger
import logging
_logger = logging.getLogger(__name__)
from Tribler.Category.Category import Category
from DownloadManager import DownloadManager
from BaseManager import BaseManager
ENVIRONMENT_SETTINGS_PREFIX = "TRIBLER_SETTING_"
class SettingsManager(BaseManager):
def _connect(self):
"""
Load settings from environment variables.
:return: Nothing.
"""
if not self._connected:
self._connected = True
self._load_settings_from_env()
else:
raise RuntimeError('SettingsManager already connected')
def _xmlrpc_register(self, xmlrpc):
"""
Register the public functions in this manager with an XML-RPC Manager.
:param xmlrpc: The XML-RPC Manager it should register to.
:return: Nothing.
"""
xmlrpc.register_function(self.get_thumbs_directory, "settings.get_thumbs_directory")
xmlrpc.register_function(self.get_family_filter, "settings.get_family_filter")
xmlrpc.register_function(self.set_family_filter, "settings.set_family_filter")
xmlrpc.register_function(self.set_max_download, "settings.set_max_download")
xmlrpc.register_function(self.get_max_download, "settings.get_max_download")
xmlrpc.register_function(self.set_max_download, "settings.set_max_upload")
xmlrpc.register_function(self.get_max_download, "settings.get_max_upload")
def _load_settings_from_env(self):
"""
Settings are passed to the Tribler process on startup with the TRIBLER_SETTING_* environment variables. This
function iterates over the environment variables and calls the setter functions associated with any found
variables.
:return: Nothing.
"""
def get_value(value):
if value.lower() == "true":
return True
elif value.lower() == "false":
return False
else:
return ast.literal_eval(value)
for envkey in os.environ.keys():
if envkey.startswith(ENVIRONMENT_SETTINGS_PREFIX):
try:
function_name = "set_%s" % envkey[len(ENVIRONMENT_SETTINGS_PREFIX):].lower()
function_value = os.environ[envkey]
_logger.info("Setting preset setting with %s(%s)" % (function_name, function_value))
# Call setter
getattr(self, function_name)(get_value(function_value))
except Exception, e:
_logger.warn("Could not set settings for key %s: %s" % (envkey, e.args))
def get_thumbs_directory(self):
"""
Returns the collected_torrent_files directory that contains the folders containing collected thumbnail data.
These folders have the format of .../collected_torrent_files/thumbs-[INFOHASH]/[CONTENTHASH]/, where [INFOHASH]
is the infohash of the torrent file, and [CONTENTHASH] a hash belonging to the thumbnail torrent. Each of these
folders has one of multiple image files that can be used as thumbnails.
:return: Path to collected_torrent_files directory.
"""
return self._session.get_torrent_collecting_dir()
def get_family_filter(self):
"""
Get the current state of the family filter.
:return: Boolean indicating state.
"""
catobj = Category.getInstance()
return catobj.family_filter_enabled()
def set_family_filter(self, enable):
"""
Set the state of the family filter.
:param enable: Boolean with the new state.
:return: Boolean indicating success.
"""
try:
Category.getInstance().set_family_filter(enable)
return True
except:
return False
def set_max_download(self, speed):
"""
Sets the maximum download speed in the rate limiter.
:param speed: The maximum speed in KiB/s
:return: Boolean indicating success.
"""
try:
DownloadManager.getInstance().set_max_download(speed)
return True
except:
return False
def get_max_download(self):
"""
Gets the maximum download speed from the rate limiter
:return: Maximum download speed in KiB/s
"""
try:
return DownloadManager.getInstance().get_max_download()
except:
return False
def set_max_upload(self, speed):
"""
Sets the maximum upload speed in the rate limiter.
:param speed: The maximum speed in KiB/s
:return: Boolean indicating success.
"""
try:
DownloadManager.getInstance().set_max_upload(speed)
return True
except:
return False
def get_max_upload(self):
"""
Gets the maximum upload speed from the rate limiter
:return: Maximum upload speed in KiB/s
"""
try:
return DownloadManager.getInstance().get_max_upload()
except:
return False
|
dandeliondeathray/niancatscala
|
refs/heads/master
|
slackrest/bin/mock_chatbot.py
|
2
|
import tornado.web
import tornado.websocket
import tornado.httpserver
import tornado.ioloop
import json
test_event_handler = None
loop = tornado.ioloop.IOLoop.current()
class TestEventHandler(tornado.websocket.WebSocketHandler):
    """Websocket endpoint that registers itself as the global event sink."""
    def open(self):
        # Remember the (single) connected test client so write_event()
        # can push notifications to it.
        global test_event_handler
        test_event_handler = self
    def on_message(self, message):
        # Incoming messages from the test client are ignored.
        pass
    def on_close(self):
        # NOTE(review): the global handler is not cleared on close, so
        # write_event() may target a closed socket — confirm intended.
        pass
def write_event(type, message=None):
    """Push an event notification to the attached test websocket, if any."""
    payload = {'event': type}
    if message:
        payload['message'] = message
    if test_event_handler:
        # Schedule the write on the IO loop so it is safe to call from
        # any handler.
        loop.add_callback(test_event_handler.write_message, payload)
class ReplyHandler(tornado.web.RequestHandler):
    """Serve a canned 'reply' response and notify the test socket."""
    def get(self):
        self.write(json.dumps([{'response_type': 'reply', 'message': 'Some reply'}]))
        self.finish()
        write_event('reply')
class NotificationHandler(tornado.web.RequestHandler):
    """Serve a canned 'notification' response and notify the test socket."""
    def get(self):
        self.write(json.dumps([{'response_type': 'notification', 'message': 'Some notification'}]))
        self.finish()
        write_event('notification')
class MakeAPostHandler(tornado.web.RequestHandler):
    """POST endpoint returning a canned reply; notifies the test socket."""
    def post(self):
        self.write(json.dumps([{'response_type': 'reply', 'message': 'Make a post'}]))
        self.finish()
        write_event('reply')
class Application(tornado.web.Application):
    """Tornado app wiring the mock-chatbot routes to their handlers."""
    def __init__(self):
        handlers = [
            (r'/reply', ReplyHandler),
            (r'/notify', NotificationHandler),
            (r'/makeapost', MakeAPostHandler),
            (r'/test', TestEventHandler)
        ]
        settings = {
            'template_path': 'templates'
        }
        tornado.web.Application.__init__(self, handlers, **settings)
if __name__ == '__main__':
    # Serve the mock chatbot on port 80 until interrupted.
    ws_app = Application()
    server = tornado.httpserver.HTTPServer(ws_app)
    server.listen(80)
    tornado.ioloop.IOLoop.instance().start()
|
coolbombom/CouchPotatoServer
|
refs/heads/master
|
libs/requests/packages/charade/big5prober.py
|
2930
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
    """Charset prober for Big5 (traditional Chinese) encoded text."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # State machine validates Big5 byte sequences; the distribution
        # analyser scores character frequencies for confidence.
        self._mCodingSM = CodingStateMachine(Big5SMModel)
        self._mDistributionAnalyzer = Big5DistributionAnalysis()
        self.reset()
    def get_charset_name(self):
        return "Big5"
|
andfoy/margffoy-tuay-server
|
refs/heads/master
|
env/lib/python2.7/site-packages/future-0.14.3-py2.7.egg/future/types/newdict.py
|
70
|
"""
A dict subclass for Python 2 that behaves like Python 3's dict
Example use:
>>> from builtins import dict
>>> d1 = dict() # instead of {} for an empty dict
>>> d2 = dict(key1='value1', key2='value2')
The keys, values and items methods now return iterators on Python 2.x
(with set-like behaviour on Python 2.7).
>>> for d in (d1, d2):
... assert not isinstance(d.keys(), list)
... assert not isinstance(d.values(), list)
... assert not isinstance(d.items(), list)
"""
import sys
from future.utils import with_metaclass
from future.types.newobject import newobject
_builtin_dict = dict
ver = sys.version_info[:2]
class BaseNewDict(type):
    # Metaclass that makes isinstance(x, newdict) accept any builtin
    # dict, so Py2 code using the backport interoperates with native dicts.
    def __instancecheck__(cls, instance):
        if cls == newdict:
            return isinstance(instance, _builtin_dict)
        else:
            return issubclass(instance.__class__, cls)
class newdict(with_metaclass(BaseNewDict, _builtin_dict)):
    """
    A backport of the Python 3 dict object to Py2
    """
    def items(self):
        """
        On Python 2.7+:
            D.items() -> a set-like object providing a view on D's items
        On Python 2.6:
            D.items() -> an iterator over D's items
        """
        if ver == (2, 7):
            return self.viewitems()
        elif ver == (2, 6):
            return self.iteritems()
        elif ver >= (3, 0):
            # BUGFIX: was `return self.items()`, which recurses infinitely
            # on Py3; delegate to the builtin implementation instead.
            return super(newdict, self).items()
    def keys(self):
        """
        On Python 2.7+:
            D.keys() -> a set-like object providing a view on D's keys
        On Python 2.6:
            D.keys() -> an iterator over D's keys
        """
        if ver == (2, 7):
            return self.viewkeys()
        elif ver == (2, 6):
            return self.iterkeys()
        elif ver >= (3, 0):
            # BUGFIX: avoid infinite recursion on Py3 (see items()).
            return super(newdict, self).keys()
    def values(self):
        """
        On Python 2.7+:
            D.values() -> a set-like object providing a view on D's values
        On Python 2.6:
            D.values() -> an iterator over D's values
        """
        if ver == (2, 7):
            return self.viewvalues()
        elif ver == (2, 6):
            return self.itervalues()
        elif ver >= (3, 0):
            # BUGFIX: avoid infinite recursion on Py3 (see items()).
            return super(newdict, self).values()
    def __new__(cls, *args, **kwargs):
        """
        dict() -> new empty dictionary
        dict(mapping) -> new dictionary initialized from a mapping object's
            (key, value) pairs
        dict(iterable) -> new dictionary initialized as if via:
            d = {}
            for k, v in iterable:
                d[k] = v
        dict(**kwargs) -> new dictionary initialized with the name=value pairs
            in the keyword argument list.  For example:  dict(one=1, two=2)
        """
        if len(args) == 0:
            return super(newdict, cls).__new__(cls)
        # The original code branched on `type(args[0]) == newdict` but both
        # branches were identical; a newdict, mapping or iterable can all be
        # forwarded unchanged to the builtin constructor.
        return super(newdict, cls).__new__(cls, args[0])
    def __native__(self):
        """
        Hook for the future.utils.native() function
        """
        return dict(self)
__all__ = ['newdict']
|
rahul67/hue
|
refs/heads/master
|
desktop/core/ext-py/kazoo-2.0/kazoo/protocol/__init__.py
|
9480
|
#
|
disqus/djangopypi
|
refs/heads/master
|
djangopypi/views/__init__.py
|
2
|
from logging import getLogger
from django.conf import settings
from django.http import HttpResponseNotAllowed
from djangopypi.decorators import csrf_exempt
from djangopypi.models import Package, Release
from djangopypi.views.xmlrpc import parse_xmlrpc_request
log = getLogger('djangopypi.views')
@csrf_exempt
def root(request, fallback_view=None, **kwargs):
    """ Root view of the package index, handle incoming actions from distutils
    or redirect to a more user friendly view """
    if request.method == 'POST':
        if request.META['CONTENT_TYPE'] == 'text/xml':
            log.debug('XMLRPC request received')
            return parse_xmlrpc_request(request)
        log.debug('Distutils request received')
        # distutils sends its command in the ':action' POST field.
        action = request.POST.get(':action','')
    else:
        action = request.GET.get(':action','')
    if not action:
        log.debug('No action in root view')
        # No action: hand off to the configured human-facing view.
        if fallback_view is None:
            fallback_view = settings.DJANGOPYPI_FALLBACK_VIEW
        return fallback_view(request, **kwargs)
    if not action in settings.DJANGOPYPI_ACTION_VIEWS:
        log.error('Invalid action encountered: %s' % (action,))
        # 405 advertising the set of supported actions.
        return HttpResponseNotAllowed(settings.DJANGOPYPI_ACTION_VIEWS.keys())
    log.debug('Applying configured action view for %s' % (action,))
    return settings.DJANGOPYPI_ACTION_VIEWS[action](request, **kwargs)
|
Opentrons/labware
|
refs/heads/master
|
robot-server/tests/service/session/session_types/protocol/execution/test_protocol_runner.py
|
2
|
import os
import sys
from pathlib import Path
from unittest.mock import MagicMock, patch, PropertyMock
import pytest
from opentrons.api import Session
from opentrons.hardware_control import ThreadedAsyncLock
from robot_server.service.protocol.protocol import UploadedProtocol, \
UploadedProtocolMeta, FileMeta
from robot_server.service.session.session_types.protocol.execution. \
protocol_runner import ProtocolRunnerContext, ProtocolRunner
@pytest.fixture
def mock_os_chdir():
    """Patch os.chdir so tests can assert directory changes without side effects."""
    with patch.object(os, "chdir") as p:
        yield p
@pytest.fixture
def uploaded_protocol_meta():
    """Metadata for a fake uploaded protocol living in a mocked temp dir."""
    mock_temp_dir = MagicMock()
    # `.name` mimics tempfile.TemporaryDirectory().name
    type(mock_temp_dir).name = PropertyMock(return_value="some_path")
    return UploadedProtocolMeta(identifier="None",
                                protocol_file=FileMeta(
                                    path=Path("/some_path/abc.py"),
                                    content_hash=""
                                ),
                                directory=mock_temp_dir
                                )
@pytest.fixture
def mock_protocol(uploaded_protocol_meta):
    """UploadedProtocol mock exposing the fake metadata and fixed contents."""
    m = MagicMock(spec=UploadedProtocol)
    type(m).meta = PropertyMock(return_value=uploaded_protocol_meta)
    m.get_contents.return_value = "my contents"
    return m
@pytest.fixture
def mock_context():
    """Patch ProtocolRunnerContext so tests can assert it is (not) entered."""
    with patch('robot_server.service.session.session_types.protocol'
               '.execution.protocol_runner.ProtocolRunnerContext') as p:
        yield p
@pytest.fixture
def protocol_runner(mock_protocol, loop, hardware):
    """ProtocolRunner under test, built on the mocked protocol."""
    return ProtocolRunner(protocol=mock_protocol,
                          loop=loop,
                          hardware=hardware,
                          motion_lock=ThreadedAsyncLock())
def test_load(protocol_runner, mock_context,
              uploaded_protocol_meta, mock_protocol):
    # load() must build an api Session from the protocol contents using
    # the runner's own hardware/loop/broker/lock.
    with patch.object(Session, "build_and_prep") as mock:
        protocol_runner.load()
        mock_context.assert_called_once()
        mock.assert_called_once_with(
            name=uploaded_protocol_meta.protocol_file.path.name,
            contents=mock_protocol.get_contents(),
            hardware=protocol_runner._hardware,
            loop=protocol_runner._loop,
            broker=protocol_runner._broker,
            motion_lock=protocol_runner._motion_lock,
            extra_labware={})
@pytest.mark.parametrize(argnames="func",
                         argvalues=[ProtocolRunner.run,
                                    ProtocolRunner.simulate,
                                    ProtocolRunner.cancel,
                                    ProtocolRunner.pause,
                                    ProtocolRunner.resume])
def test_no_session_will_not_raise(func, protocol_runner, mock_context):
    # Without a loaded session every command must be a silent no-op.
    func(protocol_runner)
    mock_context.assert_not_called()
@pytest.mark.parametrize(argnames="func,target",
                         argvalues=[[ProtocolRunner.run, "run"],
                                    [ProtocolRunner.simulate, "refresh"],
                                    [ProtocolRunner.cancel, "stop"],
                                    [ProtocolRunner.pause, "pause"],
                                    [ProtocolRunner.resume, "resume"]])
def test_session_calls(func, target, protocol_runner, mock_context):
    # Each runner command must delegate to the matching Session method.
    protocol_runner._session = MagicMock()
    func(protocol_runner)
    getattr(protocol_runner._session, target).assert_called_once()
def test_listeners(protocol_runner):
    # Messages fan out to every registered listener; removed listeners
    # stop receiving subsequent messages.
    results1 = []
    results2 = []
    protocol_runner.add_listener(results1.append)
    protocol_runner.add_listener(results2.append)
    protocol_runner._on_message(1)
    protocol_runner._on_message(2)
    assert results1 == [1, 2] == results2
    protocol_runner.remove_listener(results2.append)
    protocol_runner._on_message(3)
    assert results1 == [1, 2, 3]
    assert results2 == [1, 2]
def test_protocol_runner_context(mock_protocol, uploaded_protocol_meta,
                                 mock_os_chdir):
    # The context manager must chdir into the protocol's temp directory
    # and put it on sys.path, then restore both on exit.
    with ProtocolRunnerContext(mock_protocol) as context:
        # We are changing directory to the temp directory
        mock_os_chdir.assert_called_with(
            uploaded_protocol_meta.directory.name
        )
        # Adding it to sys.path
        assert uploaded_protocol_meta.directory.name in sys.path
    # Done with context manager. Let's make sure we clean up
    assert uploaded_protocol_meta.directory.name not in sys.path
    assert sys.path == context._path
    mock_os_chdir.assert_called_with(context._cwd)
|
miurahr/seahub
|
refs/heads/master
|
seahub/wiki/models.py
|
6
|
from django.db import models
from seahub.base.fields import LowerCaseCharField
class WikiDoesNotExist(Exception):
    """Raised when the requested wiki library cannot be found."""
    pass
class WikiPageMissing(Exception):
    """Raised when a wiki exists but the requested page does not."""
    pass
class PersonalWikiManager(models.Manager):
    def save_personal_wiki(self, username, repo_id):
        """
        Create or update a user's personal wiki.
        """
        try:
            # Re-point an existing personal wiki at the new repo.
            wiki = self.get(username=username)
            wiki.repo_id = repo_id
        except self.model.DoesNotExist:
            wiki = self.model(username=username, repo_id=repo_id)
        wiki.save(using=self._db)
        return wiki
class PersonalWiki(models.Model):
    # One wiki library per user; the (lower-cased) username is the key.
    username = LowerCaseCharField(max_length=255, unique=True)
    repo_id = models.CharField(max_length=36)
    objects = PersonalWikiManager()
class GroupWikiManager(models.Manager):
    def save_group_wiki(self, group_id, repo_id):
        """
        Create or update group wiki.
        """
        try:
            # Re-point an existing group wiki at the new repo.
            groupwiki = self.get(group_id=group_id)
            groupwiki.repo_id = repo_id
        except self.model.DoesNotExist:
            groupwiki = self.model(group_id=group_id, repo_id=repo_id)
        groupwiki.save(using=self._db)
        return groupwiki
class GroupWiki(models.Model):
    # One wiki library per group.
    group_id = models.IntegerField(unique=True)
    repo_id = models.CharField(max_length=36)
    objects = GroupWikiManager()
###### signal handlers
from django.dispatch import receiver
from seahub.signals import repo_deleted
@receiver(repo_deleted)
def remove_personal_wiki(sender, **kwargs):
    # When a library is deleted, drop any personal wiki bound to it.
    repo_owner = kwargs['repo_owner']
    repo_id = kwargs['repo_id']
    PersonalWiki.objects.filter(username=repo_owner, repo_id=repo_id).delete()
|
pchauncey/ansible
|
refs/heads/devel
|
lib/ansible/modules/storage/netapp/netapp_e_hostgroup.py
|
33
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata consumed by ansible-doc and tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation (parsed as YAML by Ansible tooling).
# Fix: the 'hosts' option key was written as 'hosts::' (double colon), which
# corrupts the option name when the YAML is parsed.
DOCUMENTATION = '''
---
module: netapp_e_hostgroup
version_added: "2.2"
short_description: Manage NetApp Storage Array Host Groups
author: Kevin Hulquest (@hulquest)
description:
    - Create, update or destroy host groups on a NetApp E-Series storage array.
options:
    api_username:
        required: true
        description:
        - The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
    api_password:
        required: true
        description:
        - The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
    api_url:
        required: true
        description:
        - The url to the SANtricity WebServices Proxy or embedded REST API.
    validate_certs:
        required: false
        default: true
        description:
        - Should https certificates be validated?
    ssid:
        required: true
        description:
        - The ID of the array to manage (as configured on the web services proxy).
    state:
        required: true
        description:
        - Whether the specified host group should exist or not.
        choices: ['present', 'absent']
    name:
        required: false
        description:
        - The name of the host group to manage. Either this or C(id_num) must be supplied.
    new_name:
        required: false
        description:
        - specify this when you need to update the name of a host group
    id:
        required: false
        description:
        - The id number of the host group to manage. Either this or C(name) must be supplied.
    hosts:
        required: false
        description:
        - a list of host names/labels to add to the group
'''
EXAMPLES = '''
- name: Configure Hostgroup
netapp_e_hostgroup:
ssid: "{{ ssid }}"
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
validate_certs: "{{ netapp_api_validate_certs }}"
state: present
'''
RETURN = '''
clusterRef:
description: The unique identification value for this object. Other objects may use this reference value to refer to the cluster.
returned: always except when state is absent
type: string
sample: "3233343536373839303132333100000000000000"
confirmLUNMappingCreation:
description: If true, indicates that creation of LUN-to-volume mappings should require careful confirmation from the end-user, since such a mapping
will alter the volume access rights of other clusters, in addition to this one.
returned: always
type: boolean
sample: false
hosts:
description: A list of the hosts that are part of the host group after all operations.
returned: always except when state is absent
type: list
sample: ["HostA","HostB"]
id:
description: The id number of the hostgroup
returned: always except when state is absent
type: string
sample: "3233343536373839303132333100000000000000"
isSAControlled:
description: If true, indicates that I/O accesses from this cluster are subject to the storage array's default LUN-to-volume mappings. If false,
indicates that I/O accesses from the cluster are subject to cluster-specific LUN-to-volume mappings.
returned: always except when state is absent
type: boolean
sample: false
label:
description: The user-assigned, descriptive label string for the cluster.
returned: always
type: string
sample: "MyHostGroup"
name:
description: same as label
returned: always except when state is absent
type: string
sample: "MyHostGroup"
protectionInformationCapableAccessMethod:
description: This field is true if the host has a PI capable access method.
returned: always except when state is absent
type: boolean
sample: true
'''
# Default headers for every SANtricity REST call: send and accept JSON.
HEADERS = {
    "Content-Type": "application/json",
    "Accept": "application/json"
}
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import open_url
def request(url, data=None, headers=None, method='GET', use_proxy=True,
            force=False, last_mod_time=None, timeout=10, validate_certs=True,
            url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
    """Issue an HTTP request and return ``(status_code, parsed_json)``.

    Wraps ansible's open_url.  HTTP error responses are not raised
    immediately: their body is read and parsed like a success response.
    Raises a generic Exception when the body is not valid JSON or the
    status code is >= 400, unless *ignore_errors* is true.
    """
    try:
        r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
                     force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
                     url_username=url_username, url_password=url_password, http_agent=http_agent,
                     force_basic_auth=force_basic_auth)
    except HTTPError as e:
        # An HTTP error still carries a response body worth parsing.
        r = e.fp

    # Pre-bind so the error path below never hits an unbound name when
    # r.read() itself raises.
    raw_data = None
    try:
        raw_data = r.read()
        # Fix: previously an empty body left `data` holding the *request*
        # payload, which was then returned as the response data.
        data = json.loads(raw_data) if raw_data else None
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt).
        if not ignore_errors:
            raise Exception(raw_data)
        data = None

    resp_code = r.getcode()
    if resp_code >= 400 and not ignore_errors:
        raise Exception(resp_code, data)
    return resp_code, data
def group_exists(module, id_type, ident, ssid, api_url, user, pwd):
    """Return ``(exists, groups)`` for the host group identified by *ident*.

    *id_type* selects which key ('name' or 'id') is compared against *ident*.
    The full host-group list is returned either way so callers can reuse it.
    """
    _, groups = get_hostgroups(module, ssid, api_url, user, pwd)
    found = any(grp[id_type] == ident for grp in groups)
    return found, groups
def get_hostgroups(module, ssid, api_url, user, pwd):
    """Fetch all host groups of array *ssid*; fail the module on HTTP error.

    Returns ``(status_code, groups)`` from the REST API.
    """
    url = api_url + ("storage-systems/%s/host-groups" % ssid)
    try:
        return request(url, headers=HEADERS, url_username=user, url_password=pwd)
    except HTTPError as e:
        module.fail_json(msg="Failed to get host groups. Id [%s]. Error [%s]." % (ssid, to_native(e)))
def get_hostref(module, ssid, name, api_url, user, pwd):
    """Resolve a host label to its array-side hostRef.

    Fails the module when the lookup request errors or no host carries
    the given label.
    """
    url = api_url + ('storage-systems/%s/hosts' % ssid)
    try:
        _, host_list = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(msg="Failed to get hosts. Id [%s]. Error [%s]." % (ssid, to_native(e)))

    for entry in host_list:
        if entry['name'] == name:
            return entry['hostRef']
    module.fail_json(msg="No host with the name %s could be found" % name)
def create_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None):
    """POST a new host group *name* containing the given host labels.

    Each label in *hosts* is first resolved to a hostRef.  Returns the
    ``(status_code, body)`` of the creation request.
    """
    url = api_url + ("storage-systems/%s/host-groups" % ssid)
    refs = [get_hostref(module, ssid, label, api_url, user, pwd) for label in hosts] if hosts else []
    payload = json.dumps(dict(name=name, hosts=refs))
    try:
        rc, data = request(url, method='POST', data=payload, headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(msg="Failed to create host group. Id [%s]. Error [%s]." % (ssid, to_native(e)))
    return rc, data
def update_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None, new_name=None):
    """POST updated membership (and optionally a new label) for a host group.

    The group is located by its current *name*; *hosts* labels are resolved
    to hostRefs before submission.  Returns ``(status_code, body)``.
    """
    gid = get_hostgroup_id(module, ssid, name, api_url, user, pwd)
    url = api_url + ("storage-systems/%s/host-groups/%s" % (ssid, gid))
    refs = [get_hostref(module, ssid, label, api_url, user, pwd) for label in hosts] if hosts else []
    if new_name:
        payload = json.dumps(dict(name=new_name, hosts=refs))
    else:
        payload = json.dumps(dict(hosts=refs))
    try:
        rc, data = request(url, method='POST', data=payload, headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(msg="Failed to update host group. Group [%s]. Id [%s]. Error [%s]." % (gid, ssid,
                                                                                                to_native(e)))
    return rc, data
def delete_hostgroup(module, ssid, group_id, api_url, user, pwd):
    """Issue a DELETE for the host group with id *group_id*.

    Returns ``(status_code, body)``; fails the module on request error.
    """
    url = api_url + ("storage-systems/%s/host-groups/%s" % (ssid, group_id))
    # TODO: Loop through hosts, do mapping to href, make new list to pass to data
    try:
        rc, data = request(url, method='DELETE', headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(msg="Failed to delete host group. Group [%s]. Id [%s]. Error [%s]." % (group_id, ssid, to_native(e)))
    return rc, data
def get_hostgroup_id(module, ssid, name, api_url, user, pwd):
    """Return the id of the host group labelled *name*; fail when absent."""
    url = api_url + ('storage-systems/%s/host-groups' % ssid)
    _, groups = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
    for grp in groups:
        if grp['name'] == name:
            return grp['id']
    module.fail_json(msg="A hostgroup with the name %s could not be found" % name)
def get_hosts_in_group(module, ssid, group_name, api_url, user, pwd):
    """Return the labels of every host whose clusterRef matches *group_name*.

    Performs two reads: the host-group list (to find the group's clusterRef)
    and the host list (to collect members).  Returns a possibly-empty list.
    """
    groups_url = api_url + ('storage-systems/%s/host-groups' % ssid)
    try:
        _, group_list = request(groups_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(
            msg="Failed in first step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." % (group_name,
                                                                                                      ssid,
                                                                                                      to_native(e)))

    hosts_url = api_url + ('storage-systems/%s/hosts' % ssid)
    try:
        _, host_list = request(hosts_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
    except Exception as e:
        module.fail_json(
            msg="Failed in second step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." % (
                group_name,
                ssid,
                to_native(e)))

    members = []
    for grp in group_list:
        if grp['name'] == group_name:
            ref = grp['clusterRef']
            members.extend(h['name'] for h in host_list if h['clusterRef'] == ref)
    return members
def main():
    """Entry point: converge a host group to the requested state.

    state=present creates the group or reconciles its membership (and name);
    state=absent deletes it.  Exits via module.exit_json / fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=False),
            new_name=dict(required=False),
            ssid=dict(required=True),
            id=dict(required=False),
            state=dict(required=True, choices=['present', 'absent']),
            hosts=dict(required=False, type='list'),
            api_url=dict(required=True),
            api_username=dict(required=True),
            validate_certs=dict(required=False, default=True),
            api_password=dict(required=True, no_log=True)
        ),
        supports_check_mode=False,
        mutually_exclusive=[['name', 'id']],
        required_one_of=[['name', 'id']]
    )

    name = module.params['name']
    new_name = module.params['new_name']
    ssid = module.params['ssid']
    id_num = module.params['id']
    state = module.params['state']
    hosts = module.params['hosts']
    user = module.params['api_username']
    pwd = module.params['api_password']
    api_url = module.params['api_url']
    # NOTE(review): validate_certs is accepted above but never read or
    # forwarded to request(), which always uses its own default -- confirm.

    # Helper functions concatenate endpoint paths directly onto api_url.
    if not api_url.endswith('/'):
        api_url += '/'

    if name:
        id_type = 'name'
        id_key = name
    elif id_num:
        id_type = 'id'
        id_key = id_num
    # NOTE(review): when only C(id) is supplied, the by-name helpers below
    # are still called with name=None -- looks broken for id-only usage.

    exists, group_data = group_exists(module, id_type, id_key, ssid, api_url, user, pwd)

    if state == 'present':
        if not exists:
            # Create, then re-read membership so the result reflects what the
            # array actually stored.
            try:
                rc, data = create_hostgroup(module, ssid, name, api_url, user, pwd, hosts)
            except Exception as e:
                module.fail_json(msg="Failed to create a host group. Id [%s]. Error [%s]." % (ssid, to_native(e)))

            hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd)
            module.exit_json(changed=True, hosts=hosts, **data)
        else:
            current_hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd)

            if not current_hosts:
                current_hosts = []

            if not hosts:
                hosts = []

            # Membership comparison is order-insensitive.
            if set(current_hosts) != set(hosts):
                try:
                    rc, data = update_hostgroup(module, ssid, name, api_url, user, pwd, hosts, new_name)
                except Exception as e:
                    module.fail_json(
                        msg="Failed to update host group. Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e)))
                module.exit_json(changed=True, hosts=hosts, **data)
            else:
                # Already converged: report the existing group unchanged.
                for group in group_data:
                    if group['name'] == name:
                        module.exit_json(changed=False, hosts=current_hosts, **group)

    elif state == 'absent':
        if exists:
            hg_id = get_hostgroup_id(module, ssid, name, api_url, user, pwd)
            try:
                rc, data = delete_hostgroup(module, ssid, hg_id, api_url, user, pwd)
            except Exception as e:
                module.fail_json(
                    msg="Failed to delete host group. Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e)))

            module.exit_json(changed=True, msg="Host Group deleted")
        else:
            module.exit_json(changed=False, msg="Host Group is already absent")
# Module entry point when executed by Ansible.
if __name__ == '__main__':
    main()
|
samchoii/search-foursquare
|
refs/heads/master
|
node_modules/protractor/node_modules/jasmine/node_modules/jasmine-core/images/__init__.py
|
12133432
| |
Beeblio/django
|
refs/heads/master
|
tests/force_insert_update/__init__.py
|
12133432
| |
google/active-qa
|
refs/heads/master
|
third_party/nmt/__init__.py
|
12133432
| |
NukeAOSP/external_skia
|
refs/heads/jellybean
|
bench/bench_graph_svg.py
|
25
|
'''
Created on May 16, 2011
@author: bungeman
'''
import sys
import getopt
import re
import os
import bench_util
import json
import xml.sax.saxutils
def usage():
    """Prints simple usage information."""
    # Python 2 print statements: one line per supported command-line flag.
    print '-d <dir> a directory containing bench_r<revision>_<scalar> files.'
    print '-b <bench> the bench to show.'
    print '-c <config> the config to show (GPU, 8888, 565, etc).'
    print '-t <time> the time to show (w, c, g, etc).'
    print '-s <setting>[=<value>] a setting to show (alpha, scalar, etc).'
    print '-r <revision>[:<revision>] the revisions to show.'
    print '   Negative <revision> is taken as offset from most recent revision.'
    print '-f <revision>[:<revision>] the revisions to use for fitting.'
    print '   Negative <revision> is taken as offset from most recent revision.'
    print '-x <int> the desired width of the svg.'
    print '-y <int> the desired height of the svg.'
    print '-l <title> title to use for the output graph'
    print '--default-setting <setting>[=<value>] setting for those without.'
class Label:
    """The information in a label.

    Identifies one plotted line: (bench, config, time_type, settings).
    Implements __eq__/__hash__ so instances can key the lines and
    regressions dictionaries.
    (str, str, str, str, {str:str})"""

    def __init__(self, bench, config, time_type, settings):
        self.bench = bench
        self.config = config
        self.time_type = time_type
        self.settings = settings

    def __repr__(self):
        return "Label(%s, %s, %s, %s)" % (
                   str(self.bench),
                   str(self.config),
                   str(self.time_type),
                   str(self.settings),
               )

    def __str__(self):
        # Underscore-joined form; also used as the SVG element id.
        return "%s_%s_%s_%s" % (
                   str(self.bench),
                   str(self.config),
                   str(self.time_type),
                   str(self.settings),
               )

    def __eq__(self, other):
        return (self.bench == other.bench and
                self.config == other.config and
                self.time_type == other.time_type and
                self.settings == other.settings)

    def __hash__(self):
        # Python 2 dict API (iteritems); frozenset makes settings hashable.
        return (hash(self.bench) ^
                hash(self.config) ^
                hash(self.time_type) ^
                hash(frozenset(self.settings.iteritems())))
def get_latest_revision(directory):
    """Return the highest revision among bench_r<revision>_<scalar> files.

    Scans *directory* for files matching the bench naming convention and
    returns the largest numeric revision, or None when no such file exists.
    """
    latest_revision_found = -1
    for bench_file in os.listdir(directory):
        # Raw string: '\d'/'\S' are regex escapes, not string escapes
        # (the original non-raw literal is a DeprecationWarning in py3).
        file_name_match = re.match(r'bench_r(\d+)_(\S+)', bench_file)
        if file_name_match is None:
            continue

        revision = int(file_name_match.group(1))
        if revision > latest_revision_found:
            latest_revision_found = revision

    if latest_revision_found < 0:
        return None
    return latest_revision_found
def parse_dir(directory, default_settings, oldest_revision, newest_revision):
    """Parses bench data from files like bench_r<revision>_<scalar>.

    Only revisions within [oldest_revision, newest_revision] are read.
    (str, {str, str}, Number, Number) -> {int:[BenchDataPoints]}"""
    revision_data_points = {} # {revision : [BenchDataPoints]}
    for bench_file in os.listdir(directory):
        file_name_match = re.match('bench_r(\d+)_(\S+)', bench_file)
        if (file_name_match is None):
            continue

        revision = int(file_name_match.group(1))
        scalar_type = file_name_match.group(2)
        if (revision < oldest_revision or revision > newest_revision):
            continue

        file_handle = open(directory + '/' + bench_file, 'r')

        if (revision not in revision_data_points):
            revision_data_points[revision] = []
        # NOTE(review): this mutates the caller's default_settings dict in
        # place so the file's scalar suffix travels with each parsed point.
        default_settings['scalar'] = scalar_type
        revision_data_points[revision].extend(
            bench_util.parse(default_settings, file_handle))
        file_handle.close()
    return revision_data_points
def create_lines(revision_data_points, settings
               , bench_of_interest, config_of_interest, time_of_interest):
    """Convert revision data into sorted line data.

    A None filter (bench/config/time) means "accept all"; *settings* entries
    must match the point's settings when the key is present on the point.
    ({int:[BenchDataPoints]}, {str:str}, str?, str?, str?)
        -> {Label:[(x,y)] | [n].x <= [n+1].x}"""
    # Python 2: dict.keys() returns a list, so in-place sort is valid.
    revisions = revision_data_points.keys()
    revisions.sort()
    lines = {} # {Label:[(x,y)] | x[n] <= x[n+1]}
    for revision in revisions:
        for point in revision_data_points[revision]:
            if (bench_of_interest is not None and
                    not bench_of_interest == point.bench):
                continue

            if (config_of_interest is not None and
                    not config_of_interest == point.config):
                continue

            if (time_of_interest is not None and
                    not time_of_interest == point.time_type):
                continue

            # Reject the point if any requested setting disagrees.
            skip = False
            for key, value in settings.items():
                if key in point.settings and point.settings[key] != value:
                    skip = True
                    break
            if skip:
                continue

            line_name = Label(point.bench
                            , point.config
                            , point.time_type
                            , point.settings)

            if line_name not in lines:
                lines[line_name] = []

            # Iterating revisions in sorted order keeps each line x-sorted.
            lines[line_name].append((revision, point.time))

    return lines
def bounds(lines):
    """Finds the bounding rectangle for the lines.

    {Label:[(x,y)]} -> ((min_x, min_y),(max_x,max_y))"""
    # bench_util.Max / bench_util.Min act as +inf / -inf sentinels.
    min_x = bench_util.Max
    min_y = bench_util.Max
    max_x = bench_util.Min
    max_y = bench_util.Min

    for line in lines.itervalues():
        for x, y in line:
            min_x = min(min_x, x)
            min_y = min(min_y, y)
            max_x = max(max_x, x)
            max_y = max(max_y, y)

    return ((min_x, min_y), (max_x, max_y))
def create_regressions(lines, start_x, end_x):
    """Creates regression data from line segments.

    Only points with start_x <= x <= end_x are fitted; lines with fewer
    than two such points are skipped.
    ({Label:[(x,y)] | [n].x <= [n+1].x}, Number, Number)
        -> {Label:LinearRegression}"""
    regressions = {} # {Label : LinearRegression}

    for label, line in lines.iteritems():
        regression_line = [p for p in line if start_x <= p[0] <= end_x]

        if (len(regression_line) < 2):
            continue
        regression = bench_util.LinearRegression(regression_line)
        regressions[label] = regression

    return regressions
def bounds_slope(regressions):
    """Finds the extreme up and down slopes of a set of linear regressions.

    Both extremes start at 0, so the result brackets zero even when every
    slope is positive (or negative).
    ({Label:LinearRegression}) -> (max_up_slope, min_down_slope)"""
    max_up_slope = 0
    min_down_slope = 0
    for regression in regressions.itervalues():
        min_slope = regression.find_min_slope()
        max_up_slope = max(max_up_slope, min_slope)
        min_down_slope = min(min_down_slope, min_slope)

    return (max_up_slope, min_down_slope)
def main():
    """Parses command line and writes output.

    Reads bench files from -d, filters/fits per the other flags, and emits
    an XHTML+SVG report on stdout.  Python 2 syntax (print statements,
    old-style except clause).
    """
    try:
        opts, _ = getopt.getopt(sys.argv[1:]
            , "d:b:c:l:t:s:r:f:x:y:"
            , "default-setting=")
    except getopt.GetoptError, err:
        print str(err)
        usage()
        sys.exit(2)

    directory = None
    config_of_interest = None
    bench_of_interest = None
    time_of_interest = None
    revision_range = '0:'
    regression_range = '0:'
    latest_revision = None
    requested_height = None
    requested_width = None
    title = 'Bench graph'
    settings = {}
    default_settings = {}

    def parse_range(range):
        """Takes '<old>[:<new>]' as a string and returns (old, new).
        Any revision numbers that are dependent on the latest revision number
        will be filled in based on latest_revision (closed over from main,
        which is assigned before the first call).
        """
        old, _, new = range.partition(":")
        old = int(old)
        if old < 0:
            old += latest_revision;
        if not new:
            new = latest_revision;
        new = int(new)
        if new < 0:
            new += latest_revision;
        return (old, new)

    def add_setting(settings, setting):
        """Takes <key>[=<value>] adds {key:value} or {key:True} to settings."""
        name, _, value = setting.partition('=')
        if not value:
            # Valueless settings act as boolean flags.
            settings[name] = True
        else:
            settings[name] = value

    try:
        for option, value in opts:
            if option == "-d":
                directory = value
            elif option == "-b":
                bench_of_interest = value
            elif option == "-c":
                config_of_interest = value
            elif option == "-t":
                time_of_interest = value
            elif option == "-s":
                add_setting(settings, value)
            elif option == "-r":
                revision_range = value
            elif option == "-f":
                regression_range = value
            elif option == "-x":
                requested_width = int(value)
            elif option == "-y":
                requested_height = int(value)
            elif option == "-l":
                title = value
            elif option == "--default-setting":
                add_setting(default_settings, value)
            else:
                usage()
                assert False, "unhandled option"
    except ValueError:
        # int() failed on -x/-y.
        usage()
        sys.exit(2)

    if directory is None:
        usage()
        sys.exit(2)

    latest_revision = get_latest_revision(directory)
    oldest_revision, newest_revision = parse_range(revision_range)
    oldest_regression, newest_regression = parse_range(regression_range)

    revision_data_points = parse_dir(directory
        , default_settings
        , oldest_revision
        , newest_revision)

    # Update oldest_revision and newest_revision based on the data we could find
    all_revision_numbers = revision_data_points.keys()
    oldest_revision = min(all_revision_numbers)
    newest_revision = max(all_revision_numbers)

    lines = create_lines(revision_data_points
        , settings
        , bench_of_interest
        , config_of_interest
        , time_of_interest)

    regressions = create_regressions(lines
        , oldest_regression
        , newest_regression)

    output_xhtml(lines, oldest_revision, newest_revision,
                 regressions, requested_width, requested_height, title)
def qa(out):
    """Return *out* stringified and quoted for use as an XML attribute value."""
    text = str(out)
    return xml.sax.saxutils.quoteattr(text)
def qe(out):
    """Return *out* stringified with XML character-data escaping applied."""
    text = str(out)
    return xml.sax.saxutils.escape(text)
def create_select(qualifier, lines, select_id=None):
    """Output select with options showing lines which qualifier maps to it.

    ((Label) -> str, {Label:_}, str?) -> _"""
    options = {} #{ option : [Label]}
    for label in lines.keys():
        option = qualifier(label)
        if (option not in options):
            options[option] = []
        options[option].append(label)
    option_list = list(options.keys())
    option_list.sort()
    print '<select class="lines"',
    if select_id is not None:
        print 'id=%s' % qa(select_id)
    print 'multiple="true" size="10" onchange="updateSvg();">'
    for option in option_list:
        # The option value is a JSON-ish list of the labels this option
        # covers; reduce builds "json,json,..." and [0:-1] drops the
        # trailing comma (py2 builtin reduce).
        print '<option value=' + qa('[' +
            reduce(lambda x,y:x+json.dumps(str(y))+',',options[option],"")[0:-1]
            + ']') + '>'+qe(option)+'</option>'
    print '</select>'
def output_xhtml(lines, oldest_revision, newest_revision,
                 regressions, requested_width, requested_height, title):
    """Outputs an svg/xhtml view of the data.

    Emits the document to stdout: XHTML skeleton, the SVG chart (via
    output_svg), inline JavaScript for interactive filtering, and the
    selector/legend table.  Python 2 print statements throughout.
    """
    print '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"',
    print '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
    print '<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">'
    print '<head>'
    print '<title>%s</title>' % title
    print '</head>'
    print '<body>'

    output_svg(lines, regressions, requested_width, requested_height)

    #output the manipulation controls
    print """
<script type="text/javascript">//<![CDATA[
    function getElementsByClass(node, searchClass, tag) {
        var classElements = new Array();
        var elements = node.getElementsByTagName(tag);
        var pattern = new RegExp("^|\\s"+searchClass+"\\s|$");
        for (var i = 0, elementsFound = 0; i < elements.length; ++i) {
            if (pattern.test(elements[i].className)) {
                classElements[elementsFound] = elements[i];
                ++elementsFound;
            }
        }
        return classElements;
    }
    function getAllLines() {
        var selectElem = document.getElementById('benchSelect');
        var linesObj = {};
        for (var i = 0; i < selectElem.options.length; ++i) {
            var lines = JSON.parse(selectElem.options[i].value);
            for (var j = 0; j < lines.length; ++j) {
                linesObj[lines[j]] = true;
            }
        }
        return linesObj;
    }
    function getOptions(selectElem) {
        var linesSelectedObj = {};
        for (var i = 0; i < selectElem.options.length; ++i) {
            if (!selectElem.options[i].selected) continue;

            var linesSelected = JSON.parse(selectElem.options[i].value);
            for (var j = 0; j < linesSelected.length; ++j) {
                linesSelectedObj[linesSelected[j]] = true;
            }
        }
        return linesSelectedObj;
    }
    function objectEmpty(obj) {
        for (var p in obj) {
            return false;
        }
        return true;
    }
    function markSelectedLines(selectElem, allLines) {
        var linesSelected = getOptions(selectElem);
        if (!objectEmpty(linesSelected)) {
            for (var line in allLines) {
                allLines[line] &= (linesSelected[line] == true);
            }
        }
    }
    function updateSvg() {
        var allLines = getAllLines();

        var selects = getElementsByClass(document, 'lines', 'select');
        for (var i = 0; i < selects.length; ++i) {
            markSelectedLines(selects[i], allLines);
        }

        for (var line in allLines) {
            var svgLine = document.getElementById(line);
            var display = (allLines[line] ? 'inline' : 'none');
            svgLine.setAttributeNS(null,'display', display);
        }
    }

    function mark(markerId) {
        for (var line in getAllLines()) {
            var svgLineGroup = document.getElementById(line);
            var display = svgLineGroup.getAttributeNS(null,'display');
            if (display == null || display == "" || display != "none") {
                var svgLine = document.getElementById(line+'_line');
                if (markerId == null) {
                    svgLine.removeAttributeNS(null,'marker-mid');
                } else {
                    svgLine.setAttributeNS(null,'marker-mid', markerId);
                }
            }
        }
    }
//]]></script>"""

    # NOTE(review): requested_width may be None here, which prints
    # width="None" -- harmless to browsers but worth confirming.
    print '<table border="0" width="%s">' % requested_width
    print """
<form>
<tr valign="bottom" align="center">
<td width="1">Bench&nbsp;Type</td>
<td width="1">Bitmap Config</td>
<td width="1">Timer&nbsp;Type (Cpu/Gpu/wall)</td>
<td width="1"><!--buttons--></td>
<td width="10%"><!--spacing--></td>"""

    print '<td>%s<br></br>revisions r%s - r%s</td>' % (
        title,
        bench_util.CreateRevisionLink(oldest_revision),
        bench_util.CreateRevisionLink(newest_revision))
    print '</tr><tr valign="top" align="center">'
    print '<td width="1">'
    create_select(lambda l: l.bench, lines, 'benchSelect')
    print '</td><td width="1">'
    create_select(lambda l: l.config, lines)
    print '</td><td width="1">'
    create_select(lambda l: l.time_type, lines)

    # Only settings whose value varies across lines get their own selector.
    all_settings = {}
    variant_settings = set()
    for label in lines.keys():
        for key, value in label.settings.items():
            if key not in all_settings:
                all_settings[key] = value
            elif all_settings[key] != value:
                variant_settings.add(key)

    for k in variant_settings:
        create_select(lambda l: l.settings[k], lines)

    print '</td><td width="1"><button type="button"',
    print 'onclick=%s' % qa("mark('url(#circleMark)'); return false;"),
    print '>Mark Points</button>'
    print '<button type="button" onclick="mark(null);">Clear Points</button>'
    print """
</td>
<td width="10%"></td>
<td align="left">
<p>Brighter red indicates tests that have gotten worse; brighter green
indicates tests that have gotten better.</p>
<p>To highlight individual tests, hold down CONTROL and mouse over
graph lines.</p>
<p>To highlight revision numbers, hold down SHIFT and mouse over
the graph area.</p>
<p>To only show certain tests on the graph, select any combination of
tests in the selectors at left. (To show all, select all.)</p>
<p>Use buttons at left to mark/clear points on the lines for selected
benchmarks.</p>
</td>
</tr>
</form>
</table>
</body>
</html>"""
def compute_size(requested_width, requested_height, rev_width, time_height):
    """Converts potentially empty requested size into a concrete size.

    Any missing dimension is derived from the data's aspect ratio
    (rev_width x time_height); when both are missing, height defaults to
    800 and width to at least three pixels per revision.
    (Number?, Number?) -> (Number, Number)"""
    if requested_width is not None and requested_height is not None:
        return (requested_width, requested_height)

    if requested_width is not None:
        derived_height = requested_width * (float(time_height) / rev_width)
        return (requested_width, derived_height)

    if requested_height is not None:
        derived_width = requested_height * (float(rev_width) / time_height)
        return (derived_width, requested_height)

    fallback_height = 800
    fallback_width = max(rev_width * 3,
                         fallback_height * (float(rev_width) / time_height))
    return (fallback_width, fallback_height)
def output_svg(lines, regressions, requested_width, requested_height):
    """Outputs an svg view of the data.

    Draws one background rect per revision (shift-hover highlights it),
    one polyline per Label (ctrl-hover highlights it, with its fitted
    regression shown as a translucent band), and the floating text labels.
    Python 2 print statements throughout.
    """
    (global_min_x, _), (global_max_x, global_max_y) = bounds(lines)
    max_up_slope, min_down_slope = bounds_slope(regressions)

    #output
    global_min_y = 0
    x = global_min_x
    y = global_min_y
    w = global_max_x - global_min_x
    h = global_max_y - global_min_y
    font_size = 16
    line_width = 2

    pic_width, pic_height = compute_size(requested_width, requested_height
        , w, h)

    # Coordinate converters: data space (revision, time) -> display space.
    def cw(w1):
        """Converts a revision difference to display width."""
        return (pic_width / float(w)) * w1
    def cx(x):
        """Converts a revision to a horizontal display position."""
        return cw(x - global_min_x)

    def ch(h1):
        """Converts a time difference to a display height (negated: SVG y
        grows downward)."""
        return -(pic_height / float(h)) * h1
    def cy(y):
        """Converts a time to a vertical display position."""
        return pic_height + ch(y - global_min_y)

    print '<svg',
    print 'width=%s' % qa(str(pic_width)+'px')
    print 'height=%s' % qa(str(pic_height)+'px')
    print 'viewBox="0 0 %s %s"' % (str(pic_width), str(pic_height))
    print 'onclick=%s' % qa(
        "var event = arguments[0] || window.event;"
        " if (event.shiftKey) { highlightRevision(null); }"
        " if (event.ctrlKey) { highlight(null); }"
        " return false;")
    print 'xmlns="http://www.w3.org/2000/svg"'
    print 'xmlns:xlink="http://www.w3.org/1999/xlink">'

    print """
<defs>
    <marker id="circleMark"
      viewBox="0 0 2 2" refX="1" refY="1"
      markerUnits="strokeWidth"
      markerWidth="2" markerHeight="2"
      orient="0">
      <circle cx="1" cy="1" r="1"/>
    </marker>
</defs>"""

    #output the revisions
    print """
<script type="text/javascript">//<![CDATA[
    var previousRevision;
    var previousRevisionFill;
    var previousRevisionStroke
    function highlightRevision(id) {
        if (previousRevision == id) return;

        document.getElementById('revision').firstChild.nodeValue = 'r' + id;
        document.getElementById('rev_link').setAttribute('xlink:href',
            'http://code.google.com/p/skia/source/detail?r=' + id);

        var preRevision = document.getElementById(previousRevision);
        if (preRevision) {
            preRevision.setAttributeNS(null,'fill', previousRevisionFill);
            preRevision.setAttributeNS(null,'stroke', previousRevisionStroke);
        }

        var revision = document.getElementById(id);
        previousRevision = id;
        if (revision) {
            previousRevisionFill = revision.getAttributeNS(null,'fill');
            revision.setAttributeNS(null,'fill','rgb(100%, 95%, 95%)');

            previousRevisionStroke = revision.getAttributeNS(null,'stroke');
            revision.setAttributeNS(null,'stroke','rgb(100%, 90%, 90%)');
        }
    }
//]]></script>"""

    def print_rect(x, y, w, h, revision):
        """Outputs a revision rectangle in display space,
        taking arguments in revision space."""
        disp_y = cy(y)
        disp_h = ch(h)
        # ch() negates, so normalize to a positive height for the rect.
        if disp_h < 0:
            disp_y += disp_h
            disp_h = -disp_h

        print '<rect id=%s x=%s y=%s' % (qa(revision), qa(cx(x)), qa(disp_y),),
        print 'width=%s height=%s' % (qa(cw(w)), qa(disp_h),),
        print 'fill="white"',
        print 'stroke="rgb(98%%,98%%,88%%)" stroke-width=%s' % qa(line_width),
        print 'onmouseover=%s' % qa(
            "var event = arguments[0] || window.event;"
            " if (event.shiftKey) {"
            " highlightRevision('"+str(revision)+"');"
            " return false;"
            " }"),
        print ' />'

    # Collect every revision that appears on any line, sorted.
    xes = set()
    for line in lines.itervalues():
        for point in line:
            xes.add(point[0])
    revisions = list(xes)
    revisions.sort()

    # Each revision's rect spans halfway to its neighbor on each side.
    left = x
    current_revision = revisions[0]
    for next_revision in revisions[1:]:
        width = (((next_revision - current_revision) / 2.0)
                + (current_revision - left))
        print_rect(left, y, width, h, current_revision)
        left += width
        current_revision = next_revision
    print_rect(left, y, x+w - left, h, current_revision)

    #output the lines
    print """
<script type="text/javascript">//<![CDATA[
    var previous;
    var previousColor;
    var previousOpacity;
    function highlight(id) {
        if (previous == id) return;

        document.getElementById('label').firstChild.nodeValue = id;

        var preGroup = document.getElementById(previous);
        if (preGroup) {
            var preLine = document.getElementById(previous+'_line');
            preLine.setAttributeNS(null,'stroke', previousColor);
            preLine.setAttributeNS(null,'opacity', previousOpacity);

            var preSlope = document.getElementById(previous+'_linear');
            if (preSlope) {
                preSlope.setAttributeNS(null,'visibility', 'hidden');
            }
        }

        var group = document.getElementById(id);
        previous = id;
        if (group) {
            group.parentNode.appendChild(group);

            var line = document.getElementById(id+'_line');
            previousColor = line.getAttributeNS(null,'stroke');
            previousOpacity = line.getAttributeNS(null,'opacity');
            line.setAttributeNS(null,'stroke', 'blue');
            line.setAttributeNS(null,'opacity', '1');

            var slope = document.getElementById(id+'_linear');
            if (slope) {
                slope.setAttributeNS(null,'visibility', 'visible');
            }
        }
    }
//]]></script>"""

    for label, line in lines.items():
        print '<g id=%s>' % qa(label)
        # Base gray; regression slope tints toward red (worse) or green
        # (better) and raises opacity proportionally.
        r = 128
        g = 128
        b = 128
        a = .10
        if label in regressions:
            regression = regressions[label]
            min_slope = regression.find_min_slope()
            if min_slope < 0:
                d = max(0, (min_slope / min_down_slope))
                g += int(d*128)
                a += d*0.9
            elif min_slope > 0:
                d = max(0, (min_slope / max_up_slope))
                r += int(d*128)
                a += d*0.9

            # Hidden fitted band; its width encodes 2x the standard error.
            slope = regression.slope
            intercept = regression.intercept
            min_x = regression.min_x
            max_x = regression.max_x
            print '<polyline id=%s' % qa(str(label)+'_linear'),
            print 'fill="none" stroke="yellow"',
            print 'stroke-width=%s' % qa(abs(ch(regression.serror*2))),
            print 'opacity="0.5" pointer-events="none" visibility="hidden"',
            print 'points="',
            print '%s,%s' % (str(cx(min_x)), str(cy(slope*min_x + intercept))),
            print '%s,%s' % (str(cx(max_x)), str(cy(slope*max_x + intercept))),
            print '"/>'

        print '<polyline id=%s' % qa(str(label)+'_line'),
        print 'onmouseover=%s' % qa(
            "var event = arguments[0] || window.event;"
            " if (event.ctrlKey) {"
            " highlight('"+str(label).replace("'", "\\'")+"');"
            " return false;"
            " }"),
        print 'fill="none" stroke="rgb(%s,%s,%s)"' % (str(r), str(g), str(b)),
        print 'stroke-width=%s' % qa(line_width),
        print 'opacity=%s' % qa(a),
        print 'points="',
        for point in line:
            print '%s,%s' % (str(cx(point[0])), str(cy(point[1]))),
        print '"/>'
        print '</g>'

    #output the labels
    print '<text id="label" x="0" y=%s' % qa(font_size),
    print 'font-size=%s> </text>' % qa(font_size)

    print '<a id="rev_link" xlink:href="" target="_top">'
    print '<text id="revision" x="0" y=%s style="' % qa(font_size*2)
    print 'font-size: %s; ' % qe(font_size)
    print 'stroke: #0000dd; text-decoration: underline; '
    print '"> </text></a>'

    print '</svg>'
# Allow running the script directly from the command line.
if __name__ == "__main__":
    main()
|
caotianwei/django
|
refs/heads/master
|
django/core/management/commands/startapp.py
|
513
|
from importlib import import_module
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
class Command(TemplateCommand):
    """``startapp`` management command: render the app skeleton template."""
    help = ("Creates a Django app directory structure for the given app "
            "name in the current directory or optionally in the given "
            "directory.")
    missing_args_message = "You must provide an application name."

    def handle(self, **options):
        """Validate the requested app name, then delegate to TemplateCommand."""
        app_name = options.pop('name')
        target = options.pop('directory')
        self.validate_name(app_name, "app")

        # Refuse names that already resolve to an importable module: an app
        # created under such a name could never be imported itself.
        try:
            import_module(app_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing "
                               "Python module and cannot be used as an app "
                               "name. Please try another name." % app_name)

        super(Command, self).handle('app', app_name, target, **options)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.