| code (string, length 2–1.05M) | repo_name (string, length 5–104) | path (string, length 4–251) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.models import Task
class TaskIndexAPIView(APIView):
def get(self):
queryset = Task.query.order_by(Task.date_created.desc())
return self.paginate(queryset)
| bowlofstew/changes | changes/api/task_index.py | Python | apache-2.0 | 268 |
class B(object):
def __init__<warning descr="Signature is not compatible to __new__">(self)</warning>: # error
pass
def __new__<warning descr="Signature is not compatible to __init__">(cls, x, y)</warning>: # error
pass
class A1(B):
pass
class A2(A1):
def __new__<warning descr="Signature is not compatible to __init__">(cls, a)</warning>: # error
pass
class A3(A2):
def __new__(cls, *a): # ok
pass
class C(object):
def __new__(cls, *args, **kwargs):
pass
class C1(C):
def __init__(self, a): # OK
pass
# PY-846
from seob import SeoB
class SeoA(SeoB):
pass
import enum
# PY-24749
class Planet(enum.Enum):
EARTH = (5.976e+24, 6.37814e6)
def __init__(self, mass, radius): # OK
pass
| asedunov/intellij-community | python/testData/inspections/PyInitNewSignatureInspection/test.py | Python | apache-2.0 | 780 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""extend base_image_id to 100 chars
Revision ID: 43df309dbf3f
Revises: 40df542e345
Create Date: 2015-06-30 20:39:56.988397
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '43df309dbf3f'
down_revision = '40df542e345'
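# Widen image.base_image_id from 36 to 100 characters; downgrade() restores the original length.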
def upgrade():
op.alter_column('image', 'base_image_id',
type_=sa.String(length=100),
existing_type=sa.String(length=36),
existing_nullable=True,
)
def downgrade():
op.alter_column('image', 'base_image_id',
type_=sa.String(length=36),
existing_type=sa.String(length=100),
existing_nullable=True,
)
| stackforge/solum | solum/objects/sqlalchemy/migration/alembic_migrations/versions/43df309dbf3f_extend_base_image_id_to_100_chars.py | Python | apache-2.0 | 1,292 |
#!/usr/bin/env python
#coding=utf-8
# Copyright (C) 2011, Alibaba Cloud Computing
#Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from oss.oss_api import *
from oss.oss_util import *
from oss.oss_xml_handler import *
from aliyunCliParser import aliyunCliParser
import signal
import ConfigParser
from optparse import OptionParser
from optparse import Values
import os
import re
import time
import Queue
import sys
import socket
import shutil
reload(sys)
sys.setdefaultencoding("utf-8")
CMD_LIST = {}
HELP_CMD_LIST = ['--help','-h','help']
ACL_LIST = ['private', 'public-read', 'public-read-write']
OSS_PREFIX = 'oss://'
CONFIGFILE = "%s/.aliyuncli/osscredentials" % os.path.expanduser('~')
CONFIGSECTION = 'OSSCredentials'
DEFAUL_HOST = "oss.aliyuncs.com"
OSS_HOST = DEFAUL_HOST
ID = ""
KEY = ""
STS_TOKEN = None
TOTAL_PUT = AtomicInt()
PUT_OK = AtomicInt()
PUT_FAIL = AtomicInt()
PUT_SKIP = AtomicInt()
FILE_NUM_TOTAL = AtomicInt()
FILE_NUM_OK = AtomicInt()
GET_OK = AtomicInt()
GET_FAIL = AtomicInt()
GET_SKIP = AtomicInt()
DELETE_OK = AtomicInt()
COPY_OK = AtomicInt()
SEND_BUF_SIZE = 8192
RECV_BUF_SIZE = 1024*1024*10
MAX_OBJECT_SIZE = 5*1024*1024*1024
MAX_RETRY_TIMES = 3
IS_DEBUG = False
ERROR_FILE_LIST = []
AUTO_DUMP_FILE_NUM = 50
RET_OK = 0
RET_FAIL = -1
RET_SKIP = 1
lock = threading.Lock()
HELP = \
'''The valid commands are as follows:
GetAllBucket
CreateBucket oss://bucket --acl [acl] --location [location]
DeleteBucket oss://bucket
DeleteWholeBucket oss://bucket
GetBucketLocation oss://bucket
PutBucketCors oss://bucket localfile
GetBucketCors oss://bucket
DeleteBucketCors oss://bucket
PutBucketLogging oss://source_bucket oss://target_bucket/[prefix]
GetBucketLogging oss://bucket
DeleteBucketLogging oss://bucket
PutBucketWebsite oss://bucket indexfile [errorfile]
GetBucketWebsite oss://bucket
DeleteBucketWebsite oss://bucket
PutBucketLifeCycle oss://bucket localfile
GetBucketLifeCycle oss://bucket
DeleteBucketLifeCycle oss://bucket
PutBucketReferer oss://bucket --allow_empty_referer true --referer "referer1,referer2,...,refererN"
GetBucketReferer oss://bucket
GetAcl oss://bucket
SetAcl oss://bucket --acl [acl]
allow private, public-read, public-read-write
List oss://bucket/[prefix] [marker] [delimiter] [maxkeys]
oss://bucket/[prefix] --marker xxx --delimiter xxx --maxkeys xxx
MkDir oss://bucket/dirname
ListAllObject oss://bucket/[prefix]
ListAllDir oss://bucket/[prefix]
DeleteAllObject oss://bucket/[prefix] --force false
DownloadAllObject oss://bucket/[prefix] localdir --replace false --thread_num 5
DownloadToDir oss://bucket/[prefix] localdir --replace false --temp_dir xxx --thread_num 5
UploadObjectFromLocalDir localdir oss://bucket/[prefix] --check_point check_point_file --replace false --check_md5 false --thread_num 5
Put oss://bucket/object --content_type [content_type] --headers \"key1:value1#key2:value2\" --check_md5 false
Get oss://bucket/object localfile
MultiGet oss://bucket/object localfile --thread_num 5
Cat oss://bucket/object
Meta oss://bucket/object
Info oss://bucket/object
Copy oss://source_bucket/source_object oss://target_bucket/target_object --headers \"key1:value1#key2:value2\"
CopyLargeFile oss://source_bucket/source_object oss://target_bucket/target_object --part_size 10*1024*1024 --upload_id xxx
CopyBucket oss://source_bucket/[prefix] oss://target_bucket/[prefix] --headers \"key1:value1\" --replace false
Delete oss://bucket/object
SignUrl oss://bucket/object --timeout [timeout_seconds]
CreateLinkFromFile oss://bucket/object object_name_list_file
CreateLink oss://bucket/object object1 object2 ... objectN
GetLinkIndex oss://bucket/object
Options oss://bucket/[object] --origin xxx --method [GET, PUT, DELETE, HEAD, POST]
UploadDisk localdir oss://bucket/[prefix] [--check_point check_point_file --filename filename_file --replace false --content_type xxx --skip_dir false --skip_suffix false --out xxx] --device_id xxx --check_md5 false
Init oss://bucket/object
ListPart oss://bucket/object --upload_id xxx
ListParts oss://bucket
GetAllPartSize oss://bucket
Cancel oss://bucket/object --upload_id xxx
MultiUpload localfile oss://bucket/object --upload_id xxx --thread_num 10 --max_part_num 1000 --check_md5 false
UploadPartFromFile localfile oss://bucket/object --upload_id xxx --part_number xxx
UploadPartFromString oss://bucket/object --upload_id xxx --part_number xxx --data xxx
Config --host oss.aliyuncs.com --accessid accessid --accesskey accesskey --sts_token token
'''
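# A few hypothetical invocations for illustration (the script name below is an assumption,
# not taken from this file; bucket and file names are placeholders):
#   python osscmd.py Config --host oss.aliyuncs.com --accessid <your_id> --accesskey <your_key>
#   python osscmd.py Put /tmp/report.txt oss://mybucket/reports/ --check_md5 true
#   python osscmd.py Get oss://mybucket/reports/report.txt /tmp/report.txt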
def print_result(cmd, res):
'''
    Print the HTTP response if the request failed.
'''
try:
if res.status / 100 == 2:
pass
else:
body = res.read()
print "Error Headers:\n"
print res.getheaders()
print "Error Body:\n"
print body[0:1024]
print "Error Status:\n"
print res.status
print cmd, "Failed!"
if res.status == 403:
check_endpoint_error(body)
exit(-1)
except AttributeError:
pass
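# Render a byte count as a human-readable string, e.g. format_size(3 * 1024 * 1024) returns "3.00MB".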
def format_size(size):
size = float(size)
coeffs = ['K', 'M', 'G', 'T']
coeff = ""
while size > 2048:
size /= 1024
coeff = coeffs.pop(0)
return str("%.2f"%size) + coeff + "B"
def format_utf8(string):
string = smart_code(string)
if isinstance(string, unicode):
string = string.encode('utf-8')
return string
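# Split an "oss://bucket/key" path into its components, e.g. split_path("oss://mybucket/a/b") returns ['mybucket', 'a', 'b'].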
def split_path(path):
if not path.lower().startswith(OSS_PREFIX):
print "%s parameter %s invalid, " \
"must be start with %s" % \
(args[0], args[1], OSS_PREFIX)
sys.exit(1)
pather = path[len(OSS_PREFIX):].split('/')
return pather
def check_upload_id(upload_id):
upload_id_len = 32
if len(upload_id) != upload_id_len:
print "upload_id is a 32-bit string generated by OSS"
print "you can get valid upload_id by init or listparts command"
sys.exit(1)
def check_bucket(bucket):
if len(bucket) == 0:
print "Bucket should not be empty!"
print "Please input oss://bucket"
sys.exit(1)
def check_object(object):
if len(object) == 0:
print "Object should not be empty!"
print "Please input oss://bucket/object"
sys.exit(1)
if object.startswith("/"):
print "object name should not begin with / "
sys.exit(-1)
def check_localfile(localfile):
if not os.path.isfile(localfile):
print "%s is not existed!" % localfile
sys.exit(1)
def check_args(argv, args=None):
if not args:
args = []
if len(args) < argv:
print "%s miss parameters" % args[0]
sys.exit(1)
def check_bucket_object(bucket, object):
check_bucket(bucket)
check_object(object)
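# Parse an "oss://bucket/key" path into a (bucket, object) tuple, e.g. "oss://mybucket/a/b" -> ('mybucket', 'a/b'); object may be empty.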
def parse_bucket_object(path):
pather = split_path(path)
bucket = ""
object = ""
if len(pather) > 0:
bucket = pather[0]
if len(pather) > 1:
object += '/'.join(pather[1:])
object = smart_code(object)
if object.startswith("/"):
print "object name SHOULD NOT begin with /"
sys.exit(1)
return (bucket, object)
def parse_bucket(path):
bucket = path
if bucket.startswith(OSS_PREFIX):
bucket = bucket[len(OSS_PREFIX):]
tmp_list = bucket.split("/")
if len(tmp_list) > 0:
bucket = tmp_list[0]
return bucket
def check_endpoint_error(xml_string):
try:
xml = minidom.parseString(xml_string)
end_point = get_tag_text(xml, 'Endpoint')
if end_point:
print 'You should send all request to %s' % end_point
except:
pass
def cmd_listing(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
(bucket, object) = parse_bucket_object(args[1])
if len(bucket) == 0:
return cmd_getallbucket(args, options)
prefix = object
marker = ''
delimiter = ''
maxkeys = 1000
if options.marker:
marker = options.marker
if options.delimiter:
delimiter = options.delimiter
if options.maxkeys:
maxkeys = options.maxkeys
if len(args) == 3:
marker = args[2]
elif len(args) == 4:
marker = args[2]
delimiter = args[3]
elif len(args) >= 5:
marker = args[2]
delimiter = args[3]
maxkeys = args[4]
prefix = smart_code(prefix)
marker = smart_code(marker)
delimiter = smart_code(delimiter)
maxkeys = smart_code(maxkeys)
exclude = options.exclude
res = get_oss().get_bucket(bucket, prefix, marker, delimiter, maxkeys)
if (res.status / 100) == 2:
body = res.read()
hh = GetBucketXml(body)
(fl, pl) = hh.list()
print "prefix list is: "
for i in pl:
if exclude and i.startswith(exclude):
continue
print i
print "object list is: "
for i in fl:
if len(i) == 7:
try:
if exclude and i[0].startswith(exclude):
continue
print "%16s %6s %8s %s/%s" % (convert_to_localtime(i[1]), format_size((int)(i[3])), i[6], OSS_PREFIX + bucket, i[0])
except:
print "Exception when print :", i
print "\nprefix list number is: %s " % len(pl)
print "object list number is: %s " % len(fl)
return res
def cmd_listparts(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
(bucket, object) = parse_bucket_object(args[1])
if len(bucket) == 0:
return cmd_getallbucket(args, options)
print "%20s %20s %20s" % ("UploadId", "Path", "InitTime")
for i in get_all_upload_id_list(get_oss(), bucket, object):
print "%20s oss://%s/%s %20s" % (i[1], bucket, i[0], convert_to_localtime(i[2]))
def cmd_getallpartsize(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
(bucket, object) = parse_bucket_object(args[1])
if len(bucket) == 0:
return cmd_getallbucket(args, options)
total_part_size = 0
print "%5s %20s %20s %s" % ("Number", "UploadId", "Size", "Path")
for i in get_all_upload_id_list(get_oss(), bucket):
upload_id = i[1]
object = i[0]
for i in get_part_list(get_oss(), bucket, object, upload_id):
part_size = (int)(i[2])
total_part_size += part_size
print "%5s %20s %10s oss://%s/%s" % (i[0], upload_id, format_size(part_size), bucket, object)
print "totalsize is: real:%s, format:%s " % (total_part_size, format_size(total_part_size))
def cmd_init_upload(args, options):
check_args(2, args)
path = args[1]
(bucket, object) = parse_bucket_object(path)
check_bucket_object(bucket, object)
upload_id = get_upload_id(get_oss(), bucket, object)
print 'Upload Id: %s' % (upload_id)
def cmd_listpart(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
path = args[1]
(bucket, object) = parse_bucket_object(path)
if len(bucket) == 0:
return cmd_getallbucket(args, options)
if options.upload_id is None:
print "upload_id invalid, please set with --upload_id=xxx"
sys.exit(1)
print "%5s %32s %20s %20s" % ("PartNumber".ljust(10), "ETag".ljust(34), "Size".ljust(20), "LastModifyTime".ljust(32))
for i in get_part_list(get_oss(), bucket, object, options.upload_id):
if len(i) >= 4:
print "%s %s %s %s" % (str(i[0]).ljust(10), str(i[1]).ljust(34), str(i[2]).ljust(20), str(i[3]).ljust(32))
def cmd_upload_part_from_file(args, options):
check_args(3, args)
localfile = args[1]
check_localfile(localfile)
path = args[2]
(bucket, object) = parse_bucket_object(path)
check_bucket_object(bucket, object)
if options.upload_id is None:
print "upload_id invalid, please set with --upload_id=xxx"
sys.exit(1)
if options.part_number is None:
print "part_number invalid, please set with --part_number=xxx"
sys.exit(1)
res = get_oss().upload_part(bucket, object, localfile, options.upload_id, options.part_number)
return res
def cmd_upload_part_from_string(args, options):
check_args(2, args)
path = args[1]
(bucket, object) = parse_bucket_object(path)
check_bucket_object(bucket, object)
if options.upload_id is None:
print "upload_id invalid, please set with --upload_id=xxx"
sys.exit(1)
if options.part_number is None:
print "part_number invalid, please set with --part_number=xxx"
sys.exit(1)
if options.data is None:
print "data invalid, please set with --data=xxx"
sys.exit(1)
res = get_oss().upload_part_from_string(bucket, object, options.data, options.upload_id, options.part_number)
return res
def cmd_listallobject(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
path = args[1]
(bucket, object) = parse_bucket_object(path)
if len(bucket) == 0:
return cmd_getallbucket(args, options)
prefix = object
marker = ""
total_object_num = 0
totalsize = 0
totaltimes = 0
delimiter = ''
maxkeys = '1000'
if options.out:
f = open(options.out, "w")
while 1:
res = get_oss().get_bucket(bucket, prefix, marker, delimiter, maxkeys)
if res.status != 200:
return res
body = res.read()
(tmp_object_list, marker) = get_object_list_marker_from_xml(body)
for i in tmp_object_list:
object = i[0]
length = i[1]
last_modify_time = i[2]
total_object_num += 1
totalsize += (int)(length)
if options.exclude:
exclude = options.exclude
if object.startswith(exclude):
continue
msg = "%s%s/%s" % (OSS_PREFIX, bucket, object)
print "%16s %6s %s/%s " % (convert_to_localtime(last_modify_time), format_size(length), OSS_PREFIX + bucket, object)
if options.out:
f.write(msg)
f.write("\n")
totaltimes += 1
if len(marker) == 0:
break
if options.out:
f.close()
print "the object list result is saved into %s" % options.out
print "object list number is: %s " % total_object_num
print "totalsize is: real:%s, format:%s " % (totalsize, format_size(totalsize))
print "request times is: %s" % totaltimes
return res
def cmd_listalldir(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
path = args[1]
(bucket, object) = parse_bucket_object(path)
if len(bucket) == 0:
return cmd_getallbucket(args, options)
prefix = object
if prefix and not prefix.endswith("/"):
prefix = "%s/" % prefix
marker = ""
total_object_num = 0
totalsize = 0
totaltimes = 0
delimiter = '/'
maxkeys = '1000'
while 1:
res = get_oss().get_bucket(bucket, prefix, marker, delimiter, maxkeys)
if res.status != 200:
return res
body = res.read()
(tmp_object_list, marker) = get_dir_list_marker_from_xml(body)
for i in tmp_object_list:
if i.endswith("/"):
i = i[:-1]
msg = "%s" % (os.path.basename(i))
print msg
total_object_num += 1
totaltimes += 1
if len(marker) == 0:
break
print "\ncommon prefix list number is: %s " % total_object_num
print "request times is: %s" % totaltimes
return res
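# Download a single object into local_path, skipping it when a local copy already exists with the
# same size and a newer modification time than the object; used by the download worker threads.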
def get_object(bucket, object, object_prefix, local_path, length, last_modify_time, replace, retry_times = MAX_RETRY_TIMES, temp_dir = None):
'''
return RET_OK, RET_FAIL, RET_SKIP
'''
show_bar = False
object = smart_code(object)
tmp_object = object
if object_prefix == object[:len(object_prefix)]:
tmp_object = object[len(object_prefix):]
while 1:
if not tmp_object.startswith("/"):
break
tmp_object = tmp_object[1:]
localfile = os.path.join(local_path, tmp_object)
localfile = smart_code(localfile)
temp_filename = ''
if temp_dir:
temp_filename = get_unique_temp_filename(temp_dir, localfile)
for i in xrange(retry_times):
try:
if os.path.isfile(localfile):
if replace:
os.remove(localfile)
else:
t1 = last_modify_time
t2 = (int)(os.path.getmtime(localfile))
if (int)(length) == os.path.getsize(localfile) and t1 < t2:
                        #skip downloading this object when these conditions match
print "no need to get %s/%s to %s" % (bucket, object, localfile)
return RET_SKIP
else:
try:
dirname = os.path.dirname(localfile)
if not os.path.isdir(dirname):
os.makedirs(dirname)
if temp_dir:
dirname = os.path.dirname(temp_filename)
if not os.path.isdir(dirname):
os.makedirs(dirname)
except:
pass
filename = localfile
if temp_dir:
filename = temp_filename
if os.path.isdir(filename):
print "no need to get %s/%s to %s" % (bucket, object, filename)
return RET_SKIP
ret = continue_get(bucket, object, filename)
if ret:
print "get %s/%s to %s OK" % (bucket, object, localfile)
if temp_dir:
shutil.move(temp_filename, localfile)
pass
return RET_OK
else:
print "get %s/%s to %s FAIL" % (bucket, object, localfile)
except:
print "get %s/%s to %s exception" % (bucket, object, localfile)
print sys.exc_info()[0], sys.exc_info()[1]
os.remove(temp_filename)
return RET_FAIL
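# Worker thread that drains the download queue and merges its per-thread OK/SKIP/FAIL counters
# into the global counters under the global lock.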
class DownloadObjectWorker(threading.Thread):
def __init__(self, retry_times, queue):
threading.Thread.__init__(self)
self.queue = queue
self.retry_times = retry_times
self.ok_num = 0
self.fail_num = 0
self.skip_num = 0
def run(self):
while 1:
try:
(get_object, bucket, object, object_prefix, local_path, length, last_modify_time, replace, retry_times, temp_dir) = self.queue.get(block=False)
ret = get_object(bucket, object, object_prefix, local_path, length, last_modify_time, replace, self.retry_times, temp_dir)
if ret == RET_OK:
self.ok_num += 1
elif ret == RET_SKIP:
self.skip_num += 1
else:
self.fail_num += 1
self.queue.task_done()
except Queue.Empty:
break
except:
self.fail_num += 1
print sys.exc_info()[0], sys.exc_info()[1]
self.queue.task_done()
global GET_SKIP
global GET_OK
global GET_FAIL
lock.acquire()
GET_SKIP += self.skip_num
GET_OK += self.ok_num
GET_FAIL += self.fail_num
lock.release()
def cmd_downloadallobject(args, options):
check_args(3, args)
path = args[1]
(bucket, object) = parse_bucket_object(path)
check_bucket(bucket)
local_path = args[2]
if os.path.isfile(local_path):
print "%s is not dir, please input localdir" % local_path
exit(-1)
replace = False
if options.replace is not None and options.replace.lower() == "true":
replace = True
prefix = object
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
temp_dir = None
if options.temp_dir:
temp_dir = options.temp_dir
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
marker = ""
delimiter = ''
maxkeys = '1000'
handled_obj_num = 0
while 1:
queue = Queue.Queue(0)
for i in xrange(0, retry_times):
res = get_oss().get_bucket(bucket, prefix, marker, delimiter, maxkeys)
if res.status/100 == 5:
continue
else:
break
if res.status != 200:
return res
body = res.read()
(tmp_object_list, marker) = get_object_list_marker_from_xml(body)
for i in tmp_object_list:
object = i[0]
length = i[1]
last_modify_time = format_unixtime(i[2])
if str(length) == "0" and object.endswith("/"):
continue
handled_obj_num += 1
queue.put((get_object, bucket, object, prefix, local_path, length, last_modify_time, replace, MAX_RETRY_TIMES, temp_dir))
thread_pool = []
for i in xrange(thread_num):
current = DownloadObjectWorker(retry_times, queue)
thread_pool.append(current)
current.start()
queue.join()
for item in thread_pool:
item.join()
if len(marker) == 0:
break
global GET_OK
global GET_SKIP
global GET_FAIL
print "Total being downloaded objects num: %s, they are downloaded into %s" % (GET_OK + GET_FAIL + GET_SKIP, local_path)
print "OK num:%s, SKIP num:%s, FAIL num:%s" % (GET_OK, GET_SKIP, GET_FAIL)
if temp_dir and os.path.abspath(local_path) != os.path.abspath(temp_dir):
shutil.rmtree(temp_dir, True)
if GET_FAIL != 0:
exit(-1)
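# Upload one local file, skipping it when the object on OSS already has the same size and an equal
# or newer last-modified time, and switching to multipart upload above multipart_threshold; used by
# the upload worker threads.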
def put_object(bucket, object, local_file, local_modify_time, is_replace, is_check_md5=False, content_type="", multipart_threshold=100*1024*1024, retry_times=2):
'''
return RET_OK, RET_FAIL, RET_SKIP
'''
if not os.path.isfile(local_file):
print "upload %s FAIL, no such file." % (local_file)
return RET_FAIL
show_bar = False
oss = get_oss(show_bar)
object = smart_code(object)
if len(object) == 0:
print "object is empty when put /%s/%s, skip" % (bucket, object)
return RET_SKIP
local_file_size = os.path.getsize(local_file)
if not is_replace:
try:
res = oss.head_object(bucket, object)
if res.status == 200 and str(local_file_size) == res.getheader('content-length'):
oss_gmt = res.getheader('last-modified')
format = "%a, %d %b %Y %H:%M:%S GMT"
oss_last_modify_time = format_unixtime(oss_gmt, format)
if not local_modify_time:
local_modify_time = (int)(os.path.getmtime(local_file))
if oss_last_modify_time >= local_modify_time:
#print "upload %s is skipped" % (local_file)
return RET_SKIP
except:
print "%s %s" % (sys.exc_info()[0], sys.exc_info()[1])
if is_check_md5:
md5string, base64md5 = get_file_md5(local_file)
for i in xrange(retry_times):
try:
if local_file_size > multipart_threshold:
upload_id = ""
thread_num = 5
max_part_num = 10000
headers = {}
if is_check_md5:
headers['x-oss-meta-md5'] = md5string
if content_type:
headers['Content-Type'] = content_type
res = oss.multi_upload_file(bucket, object, local_file, upload_id, thread_num, max_part_num, headers, check_md5=is_check_md5)
else:
headers = {}
if is_check_md5:
headers['Content-MD5'] = base64md5
headers['x-oss-meta-md5'] = md5string
res = oss.put_object_from_file(bucket, object, local_file, content_type, headers)
if 200 == res.status:
return RET_OK
else:
print "upload %s to /%s/%s FAIL, status:%s, request-id:%s" % (local_file, bucket, object, res.status, res.getheader("x-oss-request-id"))
except:
print "upload %s/%s from %s exception" % (bucket, object, local_file)
print sys.exc_info()[0], sys.exc_info()[1]
return RET_FAIL
class UploadObjectWorker(threading.Thread):
def __init__(self, check_point_file, retry_times, queue):
threading.Thread.__init__(self)
self.check_point_file = check_point_file
self.queue = queue
self.file_time_map = {}
self.error_file_list = []
self.retry_times = retry_times
self.ok_num = 0
self.fail_num = 0
self.skip_num = 0
def run(self):
global PUT_SKIP
global PUT_OK
global PUT_FAIL
global TOTAL_PUT
global FILE_NUM_OK
while 1:
try:
(put_object, bucket, object, local_file, local_modify_time, is_replace, is_check_md5, content_type, multipart_threshold) = self.queue.get(block=False)
ret = put_object(bucket, object, local_file, local_modify_time, is_replace, is_check_md5, content_type, multipart_threshold, self.retry_times)
is_ok = False
if ret == RET_OK:
is_ok = True
self.ok_num += 1
PUT_OK += 1
FILE_NUM_OK += 1
elif ret == RET_SKIP:
is_ok = True
self.skip_num += 1
PUT_SKIP += 1
FILE_NUM_OK += 1
else:
self.fail_num += 1
PUT_FAIL += 1
self.error_file_list.append(local_file)
if is_ok:
local_file_full_path = os.path.abspath(local_file)
local_file_full_path = format_utf8(local_file_full_path)
self.file_time_map[local_file_full_path] = (int)(os.path.getmtime(local_file))
sum = (PUT_SKIP + PUT_OK + PUT_FAIL)
if TOTAL_PUT > 0:
exec("rate = 100*%s/(%s*1.0)" % (sum, TOTAL_PUT))
else:
rate = 0
print '\rOK:%s, FAIL:%s, SKIP:%s, TOTAL_DONE:%s, TOTAL_TO_DO:%s, PROCESS:%.2f%%' % (PUT_OK, PUT_FAIL, PUT_SKIP, sum, TOTAL_PUT, rate),
sys.stdout.flush()
if self.ok_num % AUTO_DUMP_FILE_NUM == 0:
if len(self.file_time_map) != 0:
dump_check_point(self.check_point_file, self.file_time_map)
self.file_time_map = {}
self.queue.task_done()
except Queue.Empty:
break
except:
PUT_FAIL += 1
print sys.exc_info()[0], sys.exc_info()[1]
self.queue.task_done()
if len(self.error_file_list) != 0:
lock.acquire()
ERROR_FILE_LIST.extend(self.error_file_list)
lock.release()
if len(self.file_time_map) != 0:
dump_check_point(self.check_point_file, self.file_time_map)
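# The check point file holds one "<mtime>#<absolute file path>" line per successfully uploaded file;
# load it into a {path: mtime} dict, keeping the newest timestamp per path.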
def load_check_point(check_point_file):
file_time_map = {}
if os.path.isfile(check_point_file):
f = open(check_point_file)
for line in f:
line = line.strip()
tmp_list = line.split('#')
if len(tmp_list) > 1:
time_stamp = (float)(tmp_list[0])
time_stamp = (int)(time_stamp)
#file_name = "".join(tmp_list[1:])
file_name = line[len(tmp_list[0])+1:]
file_name = format_utf8(file_name)
if file_time_map.has_key(file_name) and file_time_map[file_name] > time_stamp:
continue
file_time_map[file_name] = time_stamp
f.close()
return file_time_map
def load_filename(filename_file):
filenames = []
if os.path.isfile(filename_file):
f = open(filename_file)
for line in f:
line = line.strip()
filenames.append(line)
return filenames
def dump_filename(filename_file, filenames=None):
if len(filename_file) == 0 or len(filenames) == 0:
return
try:
f = open(filename_file,"w")
for filename in filenames:
line = "%s\n" %(filename)
f.write(line)
except:
pass
try:
f.close()
except:
pass
def dump_check_point(check_point_file, result_map=None):
if len(check_point_file) == 0 or len(result_map) == 0:
return
lock.acquire()
old_file_time_map = {}
if os.path.isfile(check_point_file):
old_file_time_map = load_check_point(check_point_file)
try:
f = open(check_point_file,"w")
for k, v in result_map.items():
if old_file_time_map.has_key(k) and old_file_time_map[k] < v:
del old_file_time_map[k]
line = "%s#%s\n" % (v, k)
line = format_utf8(line)
f.write(line)
for k, v in old_file_time_map.items():
line = "%s#%s\n" % (v, k)
line = format_utf8(line)
f.write(line)
except:
pass
try:
f.close()
except:
pass
lock.release()
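# Normalize a local path into an object name: replace OS path separators with '/' and collapse duplicate slashes.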
def format_object(object):
tmp_list = object.split(os.sep)
object = "/".join(x for x in tmp_list if x.strip() and x != "/")
while 1:
if object.find('//') == -1:
break
object = object.replace('//', '/')
return object
def get_object_name(filename, filepath):
filename = format_object(filename)
filepath = format_object(filepath)
file_name = os.path.basename(filename)
return file_name
def get_file_names_from_disk(path, topdown):
filenames = []
for root, dirs, files in os.walk(path, topdown):
for filespath in files:
filename = os.path.join(root, filespath)
filenames.append(filename)
return filenames
#for offline uploading of files to OSS
def cmd_upload_disk(args, options):
check_args(3, args)
path = args[2]
(bucket, object) = parse_bucket_object(path)
check_bucket(bucket)
local_path = args[1]
if not os.path.isdir(local_path):
print "%s is not dir, please input localdir" % local_path
exit(-1)
if not local_path.endswith(os.sep):
local_path = "%s%s" % (local_path, os.sep)
if not options.device_id:
print "please set device id with --device_id=xxx"
exit(-1)
check_point_file = ""
is_check_point = False
file_time_map = {}
if options.check_point:
is_check_point = True
check_point_file = options.check_point
file_time_map = load_check_point(check_point_file)
filename_file = ""
filenames = []
is_filename_file = False
if options.filename_list:
filename_file = options.filename_list
if os.path.isfile(filename_file):
is_filename_file = True
filenames = load_filename(filename_file)
prefix = object
is_replace = False
if options.replace:
if options.replace.lower() == "true":
is_replace = True
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
is_check_md5 = False
if options.check_md5:
if options.check_md5.lower() == "true":
is_check_md5 = True
multipart_threshold = 100*1024*1024
if options.multipart_threshold:
multipart_threshold = (int)(options.multipart_threshold)
total_upload_num = 0
topdown = True
def process_localfile(items):
queue = Queue.Queue(0)
for local_file in items:
if os.path.isfile(local_file):
local_modify_time = 0
local_file_full_path = os.path.abspath(local_file)
local_file_full_path = format_utf8(local_file_full_path)
if is_check_point and file_time_map.has_key(local_file_full_path):
local_modify_time = (int)(os.path.getmtime(local_file))
record_modify_time = file_time_map[local_file_full_path]
if local_modify_time <= record_modify_time:
print 'file:%s already upload' %(local_file_full_path)
global FILE_NUM_OK
FILE_NUM_OK += 1
continue
if options.skip_dir and options.skip_dir.lower() == "true":
object = smart_code(os.path.basename(local_file))
else:
object = smart_code(local_file)
if options.strip_dir:
strip_dir = options.strip_dir
if not strip_dir.endswith("/"):
strip_dir = "%s/" % strip_dir
if object.startswith(strip_dir):
object = object[len(options.strip_dir):]
if options.skip_suffix and options.skip_suffix.lower() == "true":
pos = object.rfind(".")
if pos != -1:
object = object[:pos]
while 1:
if object.startswith("/"):
object = object[1:]
else:
break
if prefix:
if prefix.endswith("/"):
object = "%s%s" % (prefix, object)
else:
object = "%s/%s" % (prefix, object)
queue.put((put_object, bucket, object, local_file, local_modify_time, is_replace, is_check_md5, options.content_type, multipart_threshold))
qsize = queue.qsize()
global TOTAL_PUT
TOTAL_PUT += qsize
thread_pool = []
for i in xrange(thread_num):
current = UploadObjectWorker(check_point_file, retry_times, queue)
thread_pool.append(current)
current.start()
queue.join()
for item in thread_pool:
item.join()
return qsize
if not is_filename_file:
filenames = get_file_names_from_disk(local_path, topdown);
dump_filename(filename_file, filenames)
global FILE_NUM_TOTAL
FILE_NUM_TOTAL += len(filenames)
total_upload_num += process_localfile(filenames);
print ""
print "DEVICEID:sn%s" % options.device_id
global PUT_OK
global PUT_SKIP
global PUT_FAIL
print "This time Total being uploaded localfiles num: %s" % (PUT_OK + PUT_SKIP + PUT_FAIL)
print "This time OK num:%s, SKIP num:%s, FAIL num:%s" % (PUT_OK, PUT_SKIP, PUT_FAIL)
print "Total file num:%s, OK file num:%s" %(FILE_NUM_TOTAL, FILE_NUM_OK)
if PUT_FAIL != 0:
print "FailUploadList:"
for i in set(ERROR_FILE_LIST):
print i
if options.out:
try:
f = open(options.out, "w")
for i in set(ERROR_FILE_LIST):
f.write("%s\n" % i.strip())
f.close()
print "FailUploadList is written into %s" % options.out
except:
print "write upload failed file exception"
print sys.exc_info()[0], sys.exc_info()[1]
exit(-1)
def cmd_upload_object_from_localdir(args, options):
check_args(3, args)
path = args[2]
(bucket, object) = parse_bucket_object(path)
check_bucket(bucket)
local_path = args[1]
if not os.path.isdir(local_path):
print "%s is not dir, please input localdir" % local_path
exit(-1)
if not local_path.endswith(os.sep):
local_path = "%s%s" % (local_path, os.sep)
is_check_point = False
check_point_file = ""
file_time_map = {}
if options.check_point:
is_check_point = True
check_point_file = options.check_point
file_time_map = load_check_point(check_point_file)
prefix = object
is_replace = False
if options.replace:
if options.replace.lower() == "true":
is_replace = True
is_check_md5 = False
if options.check_md5:
if options.check_md5.lower() == "true":
is_check_md5 = True
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
multipart_threshold = 100*1024*1024
if options.multipart_threshold:
multipart_threshold = (int)(options.multipart_threshold)
total_upload_num = 0
topdown = True
def process_localfile(items):
queue = Queue.Queue(0)
for item in items:
local_file = os.path.join(root, item)
if os.path.isfile(local_file):
local_file_full_path = os.path.abspath(local_file)
local_file_full_path = format_utf8(local_file_full_path)
local_modify_time = 0
if is_check_point and file_time_map.has_key(local_file_full_path):
local_modify_time = (int)(os.path.getmtime(local_file))
record_modify_time = file_time_map[local_file_full_path]
if local_modify_time <= record_modify_time:
continue
object = get_object_name(smart_code(local_file), smart_code(local_path))
if prefix:
if prefix.endswith("/"):
object = "%s%s" % (prefix, object)
else:
object = "%s/%s" % (prefix, object)
content_type = ''
queue.put((put_object, bucket, object, local_file, local_modify_time, is_replace, is_check_md5, content_type, multipart_threshold))
qsize = queue.qsize()
thread_pool = []
global TOTAL_PUT
TOTAL_PUT += qsize
for i in xrange(thread_num):
current = UploadObjectWorker(check_point_file, retry_times, queue)
thread_pool.append(current)
current.start()
queue.join()
for item in thread_pool:
item.join()
return qsize
for root, dirs, files in os.walk(local_path, topdown):
total_upload_num += process_localfile(files)
total_upload_num += process_localfile(dirs)
global PUT_OK
global PUT_SKIP
global PUT_FAIL
print ""
print "Total being uploaded localfiles num: %s" % (PUT_OK + PUT_SKIP + PUT_FAIL)
print "OK num:%s, SKIP num:%s, FAIL num:%s" % (PUT_OK, PUT_SKIP, PUT_FAIL)
if PUT_FAIL != 0:
exit(-1)
def get_object_list_marker_from_xml(body):
#return ([(object, object_length, last_modify_time)...], marker)
object_meta_list = []
next_marker = ""
hh = GetBucketXml(body)
(fl, pl) = hh.list()
if len(fl) != 0:
for i in fl:
object = convert_utf8(i[0])
last_modify_time = i[1]
length = i[3]
object_meta_list.append((object, length, last_modify_time))
if hh.is_truncated:
next_marker = hh.nextmarker
return (object_meta_list, next_marker)
def cmd_deleteallobject(args, options):
if len(args) == 1:
return cmd_getallbucket(args, options)
path = args[1]
(bucket, object) = parse_bucket_object(path)
if len(bucket) == 0:
return cmd_getallbucket(args, options)
force_delete = False
if options.force and options.force.lower() == "true":
force_delete = True
if not force_delete:
ans = raw_input("DELETE all objects? Y/N, default is N: ")
if ans.lower() != "y":
print "quit."
exit(-1)
prefix = object
marker = ''
delimiter = ''
maxkeys = '1000'
if options.marker:
marker = options.marker
if options.delimiter:
delimiter = options.delimiter
if options.maxkeys:
maxkeys = options.maxkeys
debug = True
if not delete_all_objects(get_oss(), bucket, prefix, delimiter, marker, maxkeys, debug):
exit(-1)
def cmd_getallbucket(args, options):
width = 20
print "%s %s %s" % ("CreateTime".ljust(width), "BucketLocation".ljust(width), "BucketName".ljust(width))
marker = ""
prefix = ""
headers = None
total_num = 0
while 1:
res = get_oss().get_service(headers, prefix, marker)
if (res.status / 100) == 2:
body = res.read()
(bucket_meta_list, marker) = get_bucket_meta_list_marker_from_xml(body)
for i in bucket_meta_list:
print "%s %s %s" % (str(convert_to_localtime(i.creation_date)).ljust(width), i.location.ljust(width), i.name)
total_num += 1
else:
break
if not marker:
break
print "\nBucket Number is: %s" % total_num
return res
def cmd_createbucket(args, options):
check_args(2, args)
if options.acl is not None and options.acl not in ACL_LIST:
print "acl invalid, SHOULD be one of %s" % (ACL_LIST)
sys.exit(1)
acl = ''
if options.acl:
acl = options.acl
bucket = parse_bucket(args[1])
if options.location is not None:
location = options.location
return get_oss().put_bucket_with_location(bucket, acl, location)
return get_oss().put_bucket(bucket, acl)
def cmd_getbucketlocation(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_bucket_location(bucket)
if res.status / 100 == 2:
body = res.read()
h = GetBucketLocationXml(body)
print h.location
return res
def cmd_deletebucket(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
return get_oss().delete_bucket(bucket)
def cmd_deletewholebucket(args, options):
check_args(2, args)
ans = raw_input("DELETE whole bucket? Y/N, default is N: ")
if ans.lower() != "y":
print "quit."
exit(-1)
bucket = parse_bucket(args[1])
debug = True
delete_marker = ""
delete_upload_id_marker = ""
if options.marker:
delete_marker = options.marker
if options.upload_id:
delete_upload_id_marker = options.upload_id
if not clear_all_objects_in_bucket(get_oss(), bucket, delete_marker, delete_upload_id_marker, debug):
exit(-1)
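# Delete a single object with retries, bumping the global DELETE_OK counter on success; used by the delete worker threads.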
def delete_object(bucket, object, retry_times=2):
object = smart_code(object)
global DELETE_OK
ret = False
for i in xrange(retry_times):
try:
oss = get_oss()
res = oss.delete_object(bucket, object)
if 2 == res.status / 100:
ret = True
if ret:
DELETE_OK += 1
print "delete %s/%s OK" % (bucket, object)
return ret
else:
print "delete %s/%s FAIL, status:%s, request-id:%s" % (bucket, object, res.status, res.getheader("x-oss-request-id"))
except:
print "delete %s/%s exception" % (bucket, object)
print sys.exc_info()[0], sys.exc_info()[1]
return False
class DeleteObjectWorker(threading.Thread):
def __init__(self, retry_times, queue):
threading.Thread.__init__(self)
self.queue = queue
self.retry_times = retry_times
def run(self):
while 1:
try:
(delete_object, bucket, object) = self.queue.get(block=False)
delete_object(bucket, object, self.retry_times)
self.queue.task_done()
except Queue.Empty:
break
except:
self.queue.task_done()
def cmd_deletebyfile(args, options):
check_args(2, args)
localfile = args[1]
check_localfile(localfile)
queue = Queue.Queue(0)
f = open(localfile)
for line in f:
line = line.strip()
(bucket, object) = parse_bucket_object(line)
if len(bucket) != 0 and len(object) != 0:
queue.put((delete_object, bucket, object))
f.close()
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
thread_pool = []
for i in xrange(thread_num):
current = DeleteObjectWorker(retry_times, queue)
thread_pool.append(current)
current.start()
queue.join()
for item in thread_pool:
item.join()
def cmd_setacl(args, options):
check_args(2, args)
if options.acl is None or options.acl not in ACL_LIST:
print "acl invalid, SHOULD be one of %s" % (ACL_LIST)
sys.exit(1)
bucket = parse_bucket(args[1])
return get_oss().put_bucket(bucket, options.acl)
def cmd_getacl(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_bucket_acl(bucket)
if (res.status / 100) == 2:
body = res.read()
h = GetBucketAclXml(body)
print h.grant
return res
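# Parse a "key1:value1#key2:value2" option string into a header dict,
# e.g. "Content-Type:text/plain#x-oss-meta-a:b" -> {'Content-Type': 'text/plain', 'x-oss-meta-a': 'b'}.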
def to_http_headers(string):
headers_map = {}
for i in string.split('#'):
key_value_list = i.strip().split(':')
if len(key_value_list) >= 2:
headers_map[key_value_list[0]] = ":".join(key_value_list[1:])
return headers_map
def cmd_mkdir(args, options):
check_args(2, args)
if not args[1].endswith('/'):
args[1] += '/'
(bucket, object) = parse_bucket_object(args[1])
res = get_oss().put_object_from_string(bucket, object, "")
return res
def handler(signum, frame):
print 'Signal handler called with signal', signum
raise Exception("timeout")
try:
signal.signal(signal.SIGALRM, handler)
except:
pass
def cmd_put(args, options):
check_args(3, args)
localfile = args[1]
check_localfile(localfile)
if os.path.getsize(localfile) > MAX_OBJECT_SIZE:
print "locafile:%s is bigger than %s, it is not support by put, please use multiupload instead." % (localfile, MAX_OBJECT_SIZE)
exit(-1)
#user specified objectname oss://bucket/[path]/object
(bucket, object) = parse_bucket_object(args[2])
if len(object) == 0:
# e.g. upload to oss://bucket/
object = os.path.basename(localfile)
elif object.endswith("/"):
        #e.g. upload to oss://bucket/a/b/
object += os.path.basename(localfile)
content_type = ""
headers = {}
if options.content_type:
content_type = options.content_type
if options.headers:
headers = to_http_headers(options.headers)
if options.check_md5:
if options.check_md5.lower() == "true":
md5string, base64md5 = get_file_md5(localfile)
headers["Content-MD5"] = base64md5
headers["x-oss-meta-md5"] = md5string
timeout = 0
if options.timeout:
timeout = (int)(options.timeout)
print "timeout", timeout
try:
signal.alarm(timeout)
except:
pass
res = get_oss().put_object_from_file(bucket, object, localfile, content_type, headers)
try:
signal.alarm(0) # Disable the signal
except:
pass
if res.status == 200:
print_url(OSS_HOST, bucket, object, res)
return res
def print_url(host, bucket, object, res):
print ""
second_level_domain = OSS_HOST
orginal_object = object
object = oss_quote(object)
if check_bucket_valid(bucket) and not is_ip(second_level_domain):
if is_oss_host(second_level_domain):
print "Object URL is: http://%s.%s/%s" % (bucket, second_level_domain, object)
else:
print "Object URL is: http://%s/%s" % (second_level_domain, object)
else:
print "Object URL is: http://%s/%s/%s" % (second_level_domain, bucket, object)
print "Object abstract path is: oss://%s/%s" % (bucket, orginal_object)
header_map = convert_header2map(res.getheaders())
print "ETag is %s " % safe_get_element("etag", header_map)
def cmd_upload(args, options):
check_args(3, args)
localfile = args[1]
check_localfile(localfile)
multipart_threshold = 100*1024*1024
if options.multipart_threshold:
multipart_threshold = (int)(options.multipart_threshold)
localfile_size = os.path.getsize(localfile)
if localfile_size > multipart_threshold or localfile_size > MAX_OBJECT_SIZE:
return cmd_multi_upload(args, options)
return cmd_put(args, options)
def cmd_upload_group(args, options):
check_args(3, args)
localfile = args[1]
check_localfile(localfile)
#user specified objectname oss://bucket/[path]/object
(bucket, object) = parse_bucket_object(args[2])
if len(object) == 0:
# e.g. upload to oss://bucket/
object = os.path.basename(localfile)
elif object.endswith("/"):
        #e.g. upload to oss://bucket/a/b/
object += os.path.basename(localfile)
headers = {}
content_type = ''
if options.headers:
headers = to_http_headers(options.headers)
if options.content_type:
content_type = options.content_type
headers['Content-Type'] = content_type
thread_num = 10
if options.thread_num:
thread_num = (int)(options.thread_num)
max_part_num = 1000
if options.max_part_num:
max_part_num = (int)(options.max_part_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
oss = get_oss()
oss.set_retry_times(retry_times)
res = oss.upload_large_file(bucket, object, localfile, thread_num, max_part_num, headers)
if res.status == 200:
print_url(OSS_HOST, bucket, object, res)
return res
def cmd_multi_upload(args, options):
check_args(3, args)
localfile = args[1]
check_localfile(localfile)
#user specified objectname oss://bucket/[path]/object
(bucket, object) = parse_bucket_object(args[2])
is_check_md5 = False
if len(object) == 0:
# e.g. upload to oss://bucket/
object = os.path.basename(localfile)
elif object.endswith("/"):
        #e.g. upload to oss://bucket/a/b/
object += os.path.basename(localfile)
headers = {}
if options.headers:
headers = to_http_headers(options.headers)
thread_num = 10
if options.thread_num:
thread_num = (int)(options.thread_num)
max_part_num = 1000
if options.max_part_num:
max_part_num = (int)(options.max_part_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
if options.check_md5:
if options.check_md5.lower() == "true":
is_check_md5 = True
md5string, base64md5 = get_file_md5(localfile)
headers["x-oss-meta-md5"] = md5string
oss = get_oss()
oss.set_retry_times(retry_times)
upload_id = ""
if options.upload_id:
upload_id = options.upload_id
res = oss.get_all_parts(bucket, object, upload_id, max_parts=1)
if res.status != 200:
return res
if not upload_id:
upload_ids = []
upload_ids = get_upload_id_list(oss, bucket, object)
if upload_ids:
upload_ids = sorted(upload_ids)
upload_id = upload_ids[0]
res = oss.multi_upload_file(bucket, object, localfile, upload_id, thread_num, max_part_num, headers, debug=True, check_md5=is_check_md5)
if res.status == 200:
print_url(OSS_HOST, bucket, object, res)
return res
def cmd_copy(args, options):
check_args(3, args)
(bucket_source, object_source) = parse_bucket_object(args[1])
check_bucket_object(bucket_source, object_source)
(bucket, object) = parse_bucket_object(args[2])
check_bucket_object(bucket, object)
content_type = ""
headers = {}
if options.headers:
headers = to_http_headers(options.headers)
if options.content_type:
content_type = options.content_type
headers['Content-Type'] = content_type
res = get_oss().copy_object(bucket_source, object_source, bucket, object, headers)
if res.status == 200:
print_url(OSS_HOST, bucket, object, res)
return res
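# Server-side copy of a large object: HEAD the source for its length, split it into parts of
# --part_size bytes (at least 5MB, at most 10000 parts), copy each range with copy_object_as_part,
# then complete the multipart upload.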
def cmd_upload_part_copy(args, options):
check_args(3, args)
(bucket_source, object_source) = parse_bucket_object(args[1])
check_bucket_object(bucket_source, object_source)
(bucket, object) = parse_bucket_object(args[2])
check_bucket_object(bucket, object)
#head object to get object size
headers = {}
res = get_oss().head_object(bucket_source, object_source, headers = headers)
if res.status != 200:
print 'copy large file fail because head object fail, status:%s' %(res.status)
sys.exit(-1)
content_len = (int)(res.getheader('Content-Length'))
etag = res.getheader('ETag')
#get part size
default_part_size = 10 * 1024 * 1024
part_size = default_part_size
max_part_num=10000
min_part_size = 5 * 1024 * 1024
if options.part_size:
part_size = (int)(eval(options.part_size))
if part_size < min_part_size:
print 'part size too small, change part size to %s' %(default_part_size)
part_size = default_part_size
if part_size * max_part_num < content_len:
part_size = (content_len + max_part_num - content_len % max_part_num) / max_part_num
print 'part num more than max part num %s, change part size to %s' %(max_part_num, part_size)
if content_len % part_size:
part_size_list = [part_size] * (content_len / part_size) + [ content_len % part_size]
else:
part_size_list = [part_size] * (content_len / part_size)
#get upload id
if options.upload_id:
upload_id = options.upload_id
else:
res = get_oss().init_multi_upload(bucket, object)
if res.status != 200:
print 'copy large file fail because init multipart upload fail, status:%s' %(res.status)
sys.exit(-1)
upload_id = GetInitUploadIdXml(res.read()).upload_id
#upload part copy
start = 0
part_number = 1
for part_size in part_size_list:
headers = {'x-oss-copy-source-range': ('bytes=%d-%d' % (start, start + part_size-1))}
headers['x-oss-copy-source-if-match'] = etag
res = get_oss().copy_object_as_part(bucket_source, object_source, bucket, object, upload_id, part_number, headers)
if res.status != 200:
print 'copy large file fail because upload part copy fail, status:%s, upload_id:%s' %(res.status, upload_id)
sys.exit(-1)
start += part_size
part_number += 1
#complete multipart upload
part_xml = get_part_xml(get_oss(), bucket, object, upload_id)
res = get_oss().complete_upload(bucket, object, upload_id, part_xml)
if res.status != 200:
print 'copy large file fail because complete multipart upload fail, status:%s, upload_id:%s' %(res.status, upload_id)
sys.exit(-1)
else:
print_url(OSS_HOST, bucket, object, res)
return res
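# Copy one object between buckets; when replace is False an existing destination object is counted
# as OK and skipped. Objects larger than 1GB are skipped with a hint to use get + multiupload instead.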
def copy_object(src_bucket, src_object, des_bucket, des_object, headers, replace, retry_times = 3):
global COPY_OK
if COPY_OK > 0 and COPY_OK % 100 == 0:
print "%s objects are copied OK, marker is:%s" % (COPY_OK, src_object)
for i in xrange(retry_times):
tmp_headers = headers.copy()
try:
if replace:
res = get_oss().copy_object(src_bucket, src_object, des_bucket, des_object, tmp_headers)
if res.status == 200:
COPY_OK += 1
return True
else:
print "copy /%s/%s to /%s/%s FAIL, status:%s, request-id:%s" % \
(src_bucket, src_object, des_bucket, des_object, res.status, res.getheader("x-oss-request-id"))
else:
res = get_oss().head_object(des_bucket, des_object)
if res.status == 200:
COPY_OK += 1
return True
elif res.status == 404:
res = get_oss().copy_object(src_bucket, src_object, des_bucket, des_object, tmp_headers)
if res.status == 200:
COPY_OK += 1
return True
else:
print "copy /%s/%s to /%s/%s FAIL, status:%s, request-id:%s" % \
(src_bucket, src_object, des_bucket, des_object, res.status, res.getheader("x-oss-request-id"))
except:
print "copy /%s/%s to /%s/%s exception" % (src_bucket, src_object, des_bucket, des_object)
print sys.exc_info()[0], sys.exc_info()[1]
try:
res = get_oss().head_object(src_bucket, src_object)
if res.status == 200:
length = (int)(res.getheader('content-length'))
max_length = 1*1024*1024*1024
if length > max_length:
print "/%s/%s is bigger than %s, copy may fail. skip this one." \
% (src_bucket, src_object, max_length)
print "please use get command to download the object and then use multiupload command to upload the object."
return False
except:
print sys.exc_info()[0], sys.exc_info()[1]
pass
sleep_time = 300
print "sleep %s" % sleep_time
time.sleep(sleep_time)
print "copy /%s/%s to /%s/%s FAIL" % (src_bucket, src_object, des_bucket, des_object)
return False
class CopyObjectWorker(threading.Thread):
def __init__(self, retry_times, queue):
threading.Thread.__init__(self)
self.queue = queue
self.retry_times = retry_times
def run(self):
while 1:
try:
(copy_object, src_bucket, src_object, des_bucket, des_object, replace, headers) = self.queue.get(block=False)
copy_object(src_bucket, src_object, des_bucket, des_object, headers, replace, self.retry_times)
self.queue.task_done()
except Queue.Empty:
break
except:
self.queue.task_done()
def cmd_copy_bucket(args, options):
check_args(3, args)
(src_bucket, src_prefix) = parse_bucket_object(args[1])
(des_bucket, des_prefix) = parse_bucket_object(args[2])
if des_prefix and not des_prefix.endswith("/"):
des_prefix = "%s/" % des_prefix
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
replace = False
if options.replace is not None and options.replace.lower() == "true":
replace = True
marker = ""
if options.marker:
marker = options.marker
headers = {}
if options.headers:
headers = to_http_headers(options.headers)
delimiter = ''
maxkeys = '1000'
handled_obj_num = 0
while 1:
queue = Queue.Queue(0)
res = get_oss().get_bucket(src_bucket, src_prefix, marker, delimiter, maxkeys)
if res.status != 200:
return res
body = res.read()
(tmp_object_list, marker) = get_object_list_marker_from_xml(body)
for i in tmp_object_list:
object = i[0]
length = i[1]
last_modify_time = i[2]
if str(length) == "0" and object.endswith("/"):
continue
handled_obj_num += 1
src_object = smart_code(object)
tmp_object = src_object
if src_prefix.endswith("/"):
if src_prefix == object[:len(src_prefix)]:
tmp_object = object[len(src_prefix):]
while 1:
if not tmp_object.startswith("/"):
break
tmp_object = tmp_object[1:]
if des_prefix:
des_object = "%s%s" % (des_prefix, tmp_object)
else:
des_object = tmp_object
queue.put((copy_object, src_bucket, src_object, des_bucket, des_object, replace, headers))
#copy_object(src_bucket, src_object, des_bucket, des_object, replace)
thread_pool = []
for i in xrange(thread_num):
current = CopyObjectWorker(retry_times, queue)
thread_pool.append(current)
current.start()
queue.join()
for item in thread_pool:
item.join()
if len(marker) == 0:
break
print "Total being copied objects num: %s, from /%s/%s to /%s/%s" % \
(handled_obj_num, src_bucket, src_prefix, des_bucket, des_prefix)
global COPY_OK
print "OK num:%s" % COPY_OK
print "FAIL num:%s" % (handled_obj_num - COPY_OK)
def continue_get(bucket, object, localfile, headers=None, retry_times=3):
length = -1
local_length = -2
tmp_headers = {}
header_map = {}
if headers:
tmp_headers = headers.copy()
try:
res = get_oss().head_object(bucket, object, tmp_headers)
if 200 == res.status:
length = (int)(res.getheader('content-length'))
header_map = convert_header2map(res.getheaders())
else:
print "can not get the length of object:", object
return False
except:
print sys.exc_info()[0], sys.exc_info()[1]
return False
endpos = length - 1
for i in xrange(retry_times):
curpos = 0
range_info = 'bytes=%d-%d' % (curpos, endpos)
if os.path.isfile(localfile):
local_length = os.path.getsize(localfile)
if i == 0 and header_map.has_key('x-oss-meta-md5'):
oss_md5_string = header_map['x-oss-meta-md5']
local_md5_string, base64_md5 = get_file_md5(localfile)
if local_md5_string.lower() == oss_md5_string.lower():
return True
else:
os.remove(localfile)
elif local_length == length:
#print "localfile:%s exists and length is equal. please check if it is ok. you can remove it first and download again." % localfile
return True
elif local_length < length:
if i == 0:
os.remove(localfile)
else:
curpos = local_length
range_info = 'bytes=%d-%d' % (curpos, endpos)
print "localfile:%s exists and length is:%s, continue to download. range:%s." % (localfile, local_length, range_info)
else:
os.remove(localfile)
file = open(localfile, "ab+")
tmp_headers = {}
if headers:
tmp_headers = headers.copy()
tmp_headers['Range'] = range_info
file.seek(curpos)
is_read_ok = False
oss_md5_string = ''
try:
res = get_oss().get_object(bucket, object, tmp_headers)
if res.status/100 == 2:
header_map = convert_header2map(res.getheaders())
if header_map.has_key('x-oss-meta-md5'):
oss_md5_string = header_map['x-oss-meta-md5']
while True:
content = res.read(RECV_BUF_SIZE)
if content:
file.write(content)
curpos += len(content)
else:
break
is_read_ok = True
else:
print "range get /%s/%s [%s] ret:%s, request-id:%s" % (bucket, object, range_info, res.status, res.getheader("x-oss-request-id"))
except:
print "range get /%s/%s [%s] exception" % (bucket, object, range_info)
print sys.exc_info()[0], sys.exc_info()[1]
file.flush()
file.close()
file_opened = False
continue
file.flush()
file.close()
if os.path.isfile(localfile):
local_length = os.path.getsize(localfile)
if is_read_ok and length == local_length:
if oss_md5_string != '':
md5string, base64md5 = get_file_md5(localfile)
if md5string.lower() != oss_md5_string.lower():
print "The object %s is download to %s failed. file md5 is incorrect." % (object, localfile)
return False
return True
else:
print "The object %s is download to %s failed. file length is incorrect.length is:%s local_length:%s" % (object, localfile, length, local_length)
return False
def cmd_get(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
localfile = args[2]
localfile = smart_code(localfile)
headers = {}
if options.headers:
headers = to_http_headers(options.headers)
if options.continue_download:
retry_times = 3
res = continue_get(bucket, object, localfile, headers, retry_times)
else:
tmp_headers = {}
tmp_headers = headers.copy()
res = get_oss().get_object_to_file(bucket, object, localfile, headers=tmp_headers)
if res.status/100 == 2:
header_map = convert_header2map(res.getheaders())
if header_map.has_key('x-oss-meta-md5'):
oss_md5string = header_map['x-oss-meta-md5']
md5string, base64md5 = get_file_md5(localfile)
if md5string.lower() != oss_md5string.lower():
print "The object %s is download to %s failed. file md5 is incorrect." % (object, localfile)
sys.exit(1)
else:
content_length = int(header_map['content-length'])
local_length = os.path.getsize(localfile)
if content_length != local_length:
print "The object %s is download to %s failed. file length is incorrect." % (object, localfile)
sys.exit(1)
else:
return res
if res:
print "The object %s is downloaded to %s, please check." % (object, localfile)
return res
def cmd_multi_get(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
localfile = args[2]
localfile = smart_code(localfile)
thread_num = 5
if options.thread_num:
thread_num = (int)(options.thread_num)
retry_times = MAX_RETRY_TIMES
if options.retry_times:
retry_times = (int)(options.retry_times)
show_bar = False
oss = get_oss(show_bar)
ret = multi_get(oss, bucket, object, localfile, thread_num, retry_times)
if ret:
print "The object %s is downloaded to %s, please check." % (object, localfile)
else:
print "Download object:%s failed!" % (object)
exit(-1)
def cmd_cat(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
res = get_oss().get_object(bucket, object)
if res.status == 200:
data = ""
while 1:
data = res.read(10240)
if len(data) != 0:
print data
else:
break
return res
def cmd_meta(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
headers = {}
res = get_oss().head_object(bucket, object, headers = headers)
if res.status == 200:
header_map = convert_header2map(res.getheaders())
width = 16
print "%s: %s" % ("objectname".ljust(width), object)
for key, value in header_map.items():
print "%s: %s" % (key.ljust(width), value)
return res
def cmd_info(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
res = get_oss().get_object_info(bucket, object)
if res.status == 200:
print res.read()
return res
def cmd_delete(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
return get_oss().delete_object(bucket, object)
def cmd_cancel(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
if options.upload_id is None:
print "upload_id invalid, please set with --upload_id=xxx"
sys.exit(1)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
return get_oss().cancel_upload(bucket, object, options.upload_id)
def cmd_sign_url(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
if options.timeout:
timeout = options.timeout
else:
timeout = "600"
print "timeout is %s seconds." % timeout
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
method = 'GET'
print get_oss().sign_url(method, bucket, object, int(timeout))
def cmd_configure(args, options):
if options.accessid is None or options.accesskey is None:
print "%s miss parameters, use --accessid=[accessid] --accesskey=[accesskey] to specify id/key pair" % args[0]
sys.exit(-1)
config = ConfigParser.RawConfigParser()
config.add_section(CONFIGSECTION)
if options.host is not None:
config.set(CONFIGSECTION, 'host', options.host)
config.set(CONFIGSECTION, 'accessid', options.accessid)
config.set(CONFIGSECTION, 'accesskey', options.accesskey)
if options.sts_token:
config.set(CONFIGSECTION, 'sts_token', options.sts_token)
cfgfile = open(CONFIGFILE, 'w+')
config.write(cfgfile)
print "Your configuration is saved into %s ." % CONFIGFILE
cfgfile.close()
import stat
os.chmod(CONFIGFILE, stat.S_IREAD | stat.S_IWRITE)
def cmd_help(args, options):
print HELP
def cmd_create_link(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
object_list = args[2:]
return get_oss().create_link_from_list(bucket, object, object_list)
def cmd_create_link_from_file(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
local_file = args[2]
if not os.path.isfile(local_file):
print "no such file:%s" % local_file
exit(-1)
f = open(local_file)
object_list = f.readlines()
f.close()
return get_oss().create_link_from_list(bucket, object, object_list)
def cmd_get_link_index(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
res = get_oss().get_link_index(bucket, object)
if res.status == 200:
print res.read()
return res
def cmd_create_group_from_file(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
local_file = args[2]
if not os.path.isfile(local_file):
print "no such file:%s" % local_file
exit(-1)
f = open(local_file)
object_list = f.readlines()
f.close()
part_msg_list = []
for i in range(len(object_list)):
object_list[i] = object_list[i].rstrip('\n')
res = get_oss().head_object(bucket, object_list[i])
if res.status != 200:
print "head object: ", object_list[i], ", ", res.status
print 'Create Group Fail!'
return res
header_map = convert_header2map(res.getheaders())
etag = safe_get_element("etag", header_map)
etag = etag.replace("\"", "")
list = [str(i), object_list[i], etag]
part_msg_list.append(list)
object_group_msg_xml = create_object_group_msg_xml(part_msg_list)
return get_oss().post_object_group(bucket, object, object_group_msg_xml)
def cmd_create_group(args, options):
check_args(3, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
object_list = args[2:]
part_msg_list = []
for i in range(len(object_list)):
res = get_oss().head_object(bucket, object_list[i])
if res.status != 200:
print "head object: ", object_list[i], ", ", res.status
print 'Create Group Fail!'
return res
header_map = convert_header2map(res.getheaders())
etag = safe_get_element("etag", header_map)
etag = etag.replace("\"", "")
list = [str(i), object_list[i], etag]
part_msg_list.append(list)
object_group_msg_xml = create_object_group_msg_xml(part_msg_list)
return get_oss().post_object_group(bucket, object, object_group_msg_xml)
def cmd_get_group_index(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
check_bucket_object(bucket, object)
res = get_oss().get_object_group_index(bucket, object)
if res.status == 200:
print res.read()
return res
def cmd_put_bucket_logging(args, options):
source_bucket = ''
target_bucket = ''
prefix = ''
check_args(2, args)
    source_bucket = parse_bucket(args[1])
    if len(args) >= 3:
        (target_bucket, prefix) = parse_bucket_object(args[2])
        target_bucket = parse_bucket(args[2])
    res = get_oss().put_logging(source_bucket, target_bucket, prefix)
return res
def cmd_get_bucket_logging(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_logging(bucket)
if res.status == 200:
print res.read()
return res
def cmd_put_bucket_website(args, options):
bucket = ''
indexfile = ''
errorfile = ''
check_args(3, args)
if len(args) >= 3:
indexfile = args[2]
if len(args) >= 4:
errorfile = args[3]
bucket = parse_bucket(args[1])
res = get_oss().put_website(bucket, indexfile, errorfile)
return res
def cmd_get_bucket_website(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_website(bucket)
if res.status == 200:
print res.read()
return res
def cmd_delete_bucket_website(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().delete_website(bucket)
return res
def cmd_delete_bucket_logging(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().delete_logging(bucket)
return res
def cmd_put_bucket_cors(args, options):
check_args(3, args)
bucket = parse_bucket(args[1])
local_file = args[2]
if not os.path.isfile(local_file):
print "no such file:%s" % local_file
exit(-1)
f = open(local_file)
content = f.read()
f.close()
return get_oss().put_cors(bucket, content)
def cmd_get_bucket_cors(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_cors(bucket)
if res.status == 200:
print res.read()
return res
def cmd_delete_bucket_cors(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().delete_cors(bucket)
return res
def cmd_options(args, options):
check_args(2, args)
(bucket, object) = parse_bucket_object(args[1])
headers = {}
is_ok = True
if options.origin:
headers['Origin'] = options.origin
else:
is_ok = False
method_list = ["GET", "PUT", "DELETE", "HEAD", "POST"]
if options.method:
if options.method not in method_list:
is_ok = False
else:
headers['Access-Control-Request-Method'] = options.method
else:
is_ok = False
if not is_ok:
print "please set origin and method with --origin=xxx --method=xxx, the value of --method SHOULD be one of %s" % (" ".join(method_list))
exit(-1)
res = get_oss().options(bucket, object, headers)
return res
def cmd_put_bucket_lifecycle(args, options):
check_args(3, args)
bucket = parse_bucket(args[1])
local_file = args[2]
if not os.path.isfile(local_file):
print "no such file:%s" % local_file
exit(-1)
f = open(local_file)
lifecycle_config = f.read()
f.close()
res = get_oss().put_lifecycle(bucket, lifecycle_config)
return res
def cmd_get_bucket_lifecycle(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_lifecycle(bucket)
if res.status == 200:
print res.read()
return res
def cmd_put_bucket_referer(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
allow_empty_referer = True
if options.allow_empty_referer and options.allow_empty_referer.lower() == "false":
allow_empty_referer = False
referer_list = []
if options.referer:
referer_list = options.referer.split(",")
res = get_oss().put_referer(bucket, allow_empty_referer, referer_list)
return res
def cmd_get_bucket_referer(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().get_referer(bucket)
if res.status == 200:
print res.read()
return res
def cmd_delete_bucket_lifecycle(args, options):
check_args(2, args)
bucket = parse_bucket(args[1])
res = get_oss().delete_lifecycle(bucket)
return res
def get_oss(show_bar = True):
oss = OssAPI(OSS_HOST, ID, KEY, sts_token=STS_TOKEN)
oss.show_bar = show_bar
oss.set_send_buf_size(SEND_BUF_SIZE)
oss.set_recv_buf_size(RECV_BUF_SIZE)
oss.set_debug(IS_DEBUG)
return oss
def setup_credentials(options):
config = ConfigParser.ConfigParser()
try:
config.read(CONFIGFILE)
global OSS_HOST
global ID
global KEY
global STS_TOKEN
try:
OSS_HOST = config.get(CONFIGSECTION, 'host')
except Exception:
OSS_HOST = DEFAUL_HOST
ID = config.get(CONFIGSECTION, 'accessid')
KEY = config.get(CONFIGSECTION, 'accesskey')
try:
STS_TOKEN = config.get(CONFIGSECTION, 'sts_token')
except:
pass
if options.accessid is not None:
ID = options.accessid
if options.accesskey is not None:
KEY = options.accesskey
if options.sts_token is not None:
STS_TOKEN = options.sts_token
if options.host is not None:
OSS_HOST = options.host
except Exception:
if options.accessid is not None:
ID = options.accessid
if options.accesskey is not None:
KEY = options.accesskey
if options.sts_token is not None:
STS_TOKEN = options.sts_token
if options.host is not None:
OSS_HOST = options.host
if len(ID) == 0 or len(KEY) == 0:
print "can't get accessid/accesskey, setup use : config --accessid=accessid --accesskey=accesskey"
sys.exit(1)
def setup_cmdlist():
CMD_LIST['GetAllBucket'] = cmd_getallbucket
CMD_LIST['CreateBucket'] = cmd_createbucket
CMD_LIST['DeleteBucket'] = cmd_deletebucket
CMD_LIST['DeleteWholeBucket'] = cmd_deletewholebucket
CMD_LIST['DeleteByFile'] = cmd_deletebyfile
CMD_LIST['GetBucketLocation'] = cmd_getbucketlocation
CMD_LIST['GetAcl'] = cmd_getacl
CMD_LIST['SetAcl'] = cmd_setacl
CMD_LIST['List'] = cmd_listing
CMD_LIST['MkDir'] = cmd_mkdir
CMD_LIST['Init'] = cmd_init_upload
CMD_LIST['UploadPartFromString'] = cmd_upload_part_from_string
CMD_LIST['UploadPartFromFile'] = cmd_upload_part_from_file
CMD_LIST['ListPart'] = cmd_listpart
CMD_LIST['ListParts'] = cmd_listparts
CMD_LIST['GetAllPartSize'] = cmd_getallpartsize
CMD_LIST['ListAllObject'] = cmd_listallobject
CMD_LIST['ListAllDir'] = cmd_listalldir
CMD_LIST['DownloadAllObject'] = cmd_downloadallobject
CMD_LIST['UploadObjectFromLocalDir'] = cmd_upload_object_from_localdir
CMD_LIST['UploadDisk'] = cmd_upload_disk
CMD_LIST['DeleteAllObject'] = cmd_deleteallobject
CMD_LIST['Put'] = cmd_put
CMD_LIST['Copy'] = cmd_copy
CMD_LIST['CopyLargeFile'] = cmd_upload_part_copy
CMD_LIST['CopyBucket'] = cmd_copy_bucket
CMD_LIST['Upload'] = cmd_upload
CMD_LIST['UploadGroup'] = cmd_upload_group
CMD_LIST['MultiUpload'] = cmd_multi_upload
CMD_LIST['Get'] = cmd_get
CMD_LIST['MultiGet'] = cmd_multi_get
CMD_LIST['Cat'] = cmd_cat
CMD_LIST['Meta'] = cmd_meta
CMD_LIST['Info'] = cmd_info
CMD_LIST['Delete'] = cmd_delete
CMD_LIST['Cancel'] = cmd_cancel
CMD_LIST['Config'] = cmd_configure
CMD_LIST['Help'] = cmd_help
CMD_LIST['SignUrl'] = cmd_sign_url
CMD_LIST['CreateLink'] = cmd_create_link
CMD_LIST['CreateLinkFromFile'] = cmd_create_link_from_file
CMD_LIST['GetLinkIndex'] = cmd_get_link_index
CMD_LIST['CreateGroup'] = cmd_create_group
CMD_LIST['CreateGroupFromFile'] = cmd_create_group_from_file
CMD_LIST['GetGroupIndex'] = cmd_get_group_index
CMD_LIST['PutBucketLogging'] = cmd_put_bucket_logging
CMD_LIST['GetBucketLogging'] = cmd_get_bucket_logging
CMD_LIST['DeleteBucketLogging'] = cmd_delete_bucket_logging
CMD_LIST['PutBucketWebsite'] = cmd_put_bucket_website
CMD_LIST['GetBucketWebsite'] = cmd_get_bucket_website
CMD_LIST['DeleteBucketWebsite'] = cmd_delete_bucket_website
CMD_LIST['PutBucketCors'] = cmd_put_bucket_cors
CMD_LIST['GetBucketCors'] = cmd_get_bucket_cors
CMD_LIST['DeleteBucketCors'] = cmd_delete_bucket_cors
CMD_LIST['Options'] = cmd_options
CMD_LIST['PutBucketLifeCycle'] = cmd_put_bucket_lifecycle
CMD_LIST['GetBucketLifeCycle'] = cmd_get_bucket_lifecycle
CMD_LIST['DeleteBucketLifeCycle'] = cmd_delete_bucket_lifecycle
CMD_LIST['PutBucketReferer'] = cmd_put_bucket_referer
CMD_LIST['GetBucketReferer'] = cmd_get_bucket_referer
def getSuitableKeyValues(keyValues):
newMap = dict()
if keyValues is not None and isinstance(keyValues,dict):
keys = keyValues.keys()
for key in keys:
value = keyValues.get(key)
if value is not None and isinstance(value,list) and len(value)>0:
value = value[0]
newMap[key] = value
return newMap
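# Illustrative flattening performed above (shapes assumed from aliyunCliParser
# output): {'timeout': ['600'], 'force': [True]} -> {'timeout': '600', 'force': True}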
def getParameterList():
parametersList = ['origin','sts_token', 'force', 'recv_buf_size', 'accesskey', 'part_size', 'retry_times',\
'replace', 'thread_num', 'marker', 'exclude','skip_dir', 'out', 'check_point', 'strip_dir',\
'check_md5','delimiter', 'skip_suffix', 'maxkeys', 'filename_list', 'location', 'temp_dir', \
'method', 'config_file', 'accessid', 'continue_download', 'allow_empty_referer','host',\
'referer', 'content_type', 'data', 'device_id', 'max_part_num', 'acl','headers',\
'part_number', 'upload_id', 'send_buf_size', 'timeout', 'debug', 'multipart_threshold']
return parametersList
def initKeyValues(parametersList):
newMap = dict.fromkeys(parametersList)
return newMap
def getParametersKV(keyValues,parameters):
if isinstance(keyValues,dict) and isinstance(parameters,dict):
keys = parameters.keys()
for item in keyValues:
if item in keys:
parameters[item] = keyValues[item]
return parameters
def getOptionsFromDict(parameters):
if isinstance(parameters,dict):
options = Values(parameters)
return options
def getOperations(operation):
list = []
if operation is not None:
list.append(operation)
return list
def getAvailableOperations():
setup_cmdlist()
return CMD_LIST.keys()
def handleOss():
parser = aliyunCliParser()
operation = parser._getOperations()
keyValues = parser._getKeyValues()
keyValues = parser.getOpenApiKeyValues(keyValues)
keyValues = getSuitableKeyValues(keyValues)
parameterList = getParameterList()
parameters = initKeyValues(parameterList)
parameters = getParametersKV(keyValues,parameters)
options = getOptionsFromDict(parameters)
args = operation
setup_cmdlist()
if args is None or len(args) < 1 or args[0] in HELP_CMD_LIST:
print HELP
sys.exit(1)
if args[0] not in CMD_LIST.keys():
print "unsupported command : %s " % args[0]
print HELP
sys.exit(1)
    # these CLI options override module-level defaults, so declare them global
    global CONFIGFILE, IS_DEBUG, SEND_BUF_SIZE, RECV_BUF_SIZE
    if options.config_file is not None:
        CONFIGFILE = options.config_file
if options.debug is not None:
debug = options.debug
if debug.lower() == "true":
IS_DEBUG = True
else:
IS_DEBUG = False
if options.send_buf_size is not None:
try:
SEND_BUF_SIZE = (int)(options.send_buf_size)
except ValueError:
pass
if options.recv_buf_size is not None:
try:
RECV_BUF_SIZE = (int)(options.recv_buf_size)
except ValueError:
pass
if options.upload_id is not None:
check_upload_id(options.upload_id)
if args[0] != 'Config':
setup_credentials(options)
else:
CMD_LIST['Config'](args, options)
sys.exit(0)
cmd = args[0]
begin = time.time()
try:
res = CMD_LIST[cmd](args, options)
print_result(cmd, res)
except socket.timeout:
print "Socket timeout, please try again later."
sys.exit(1)
except socket.error, args:
print "Connect to oss failed: %s.\nplease check the host name you provided could be reached.\ne.g:" % (args)
print "\tcurl %s\nor\n\tping %s\n" % (OSS_HOST, OSS_HOST)
sys.exit(1)
end = time.time()
sys.stderr.write("%.3f(s) elapsed\n" % (end - begin))
if __name__ == '__main__':
handleOss()
|
lufornpy/aliyun-cli
|
aliyuncli/ossadp/ossHandler.py
|
Python
|
apache-2.0
| 88,246
|
from __future__ import annotations
from dataclasses import dataclass
import logging
import json
import numbers
from typing import (Any, Dict, List, Union, Tuple,
Sequence, TYPE_CHECKING)
from opentrons import config
from opentrons.config import feature_flags as ff
from opentrons_shared_data.pipette import (
model_config, name_config, fuse_specs)
if TYPE_CHECKING:
from opentrons_shared_data.pipette.dev_types import (
PipetteName, PipetteModel, UlPerMm, Quirk, PipetteFusedSpec
)
log = logging.getLogger(__name__)
@dataclass
class PipetteConfig:
top: float
bottom: float
blow_out: float
drop_tip: float
pick_up_current: float
pick_up_distance: float
pick_up_increment: float
pick_up_presses: int
pick_up_speed: float
aspirate_flow_rate: float
dispense_flow_rate: float
channels: float
model_offset: Tuple[float, float, float]
plunger_current: float
drop_tip_current: float
drop_tip_speed: float
min_volume: float
max_volume: float
ul_per_mm: UlPerMm
quirks: List[Quirk]
tip_length: float # TODO(seth): remove
# TODO: Replace entirely with tip length calibration
tip_overlap: Dict[str, float]
display_name: str
name: PipetteName
back_compat_names: List[PipetteName]
return_tip_height: float
blow_out_flow_rate: float
max_travel: float
home_position: float
steps_per_mm: float
idle_current: float
default_blow_out_flow_rates: Dict[str, float]
default_aspirate_flow_rates: Dict[str, float]
default_dispense_flow_rates: Dict[str, float]
model: PipetteModel
# Notes:
# - multi-channel pipettes share the same dimensional offsets
# - single-channel pipettes have different lengths
# - Default aspirate/dispense times (seconds to move a pipette's full volume)
#   were chosen to mimic normal human pipetting motions. However, accurate
#   speeds depend on the environment (e.g. liquid viscosity), so a pipette's
#   flow rates (ul/sec) should be set by the protocol writer.
# Multi-channel y offset calculations:
DISTANCE_BETWEEN_NOZZLES = 9
NUM_MULTI_CHANNEL_NOZZLES = 8
MULTI_LENGTH = (NUM_MULTI_CHANNEL_NOZZLES - 1) * DISTANCE_BETWEEN_NOZZLES
Y_OFFSET_MULTI = MULTI_LENGTH / 2
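# i.e. (8 - 1) * 9 = 63 mm between the outermost nozzles, so the geometric
# centre sits 31.5 mm from the back nozzle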
Z_OFFSET_MULTI = -25.8
Z_OFFSET_P10 = -13 # longest single-channel pipette
Z_OFFSET_P50 = 0
Z_OFFSET_P300 = 0
Z_OFFSET_P1000 = 20 # shortest single-channel pipette
LOW_CURRENT_DEFAULT = 0.05
config_models = list(model_config()['config'].keys())
config_names = list(name_config().keys())
configs = model_config()['config']
#: A list of pipette model names for which we have config entries
MUTABLE_CONFIGS = model_config()['mutableConfigs']
#: A list of mutable configs for pipettes
VALID_QUIRKS = model_config()['validQuirks']
#: A list of valid quirks for pipettes
def load(
pipette_model: PipetteModel,
pipette_id: str = None) -> PipetteConfig:
"""
Load pipette config data
This function loads from a combination of
- the pipetteModelSpecs.json file in the wheel (should never be edited)
- the pipetteNameSpecs.json file in the wheel (should never be edited)
- any config overrides found in
``opentrons.config.CONFIG['pipette_config_overrides_dir']``
This function reads from disk each time, so changes to the overrides
will be picked up in subsequent calls.
    :param str pipette_model: The pipette model name (e.g. "p10_single_v1.3")
for which to load configuration
:param pipette_id: An (optional) unique ID for the pipette to locate
config overrides. If the ID is not specified, the system
assumes this is a simulated pipette and does not
save settings. If the ID is specified but no overrides
corresponding to the ID are found, the system creates a
new overrides file for it.
:type pipette_id: str or None
:raises KeyError: if ``pipette_model`` is not in the top-level keys of
the pipetteModelSpecs.json file (and therefore not in
:py:attr:`configs`)
:returns PipetteConfig: The configuration, loaded and checked
"""
# Load the model config and update with the name config
cfg = fuse_specs(pipette_model)
# Load overrides if we have a pipette id
if pipette_id:
try:
override = load_overrides(pipette_id)
if 'quirks' in override.keys():
override['quirks'] = [
qname for qname, qval in override['quirks'].items()
if qval]
for legacy_key in (
'defaultAspirateFlowRate',
'defaultDispenseFlowRate',
'defaultBlowOutFlowRate'):
override.pop(legacy_key, None)
except FileNotFoundError:
save_overrides(pipette_id, {}, pipette_model)
log.info(
"Save defaults for pipette model {} and id {}".format(
pipette_model, pipette_id))
else:
cfg.update(override) # type: ignore
# the ulPerMm functions are structured in pipetteModelSpecs.json as
# a list sorted from oldest to newest. That means the latest functions
# are always the last element and, as of right now, the older ones are
# the first element (for models that only have one function, the first
# and last elements are the same, which is fine). If we add more in the
# future, we’ll have to change this code to select items more
# intelligently
if ff.use_old_aspiration_functions():
log.debug("Using old aspiration functions")
ul_per_mm = cfg['ulPerMm'][0]
else:
ul_per_mm = cfg['ulPerMm'][-1]
smoothie_configs = cfg['smoothieConfigs']
res = PipetteConfig(
top=ensure_value(
cfg, 'top', MUTABLE_CONFIGS),
bottom=ensure_value(
cfg, 'bottom', MUTABLE_CONFIGS),
blow_out=ensure_value(
cfg, 'blowout', MUTABLE_CONFIGS),
drop_tip=ensure_value(
cfg, 'dropTip', MUTABLE_CONFIGS),
pick_up_current=ensure_value(cfg, 'pickUpCurrent', MUTABLE_CONFIGS),
pick_up_distance=ensure_value(cfg, 'pickUpDistance', MUTABLE_CONFIGS),
pick_up_increment=ensure_value(
cfg, 'pickUpIncrement', MUTABLE_CONFIGS),
pick_up_presses=ensure_value(cfg, 'pickUpPresses', MUTABLE_CONFIGS),
pick_up_speed=ensure_value(cfg, 'pickUpSpeed', MUTABLE_CONFIGS),
aspirate_flow_rate=cfg['defaultAspirateFlowRate']['value'],
dispense_flow_rate=cfg['defaultDispenseFlowRate']['value'],
channels=ensure_value(cfg, 'channels', MUTABLE_CONFIGS),
model_offset=ensure_value(cfg, 'modelOffset', MUTABLE_CONFIGS),
plunger_current=ensure_value(cfg, 'plungerCurrent', MUTABLE_CONFIGS),
drop_tip_current=ensure_value(cfg, 'dropTipCurrent', MUTABLE_CONFIGS),
drop_tip_speed=ensure_value(cfg, 'dropTipSpeed', MUTABLE_CONFIGS),
min_volume=ensure_value(cfg, 'minVolume', MUTABLE_CONFIGS),
max_volume=ensure_value(cfg, 'maxVolume', MUTABLE_CONFIGS),
ul_per_mm=ul_per_mm,
quirks=validate_quirks(ensure_value(cfg, 'quirks', MUTABLE_CONFIGS)),
tip_overlap=cfg['tipOverlap'],
tip_length=ensure_value(cfg, 'tipLength', MUTABLE_CONFIGS),
display_name=ensure_value(cfg, 'displayName', MUTABLE_CONFIGS),
name=cfg['name'],
back_compat_names=cfg.get('backCompatNames', []),
return_tip_height=cfg.get('returnTipHeight', 0.5),
blow_out_flow_rate=cfg['defaultBlowOutFlowRate']['value'],
max_travel=smoothie_configs['travelDistance'],
home_position=smoothie_configs['homePosition'],
steps_per_mm=smoothie_configs['stepsPerMM'],
idle_current=cfg.get('idleCurrent', LOW_CURRENT_DEFAULT),
default_blow_out_flow_rates=cfg['defaultBlowOutFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultBlowOutFlowRate']['value']}),
default_dispense_flow_rates=cfg['defaultDispenseFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultDispenseFlowRate']['value']}),
default_aspirate_flow_rates=cfg['defaultAspirateFlowRate'].get(
'valuesByApiLevel',
{'2.0': cfg['defaultAspirateFlowRate']['value']}),
model=pipette_model,
)
return res
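# Minimal usage sketch (the model name is illustrative and must be a key in
# pipetteModelSpecs.json; omitting pipette_id means overrides are neither
# loaded nor saved):
#
#   cfg = load('p300_single_v2.0')
#   cfg.channels, cfg.max_volume  # e.g. 1 and 300.0 for that model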
def piecewise_volume_conversion(
ul: float, sequence: List[List[float]]) -> float:
"""
Takes a volume in microliters and a sequence representing a piecewise
function for the slope and y-intercept of a ul/mm function, where each
sub-list in the sequence contains:
    - the max volume for the piece of the function (minimum implied from the
      max of the previous item, or 0)
- the slope of the segment
- the y-intercept of the segment
:return: the ul/mm value for the specified volume
"""
# pick the first item from the seq for which the target is less than
# the bracketing element
i = list(filter(lambda x: ul <= x[0], sequence))[0]
# use that element to calculate the movement distance in mm
return i[1]*ul + i[2]
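# Worked example with illustrative numbers: for sequence
# [[50, 0.05, 0.1], [300, 0.06, 0.0]] and ul=40, the first piece (max 50)
# brackets the target, so the result is 0.05 * 40 + 0.1 = 2.1.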
TypeOverrides = Dict[str, Union[float, bool, None]]
def validate_overrides(data: TypeOverrides,
config_model: Dict) -> None:
"""
Check that override fields are valid.
:param data: a dict of field name to value
:param config_model: the configuration for the chosen model
:raises ValueError: If a value is invalid
"""
for key, value in data.items():
field_config = config_model.get(key)
is_quirk = key in config_model['quirks']
if is_quirk:
# If it's a quirk it must be a bool or None
if value is not None and not isinstance(value, bool):
raise ValueError(f'{value} is invalid for {key}')
elif not field_config:
# If it's not a quirk we must have a field config
raise ValueError(f'Unknown field {key}')
elif value is not None:
# If value is not None it must be numeric and between min and max
if not isinstance(value, numbers.Number):
raise ValueError(f'{value} is invalid for {key}')
elif value < field_config['min'] or value > field_config['max']:
raise ValueError(f'{key} out of range with {value}')
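# Illustrative call (the field name is assumed to exist in the model's mutable
# configs): validate_overrides({'pickUpCurrent': 0.8}, config_match) passes
# only if 0.8 lies within that field's min/max; quirk keys accept bool or None.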
def override(pipette_id: str, fields: TypeOverrides):
"""
Override configuration for pipette. Validate then save.
:param pipette_id: The pipette id
:param fields: Dict of field name to override value
"""
config_match = list_mutable_configs(pipette_id)
whole_config, model = load_config_dict(pipette_id)
validate_overrides(data=fields, config_model=config_match)
save_overrides(pipette_id, fields, model)
def save_overrides(pipette_id: str,
overrides: TypeOverrides,
model: PipetteModel):
"""
Save overrides for the pipette.
:param pipette_id: The pipette id
:param overrides: The incoming values
:param model: The model of pipette
:return: None
"""
override_dir = config.CONFIG['pipette_config_overrides_dir']
model_configs = configs[model]
model_configs_quirks = {key: True for key in model_configs['quirks']}
try:
existing = load_overrides(pipette_id)
# Add quirks setting for pipettes already with a pipette id file
if 'quirks' not in existing.keys():
existing['quirks'] = model_configs_quirks
except FileNotFoundError:
existing = model_configs_quirks # type: ignore
for key, value in overrides.items():
# If an existing override is saved as null from endpoint, remove from
# overrides file
if value is None:
if existing.get(key):
del existing[key]
elif isinstance(value, bool):
existing, model_configs = change_quirks(
{key: value}, existing, model_configs)
else:
# type ignores are here because mypy needs typed dict accesses to
# be string literals sadly enough
model_config_value = model_configs[key] # type: ignore
if not model_config_value.get('default'):
model_config_value['default']\
= model_config_value['value']
model_config_value['value'] = value
existing[key] = model_config_value
assert model in config_models
existing['model'] = model
json.dump(existing, (override_dir/f'{pipette_id}.json').open('w'))
def change_quirks(override_quirks, existing, model_configs):
if not existing.get('quirks'):
# ensure quirk key exists
existing['quirks'] = override_quirks
for quirk, setting in override_quirks.items():
# setting values again if above case true, but
# meant for use-cases where we may only be given an update
# for one setting
existing['quirks'][quirk] = setting
if setting not in model_configs['quirks']:
model_configs['quirks'].append(quirk)
elif not setting:
model_configs['quirks'].remove(quirk)
return existing, model_configs
def load_overrides(pipette_id: str) -> Dict[str, Any]:
overrides = config.CONFIG['pipette_config_overrides_dir']
fi = (overrides/f'{pipette_id}.json').open()
try:
return json.load(fi)
except json.JSONDecodeError as e:
log.warning(f'pipette override for {pipette_id} is corrupt: {e}')
(overrides/f'{pipette_id}.json').unlink()
raise FileNotFoundError(str(overrides/f'{pipette_id}.json'))
def validate_quirks(quirks: List[str]):
valid_quirks = []
for quirk in quirks:
if quirk in VALID_QUIRKS:
valid_quirks.append(quirk)
else:
log.warning(f'{quirk} is not a valid quirk')
return valid_quirks
def ensure_value(
config: PipetteFusedSpec,
name: Union[str, Tuple[str, ...]],
mutable_config_list: List[str]):
"""
Pull value of config data from file. Shape can either be a dictionary with
a value key -- indicating that it can be changed -- or another
data structure such as an array.
"""
if not isinstance(name, tuple):
path: Tuple[str, ...] = (name,)
else:
path = name
for element in path[:-1]:
config = config[element] # type: ignore
value = config[path[-1]] # type: ignore
if path[-1] != 'quirks' and path[-1] in mutable_config_list:
value = value['value']
return value
def known_pipettes() -> Sequence[str]:
""" List pipette IDs for which we have known overrides """
return [fi.stem
for fi in config.CONFIG['pipette_config_overrides_dir'].iterdir()
if fi.is_file() and '.json' in fi.suffixes]
def add_default(cfg):
if isinstance(cfg, dict):
if 'value' in cfg.keys():
cfg['default'] = cfg['value']
else:
for top_level_key in cfg.keys():
add_default(cfg[top_level_key])
def load_config_dict(pipette_id: str) -> Tuple[
'PipetteFusedSpec', 'PipetteModel']:
""" Give updated config with overrides for a pipette. This will add
the default value for a mutable config before returning the modified
config value.
"""
override = load_overrides(pipette_id)
model = override['model']
config = fuse_specs(model)
if 'quirks' not in override.keys():
override['quirks'] = {key: True for key in config['quirks']}
for top_level_key in config.keys():
if top_level_key != 'quirks':
add_default(config[top_level_key]) # type: ignore
config.update(override) # type: ignore
return config, model
def list_mutable_configs(pipette_id: str) -> Dict[str, Any]:
"""
Returns dict of mutable configs only.
"""
cfg: Dict[str, Any] = {}
if pipette_id in known_pipettes():
config, model = load_config_dict(pipette_id)
else:
log.info(f'Pipette id {pipette_id} not found')
return cfg
for key in config:
if key in MUTABLE_CONFIGS:
cfg[key] = config[key] # type: ignore
return cfg
|
Opentrons/labware
|
api/src/opentrons/config/pipette_config.py
|
Python
|
apache-2.0
| 16,434
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Users(horizon.Panel):
name = _("Users")
slug = 'users'
dashboard.Admin.register(Users)
|
JioCloud/horizon
|
openstack_dashboard/dashboards/admin/users/panel.py
|
Python
|
apache-2.0
| 1,001
|
# pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(self, datasource_name=None, merge_flag=False):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
for datasource in self.get_datasources():
if datasource not in conf.get('DRUID_DATA_SOURCE_BLACKLIST', []):
if not datasource_name or datasource_name == datasource:
DruidDatasource.sync_to_db(datasource, self, merge_flag)
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
M = DruidMetric # noqa
metrics = []
metrics.append(DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
))
        # Normalize FLOAT to DOUBLE so the derived aggregator type names below
        # (doubleSum, doubleMin, ...) match Druid's aggregators.
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics.append(DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
))
else:
mt = 'count_distinct'
metrics.append(DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
))
session = get_session()
new_metrics = []
for metric in metrics:
m = (
session.query(M)
.filter(M.metric_name == metric.metric_name)
.filter(M.datasource_name == self.datasource_name)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.first()
)
metric.datasource_name = self.datasource_name
if not m:
new_metrics.append(metric)
session.add(metric)
session.flush()
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
query_langtage = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
        Metrics, columns and the datasource will be overridden if they exist.
        This function can be used to import/export dashboards between multiple
        superset instances. Audit metadata isn't copied over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
return v1nums[0] > v2nums[0] or \
(v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or \
(v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and v1nums[2] > v2nums[2])
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
        # Query segmentMetadata for the last 7 days. For Druid versions before
        # 0.8.2 the right bound must end more than 1 day ago to exclude
        # realtime segments, which triggered a bug (fixed in druid 0.8.2).
        # https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
rbound = max_time.isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=conf.get('DRUID_ANALYSIS_TYPES'))
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
rbound = datetime(2050, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=conf.get('DRUID_ANALYSIS_TYPES'))
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
for col in self.columns:
col.generate_metrics()
@classmethod
def sync_to_db_from_config(cls, druid_config, user, cluster):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session()
datasource = (
session.query(cls)
.filter_by(
datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
dimensions = druid_config['dimensions']
for dim in dimensions:
col_obj = (
session.query(DruidColumn)
.filter_by(
datasource_name=druid_config['name'],
column_name=dim)
.first()
)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = (
session.query(DruidMetric)
.filter_by(
datasource_name=druid_config['name'],
metric_name=metric_name)
).first()
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@classmethod
def sync_to_db(cls, name, cluster, merge):
"""Fetches metadata for that datasource and merges the Superset db"""
logging.info("Syncing Druid datasource [{}]".format(name))
session = get_session()
datasource = session.query(cls).filter_by(datasource_name=name).first()
if not datasource:
datasource = cls(datasource_name=name)
session.add(datasource)
flasher("Adding new datasource [{}]".format(name), "success")
else:
flasher("Refreshing datasource [{}]".format(name), "info")
session.flush()
datasource.cluster = cluster
datasource.merge_flag = merge
session.flush()
cols = datasource.latest_metadata()
if not cols:
logging.error("Failed at fetching the latest segment")
return
for col in cols:
# Skip the time column
if col == "__time":
continue
col_obj = (
session
.query(DruidColumn)
.filter_by(datasource_name=name, column_name=col)
.first()
)
datatype = cols[col]['type']
if not col_obj:
col_obj = DruidColumn(datasource_name=name, column_name=col)
session.add(col_obj)
if datatype == "STRING":
col_obj.groupby = True
col_obj.filterable = True
if datatype == "hyperUnique" or datatype == "thetaSketch":
col_obj.count_distinct = True
# If long or double, allow sum/min/max
if datatype == "LONG" or datatype == "DOUBLE":
col_obj.sum = True
col_obj.min = True
col_obj.max = True
if col_obj:
col_obj.type = cols[col]['type']
session.flush()
col_obj.datasource = datasource
col_obj.generate_metrics()
session.flush()
@staticmethod
def time_offset(granularity):
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
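    # e.g. time_offset('week_ending_saturday') returns 518400000 (6 days in
    # milliseconds); every other granularity returns 0.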
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
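    # Illustrative result of the mapping above:
    #   granularity('1 hour', timezone='UTC')
    #   -> {'type': 'period', 'timeZone': 'UTC', 'period': 'PT1H'}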
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
all_metrics = []
post_aggs = {}
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(metrics, metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
orig_filters = filters
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1:
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
qry['dimension'] = list(qry.get('dimensions'))[0]
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
# Limit on the number of timeseries, doing a two-phases query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": timeseries_limit,
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": "descending",
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
                query_str += (
                    "// Phase 2 (built based on phase one's results)\n")
df = client.export_pandas()
if df is not None and not df.empty:
dims = qry['dimensions']
filters = []
for unused, row in df.iterrows():
fields = []
for dim in dims:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
filt = Filter(type="and", fields=fields)
filters.append(filt)
elif fields:
filters.append(fields[0])
if filters:
ff = Filter(type="or", fields=filters)
if not orig_filters:
qry['filter'] = ff
else:
qry['filter'] = Filter(type="and", fields=[
ff,
orig_filters])
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": "descending",
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str
def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
is_timeseries = query_obj['is_timeseries'] \
if 'is_timeseries' in query_obj else True
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])
def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)
if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
def get_filters(self, raw_filters): # noqa
filters = None
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
eq = [
types.replace("'", '').strip()
if isinstance(types, string_types)
else types
for types in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
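    # Illustrative input/output for get_filters (an assumption added for
    # clarity, not part of the original module): a raw filter list such as
    #   [{'col': 'country', 'op': 'in', 'val': ['US', 'FR']}]
    # is translated into pydruid objects roughly equivalent to
    #   Filter(type="or", fields=[Dimension('country') == 'US',
    #                             Dimension('country') == 'FR'])
    # and successive raw filters are AND-ed together.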
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond
def get_having_filters(self, raw_filters):
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters
@classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.id)
.filter_by(datasource_name=datasource_name)
.all()
)
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)
|
nekia/incubator-superset-dev
|
superset/connectors/druid/models.py
|
Python
|
apache-2.0
| 40,706
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_maps_ext(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
policy = ET.SubElement(output, "policy")
policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rbridgeid = ET.SubElement(rules, "rbridgeid")
rbridgeid.text = kwargs.pop('rbridgeid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rulename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rulename = ET.SubElement(rules, "rulename")
rulename.text = kwargs.pop('rulename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_groupname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
groupname = ET.SubElement(rules, "groupname")
groupname.text = kwargs.pop('groupname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_monitor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
monitor = ET.SubElement(rules, "monitor")
monitor.text = kwargs.pop('monitor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_op(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
op = ET.SubElement(rules, "op")
op.text = kwargs.pop('op')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
value = ET.SubElement(rules, "value")
value.text = kwargs.pop('value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
action = ET.SubElement(rules, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
timebase = ET.SubElement(rules, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
input = ET.SubElement(maps_get_all_policy, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_all_policy_output_policy_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_all_policy = ET.Element("maps_get_all_policy")
config = maps_get_all_policy
output = ET.SubElement(maps_get_all_policy, "output")
policy = ET.SubElement(output, "policy")
policyname = ET.SubElement(policy, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
input = ET.SubElement(maps_get_rules, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rbridgeid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rbridgeid = ET.SubElement(rules, "rbridgeid")
rbridgeid.text = kwargs.pop('rbridgeid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_rulename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
rulename = ET.SubElement(rules, "rulename")
rulename.text = kwargs.pop('rulename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_groupname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
groupname = ET.SubElement(rules, "groupname")
groupname.text = kwargs.pop('groupname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_monitor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
monitor = ET.SubElement(rules, "monitor")
monitor.text = kwargs.pop('monitor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_op(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
op = ET.SubElement(rules, "op")
op.text = kwargs.pop('op')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
value = ET.SubElement(rules, "value")
value.text = kwargs.pop('value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
action = ET.SubElement(rules, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
timebase = ET.SubElement(rules, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config)
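# Illustrative usage (an assumption, not part of the generated module): each
# helper builds an ElementTree "config" element and hands it to the callback
# supplied at construction time, e.g.
#   dev = brocade_maps_ext(callback=lambda cfg: ET.tostring(cfg))
#   dev.maps_get_all_policy_input_rbridge_id(rbridge_id='1')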
|
BRCDcomm/pynos
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_maps_ext.py
|
Python
|
apache-2.0
| 12,855
|
# coding=utf-8
'''
cron trigger
@author: Huiyugeng
'''
import datetime
import trigger
class CronTrigger(trigger.Trigger):
def __init__(self, cron):
        trigger.Trigger.__init__(self, 0, 1)
self.cron = cron
def _is_match(self):
parser = CronParser(self.cron)
_date = datetime.date.today()
_time = datetime.datetime.now()
return parser._is_match(_date, _time)
class CronParser():
def __init__(self, cron):
cron_item = cron.split(' ')
if len(cron_item) == 6 or len(cron_item) == 7:
self.second_set = self._parse_integer(cron_item[0], 0, 59)
self.minute_set = self._parse_integer(cron_item[1], 0, 59)
self.hour_set = self._parse_integer(cron_item[2], 0, 23)
self.day_of_month_set = self._parse_integer(cron_item[3], 1, 31)
self.month_set = self._parse_month(cron_item[4])
self.day_of_week_set = self._parse_day_of_week(cron_item[5])
            if len(cron_item) == 7:
                self.year_set = self._parse_integer(cron_item[6], 1970, 2100)
            else:
                # default to every supported year when the optional 7th field
                # is omitted, so _is_match() can always check _date.year
                self.year_set = self._parse_integer('*', 1970, 2100)
def _parse_integer(self, value, min_val, max_val):
result = []
range_items = []
if ',' in value:
range_items = value.split(',')
else:
range_items.append(value)
for range_item in range_items:
temp_result = []
interval = 1
if '/' in range_item:
temp = range_item.split('/')
range_item = temp[0]
interval = int(temp[1])
if interval < 1:
interval = 1
if '*' in range_item:
temp_result.extend(self._add_to_set(min_val, max_val))
elif '-' in range_item:
item = range_item.split('-')
temp_result.extend(self._add_to_set(int(item[0]), int(item[1])))
else:
temp_result.append(int(range_item))
count = 0
for item in temp_result:
if count % interval == 0:
result.append(item)
count = count + 1
return result
def _add_to_set(self, start, end):
result = [i for i in range(start, end + 1)]
return result
def _parse_month(self, value):
months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
for i in range(0, 12):
value = value.replace(months[i], str(i + 1))
        return self._parse_integer(value, 1, 12)
def _parse_day_of_week(self, value):
day_of_weeks = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]
for i in range(0, 7):
            value = value.replace(day_of_weeks[i], str(i + 1))
        return self._parse_integer(value, 1, 7)
def _is_match(self, _date, _time):
# In Python datetime's weekday Monday is 0 and Sunday is 6
day_of_week = _date.weekday() + 1
result = True and \
_time.second in self.second_set and \
_time.minute in self.minute_set and \
_time.hour in self.hour_set and \
_date.day in self.day_of_month_set and \
_date.month in self.month_set and \
_date.year in self.year_set and \
day_of_week in self.day_of_week_set
return result
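# Illustrative usage (an assumption, not part of the original module): the
# expression uses 6 fields, second/minute/hour/day-of-month/month/day-of-week,
# with an optional 7th year field, e.g.
#   parser = CronParser('0 */5 9-17 * * MON-FRI')
#   parser._is_match(datetime.date.today(), datetime.datetime.now())
# returns True only at second 0 of every 5th minute between 09:00 and 17:59
# on weekdays.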
|
interhui/py_task
|
task/trigger/cron_trigger.py
|
Python
|
artistic-2.0
| 3,634
|
from urllib.parse import quote_plus
import praw
QUESTIONS = ['what is', 'who is', 'what are']
REPLY_TEMPLATE = '[Let me google that for you](http://lmgtfy.com/?q={})'
def main():
reddit = praw.Reddit(user_agent='LMGTFY (by /u/USERNAME)',
client_id='CLIENT_ID', client_secret="CLIENT_SECRET",
username='USERNAME', password='PASSWORD')
subreddit = reddit.subreddit('AskReddit')
for submission in subreddit.stream.submissions():
process_submission(submission)
def process_submission(submission):
# Ignore titles with more than 10 words as they probably are not simple
# questions.
if len(submission.title.split()) > 10:
return
normalized_title = submission.title.lower()
for question_phrase in QUESTIONS:
if question_phrase in normalized_title:
url_title = quote_plus(submission.title)
reply_text = REPLY_TEMPLATE.format(url_title)
print('Replying to: {}'.format(submission.title))
submission.reply(reply_text)
# A reply has been made so do not attempt to match other phrases.
break
if __name__ == '__main__':
main()
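# Illustrative output (an assumption, not part of the original script): a
# submission titled "what is praw" would receive the reply
#   [Let me google that for you](http://lmgtfy.com/?q=what+is+praw)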
|
RGood/praw
|
docs/examples/lmgtfy_bot.py
|
Python
|
bsd-2-clause
| 1,206
|
from django.contrib.auth.models import User
from django.core import mail
from django.test.client import RequestFactory
from mock import ANY, patch
from nose.tools import eq_, ok_
from test_utils import TestCase
from remo.dashboard.forms import EmailRepsForm
from remo.profiles.tests import FunctionalAreaFactory, UserFactory
class EmailRepsFormsTest(TestCase):
def setUp(self):
self.functional_area = FunctionalAreaFactory.create()
def test_form_tampered_functional_area(self):
"""Test form with tampered data in functional area field."""
data = {'subject': 'Test email subject',
'body': None,
'functional_area': 'Non existing functional area'}
form = EmailRepsForm(data=data)
ok_(not form.is_valid())
eq_(len(form.errors['functional_area']), 1)
@patch('remo.dashboard.forms.messages.success')
def test_send_mail(self, fake_messages):
"""Test EmailRepsForm email sending functionality."""
data = {'subject': 'Test email subject',
'body': 'Test email body',
'functional_area': self.functional_area.id}
form = EmailRepsForm(data=data)
ok_(form.is_valid())
area = self.functional_area
UserFactory.create_batch(20, userprofile__functional_areas=[area])
factory = RequestFactory()
request = factory.request()
request.user = UserFactory.create()
reps = User.objects.filter(userprofile__functional_areas__name=area)
form.send_email(request, reps)
eq_(len(mail.outbox), 20)
def format_name(user):
return '%s %s <%s>' % (user.first_name, user.last_name, user.email)
recipients = map(format_name, reps)
receivers = []
for i in range(0, len(mail.outbox)):
eq_(mail.outbox[i].subject, data['subject'])
eq_(mail.outbox[i].body, data['body'])
receivers.append(mail.outbox[i].to[0])
eq_(set(receivers), set(recipients))
fake_messages.assert_called_with(ANY, 'Email sent successfully.')
|
chirilo/remo
|
remo/dashboard/tests/test_forms.py
|
Python
|
bsd-3-clause
| 2,108
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.files.storage import FileSystemStorage
from django.forms import Form
from django.template.response import SimpleTemplateResponse
from django.urls import NoReverseMatch
from formtools.wizard.views import SessionWizardView
from cms.models import Page
from cms.utils import get_current_site
from cms.utils.i18n import get_site_language_from_request
from .wizard_pool import wizard_pool
from .forms import (
WizardStep1Form,
WizardStep2BaseForm,
step2_form_factory,
)
class WizardCreateView(SessionWizardView):
template_name = 'cms/wizards/start.html'
file_storage = FileSystemStorage(
location=os.path.join(settings.MEDIA_ROOT, 'wizard_tmp_files'))
form_list = [
('0', WizardStep1Form),
# Form is used as a placeholder form.
# the real form will be loaded after step 0
('1', Form),
]
def dispatch(self, *args, **kwargs):
user = self.request.user
if not user.is_active or not user.is_staff:
raise PermissionDenied
self.site = get_current_site()
return super(WizardCreateView, self).dispatch(*args, **kwargs)
def get_current_step(self):
"""Returns the current step, if possible, else None."""
try:
return self.steps.current
except AttributeError:
return None
def is_first_step(self, step=None):
step = step or self.get_current_step()
return step == '0'
def is_second_step(self, step=None):
step = step or self.get_current_step()
return step == '1'
def get_context_data(self, **kwargs):
context = super(WizardCreateView, self).get_context_data(**kwargs)
if self.is_second_step():
context['wizard_entry'] = self.get_selected_entry()
return context
def get_form(self, step=None, data=None, files=None):
if step is None:
step = self.steps.current
# We need to grab the page from pre-validated data so that the wizard
# has it to prepare the list of valid entries.
if data:
page_key = "{0}-page".format(step)
self.page_pk = data.get(page_key, None)
else:
self.page_pk = None
if self.is_second_step(step):
self.form_list[step] = self.get_step_2_form(step, data, files)
return super(WizardCreateView, self).get_form(step, data, files)
def get_form_kwargs(self, step=None):
"""This is called by self.get_form()"""
kwargs = super(WizardCreateView, self).get_form_kwargs()
kwargs['wizard_user'] = self.request.user
if self.is_second_step(step):
kwargs['wizard_page'] = self.get_origin_page()
kwargs['wizard_language'] = self.get_origin_language()
else:
page_pk = self.page_pk or self.request.GET.get('page', None)
if page_pk and page_pk != 'None':
kwargs['wizard_page'] = Page.objects.filter(pk=page_pk).first()
else:
kwargs['wizard_page'] = None
kwargs['wizard_language'] = get_site_language_from_request(
self.request,
site_id=self.site.pk,
)
return kwargs
def get_form_initial(self, step):
"""This is called by self.get_form()"""
initial = super(WizardCreateView, self).get_form_initial(step)
if self.is_first_step(step):
initial['page'] = self.request.GET.get('page')
initial['language'] = self.request.GET.get('language')
return initial
def get_step_2_form(self, step=None, data=None, files=None):
entry_form_class = self.get_selected_entry().form
step_2_base_form = self.get_step_2_base_form()
form = step2_form_factory(
mixin_cls=step_2_base_form,
entry_form_class=entry_form_class,
)
return form
def get_step_2_base_form(self):
"""
Returns the base form to be used for step 2.
        This form is subclassed dynamically by the form defined per module.
"""
return WizardStep2BaseForm
def get_template_names(self):
if self.is_first_step():
template_name = self.template_name
else:
template_name = self.get_selected_entry().template_name
return template_name
def done(self, form_list, **kwargs):
"""
        This step only runs if all forms are valid. It emits a minimal
        template that uses JS to redirect to the newly created object.
"""
form_one, form_two = list(form_list)
instance = form_two.save()
url = self.get_success_url(instance)
language = form_one.cleaned_data['language']
if not url:
page = self.get_origin_page()
if page:
try:
url = page.get_absolute_url(language)
except NoReverseMatch:
url = '/'
else:
url = '/'
return SimpleTemplateResponse("cms/wizards/done.html", {"url": url})
def get_selected_entry(self):
data = self.get_cleaned_data_for_step('0')
return wizard_pool.get_entry(data['entry'])
def get_origin_page(self):
data = self.get_cleaned_data_for_step('0')
return data.get('page')
def get_origin_language(self):
data = self.get_cleaned_data_for_step('0')
return data.get('language')
def get_success_url(self, instance):
entry = self.get_selected_entry()
language = self.get_origin_language()
success_url = entry.get_success_url(
obj=instance,
language=language,
)
return success_url
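# Illustrative flow (an assumption, not part of the original module): step '0'
# (WizardStep1Form) selects the wizard entry plus the originating page and
# language; step '1' is rebuilt per request by get_step_2_form(), which
# combines WizardStep2BaseForm with the form declared by the selected entry;
# done() then saves that form and redirects to the newly created object.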
|
benzkji/django-cms
|
cms/wizards/views.py
|
Python
|
bsd-3-clause
| 5,882
|
import pytest
from functools import partial
from bluesky.preprocessors import suspend_wrapper
from bluesky.suspenders import (SuspendBoolHigh,
SuspendBoolLow,
SuspendFloor,
SuspendCeil,
SuspendWhenOutsideBand,
SuspendInBand,
SuspendOutBand)
from bluesky.tests.utils import MsgCollector
from bluesky import Msg
import time as ttime
from bluesky.run_engine import RunEngineInterrupted
import threading
import time
from .utils import _fabricate_asycio_event
@pytest.mark.parametrize(
'klass,sc_args,start_val,fail_val,resume_val,wait_time',
[(SuspendBoolHigh, (), 0, 1, 0, .2),
(SuspendBoolLow, (), 1, 0, 1, .2),
(SuspendFloor, (.5,), 1, 0, 1, .2),
(SuspendCeil, (.5,), 0, 1, 0, .2),
(SuspendWhenOutsideBand, (.5, 1.5), 1, 0, 1, .2),
((SuspendInBand, True), (.5, 1.5), 1, 0, 1, .2), # renamed to WhenOutsideBand
((SuspendOutBand, True), (.5, 1.5), 0, 1, 0, .2)]) # deprecated
def test_suspender(klass, sc_args, start_val, fail_val,
resume_val, wait_time, RE, hw):
sig = hw.bool_sig
try:
klass, deprecated = klass
except TypeError:
deprecated = False
if deprecated:
with pytest.warns(UserWarning):
my_suspender = klass(sig,
*sc_args, sleep=wait_time)
else:
my_suspender = klass(sig,
*sc_args, sleep=wait_time)
my_suspender.install(RE)
def putter(val):
sig.put(val)
# make sure we start at good value!
putter(start_val)
# dumb scan
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
RE(scan)
# paranoid
assert RE.state == 'idle'
start = ttime.time()
# queue up fail and resume conditions
threading.Timer(.1, putter, (fail_val,)).start()
threading.Timer(.5, putter, (resume_val,)).start()
# start the scan
RE(scan)
stop = ttime.time()
    # assert we waited at least the resume delay plus the suspender's sleep
    # time and the scan's own sleep
delta = stop - start
print(delta)
assert delta > .5 + wait_time + .2
def test_pretripped(RE, hw):
'Tests if suspender is tripped before __call__'
sig = hw.bool_sig
scan = [Msg('checkpoint')]
msg_lst = []
sig.put(1)
def accum(msg):
msg_lst.append(msg)
susp = SuspendBoolHigh(sig)
RE.install_suspender(susp)
threading.Timer(1, sig.put, (0,)).start()
RE.msg_hook = accum
RE(scan)
assert len(msg_lst) == 2
assert ['wait_for', 'checkpoint'] == [m[0] for m in msg_lst]
@pytest.mark.parametrize('pre_plan,post_plan,expected_list',
[([Msg('null')], None,
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'rewindable', 'sleep']),
(None, [Msg('null')],
['checkpoint', 'sleep', 'rewindable',
'wait_for', 'resume', 'null', 'rewindable',
'sleep']),
([Msg('null')], [Msg('null')],
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'null', 'rewindable',
'sleep']),
(lambda: [Msg('null')], lambda: [Msg('null')],
['checkpoint', 'sleep', 'rewindable', 'null',
'wait_for', 'resume', 'null', 'rewindable',
'sleep'])])
def test_pre_suspend_plan(RE, pre_plan, post_plan, expected_list, hw):
sig = hw.bool_sig
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
msg_lst = []
sig.put(0)
def accum(msg):
msg_lst.append(msg)
susp = SuspendBoolHigh(sig, pre_plan=pre_plan,
post_plan=post_plan)
RE.install_suspender(susp)
threading.Timer(.1, sig.put, (1,)).start()
threading.Timer(1, sig.put, (0,)).start()
RE.msg_hook = accum
RE(scan)
assert len(msg_lst) == len(expected_list)
assert expected_list == [m[0] for m in msg_lst]
RE.remove_suspender(susp)
RE(scan)
assert susp.RE is None
RE.install_suspender(susp)
RE.clear_suspenders()
assert susp.RE is None
assert not RE.suspenders
def test_pause_from_suspend(RE, hw):
'Tests what happens when a pause is requested from a suspended state'
sig = hw.bool_sig
scan = [Msg('checkpoint')]
msg_lst = []
sig.put(1)
def accum(msg):
msg_lst.append(msg)
susp = SuspendBoolHigh(sig)
RE.install_suspender(susp)
threading.Timer(1, RE.request_pause).start()
threading.Timer(2, sig.put, (0,)).start()
RE.msg_hook = accum
with pytest.raises(RunEngineInterrupted):
RE(scan)
assert [m[0] for m in msg_lst] == ['wait_for']
RE.resume()
assert ['wait_for', 'wait_for', 'checkpoint'] == [m[0] for m in msg_lst]
def test_deferred_pause_from_suspend(RE, hw):
'Tests what happens when a soft pause is requested from a suspended state'
sig = hw.bool_sig
scan = [Msg('checkpoint'), Msg('null')]
msg_lst = []
sig.put(1)
def accum(msg):
print(msg)
msg_lst.append(msg)
susp = SuspendBoolHigh(sig)
RE.install_suspender(susp)
threading.Timer(1, RE.request_pause, (True,)).start()
threading.Timer(4, sig.put, (0,)).start()
RE.msg_hook = accum
with pytest.raises(RunEngineInterrupted):
RE(scan)
assert [m[0] for m in msg_lst] == ['wait_for', 'checkpoint']
RE.resume()
assert ['wait_for', 'checkpoint', 'null'] == [m[0] for m in msg_lst]
def test_unresumable_suspend_fail(RE):
    'Tests that a suspension with no checkpoint to rewind to aborts the run'
scan = [Msg('clear_checkpoint'), Msg('sleep', None, 2)]
m_coll = MsgCollector()
RE.msg_hook = m_coll
ev = _fabricate_asycio_event(RE.loop)
threading.Timer(.1, partial(RE.request_suspend, fut=ev.wait)).start()
threading.Timer(1, ev.set).start()
start = time.time()
with pytest.raises(RunEngineInterrupted):
RE(scan)
stop = time.time()
assert .1 < stop - start < 1
def test_suspender_plans(RE, hw):
'Tests that the suspenders can be installed via Msg'
sig = hw.bool_sig
my_suspender = SuspendBoolHigh(sig, sleep=0.2)
def putter(val):
sig.put(val)
putter(0)
# Do the messages work?
RE([Msg('install_suspender', None, my_suspender)])
assert my_suspender in RE.suspenders
RE([Msg('remove_suspender', None, my_suspender)])
assert my_suspender not in RE.suspenders
# Can we call both in a plan?
RE([Msg('install_suspender', None, my_suspender),
Msg('remove_suspender', None, my_suspender)])
scan = [Msg('checkpoint'), Msg('sleep', None, .2)]
# No suspend scan: does the wrapper error out?
start = ttime.time()
RE(suspend_wrapper(scan, my_suspender))
stop = ttime.time()
delta = stop - start
assert delta < .9
# Suspend scan
start = ttime.time()
threading.Timer(.1, putter, (1,)).start()
threading.Timer(.5, putter, (0,)).start()
RE(suspend_wrapper(scan, my_suspender))
stop = ttime.time()
delta = stop - start
assert delta > .9
# Did we clean up?
start = ttime.time()
threading.Timer(.1, putter, (1,)).start()
threading.Timer(.5, putter, (0,)).start()
RE(scan)
stop = ttime.time()
delta = stop - start
assert delta < .9
|
ericdill/bluesky
|
bluesky/tests/test_suspenders.py
|
Python
|
bsd-3-clause
| 7,631
|
# cython: language_level=2
#
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
from __future__ import absolute_import
import re
xpath_tokenizer_re = re.compile(
"("
"'[^']*'|\"[^\"]*\"|"
"::|"
"//?|"
r"\.\.|"
r"\(\)|"
r"[/.*:\[\]\(\)@=])|"
r"((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
r"\s+"
)
def xpath_tokenizer(pattern, namespaces=None):
# ElementTree uses '', lxml used None originally.
default_namespace = (namespaces.get(None) or namespaces.get('')) if namespaces else None
parsing_attribute = False
for token in xpath_tokenizer_re.findall(pattern):
ttype, tag = token
if tag and tag[0] != "{":
if ":" in tag:
prefix, uri = tag.split(":", 1)
try:
if not namespaces:
raise KeyError
yield ttype, "{%s}%s" % (namespaces[prefix], uri)
except KeyError:
raise SyntaxError("prefix %r not found in prefix map" % prefix)
elif default_namespace and not parsing_attribute:
yield ttype, "{%s}%s" % (default_namespace, tag)
else:
yield token
parsing_attribute = False
else:
yield token
parsing_attribute = ttype == '@'
def prepare_child(next, token):
tag = token[1]
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
yield e
return select
def prepare_star(next, token):
def select(result):
for elem in result:
for e in elem.iterchildren('*'):
yield e
return select
def prepare_self(next, token):
def select(result):
return result
return select
def prepare_descendant(next, token):
token = next()
if token[0] == "*":
tag = "*"
elif not token[0]:
tag = token[1]
else:
raise SyntaxError("invalid descendant")
def select(result):
for elem in result:
for e in elem.iterdescendants(tag):
yield e
return select
def prepare_parent(next, token):
def select(result):
for elem in result:
parent = elem.getparent()
if parent is not None:
yield parent
return select
def prepare_predicate(next, token):
# FIXME: replace with real parser!!! refs:
# http://effbot.org/zone/simple-iterator-parser.htm
# http://javascript.crockford.com/tdop/tdop.html
signature = ''
predicate = []
while 1:
token = next()
if token[0] == "]":
break
if token == ('', ''):
# ignore whitespace
continue
if token[0] and token[0][:1] in "'\"":
token = "'", token[0][1:-1]
signature += token[0] or "-"
predicate.append(token[1])
# use signature to determine predicate type
if signature == "@-":
# [@attribute] predicate
key = predicate[1]
def select(result):
for elem in result:
if elem.get(key) is not None:
yield elem
return select
if signature == "@-='":
# [@attribute='value']
key = predicate[1]
value = predicate[-1]
def select(result):
for elem in result:
if elem.get(key) == value:
yield elem
return select
if signature == "-" and not re.match(r"-?\d+$", predicate[0]):
# [tag]
tag = predicate[0]
def select(result):
for elem in result:
for _ in elem.iterchildren(tag):
yield elem
break
return select
if signature == ".='" or (signature == "-='" and not re.match(r"-?\d+$", predicate[0])):
# [.='value'] or [tag='value']
tag = predicate[0]
value = predicate[-1]
if tag:
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
if "".join(e.itertext()) == value:
yield elem
break
else:
def select(result):
for elem in result:
if "".join(elem.itertext()) == value:
yield elem
return select
if signature == "-" or signature == "-()" or signature == "-()-":
# [index] or [last()] or [last()-index]
if signature == "-":
# [index]
index = int(predicate[0]) - 1
if index < 0:
if index == -1:
raise SyntaxError(
"indices in path predicates are 1-based, not 0-based")
else:
raise SyntaxError("path index >= 1 expected")
else:
if predicate[0] != "last":
raise SyntaxError("unsupported function")
if signature == "-()-":
try:
index = int(predicate[2]) - 1
except ValueError:
raise SyntaxError("unsupported expression")
else:
index = -1
def select(result):
for elem in result:
parent = elem.getparent()
if parent is None:
continue
try:
# FIXME: what if the selector is "*" ?
elems = list(parent.iterchildren(elem.tag))
if elems[index] is elem:
yield elem
except IndexError:
pass
return select
raise SyntaxError("invalid predicate")
ops = {
"": prepare_child,
"*": prepare_star,
".": prepare_self,
"..": prepare_parent,
"//": prepare_descendant,
"[": prepare_predicate,
}
# --------------------------------------------------------------------
_cache = {}
def _build_path_iterator(path, namespaces):
"""compile selector pattern"""
if path[-1:] == "/":
path += "*" # implicit all (FIXME: keep this?)
cache_key = (path,)
if namespaces:
# lxml originally used None for the default namespace but ElementTree uses the
# more convenient (all-strings-dict) empty string, so we support both here,
# preferring the more convenient '', as long as they aren't ambiguous.
if None in namespaces:
if '' in namespaces and namespaces[None] != namespaces['']:
raise ValueError("Ambiguous default namespace provided: %r versus %r" % (
namespaces[None], namespaces['']))
cache_key += (namespaces[None],) + tuple(sorted(
item for item in namespaces.items() if item[0] is not None))
else:
cache_key += tuple(sorted(namespaces.items()))
try:
return _cache[cache_key]
except KeyError:
pass
if len(_cache) > 100:
_cache.clear()
if path[:1] == "/":
raise SyntaxError("cannot use absolute path on element")
stream = iter(xpath_tokenizer(path, namespaces))
try:
_next = stream.next
except AttributeError:
# Python 3
_next = stream.__next__
try:
token = _next()
except StopIteration:
raise SyntaxError("empty path expression")
selector = []
while 1:
try:
selector.append(ops[token[0]](_next, token))
except StopIteration:
raise SyntaxError("invalid path")
try:
token = _next()
if token[0] == "/":
token = _next()
except StopIteration:
break
_cache[cache_key] = selector
return selector
##
# Iterate over the matching nodes
def iterfind(elem, path, namespaces=None):
selector = _build_path_iterator(path, namespaces)
result = iter((elem,))
for select in selector:
result = select(result)
return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
it = iterfind(elem, path, namespaces)
try:
return next(it)
except StopIteration:
return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
return list(iterfind(elem, path, namespaces))
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
el = find(elem, path, namespaces)
if el is None:
return default
else:
return el.text or ''
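# Illustrative usage (an assumption, not part of the original module):
#   from lxml import etree
#   root = etree.fromstring('<root><a><b>x</b></a></root>')
#   find(root, 'a/b').text                    # -> 'x'
#   findall(root, './/b')                     # -> [<Element b>]
#   findtext(root, 'a/c', default='missing')  # -> 'missing'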
|
lxml/lxml
|
src/lxml/_elementpath.py
|
Python
|
bsd-3-clause
| 10,742
|
"""MethodCaller provider traversal tests."""
from dependency_injector import providers
def test_traverse():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider = method.call()
all_providers = list(provider.traverse())
assert len(all_providers) == 3
assert provider1 in all_providers
assert provided in all_providers
assert method in all_providers
def test_traverse_args():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call("foo", provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers
def test_traverse_kwargs():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call(foo="foo", bar=provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers
def test_traverse_overridden():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call()
provider.override(provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers
|
ets-labs/python-dependency-injector
|
tests/unit/providers/traversal/test_method_caller_py3.py
|
Python
|
bsd-3-clause
| 1,790
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Tests for the engine module
"""
import numpy as np
import scipy.sparse as ssp
import re
import mock
from nipype.pipeline.plugins.tools import report_crash
def test_report_crash():
with mock.patch('pickle.dump', mock.MagicMock()) as mock_pickle_dump:
with mock.patch('nipype.pipeline.plugins.tools.format_exception', mock.MagicMock()): # see iss 1517
mock_pickle_dump.return_value = True
mock_node = mock.MagicMock(name='mock_node')
mock_node._id = 'an_id'
mock_node.config = {
'execution' : {
'crashdump_dir' : '.',
'crashfile_format' : 'pklz',
}
}
actual_crashfile = report_crash(mock_node)
            expected_crashfile = re.compile(r'.*/crash-.*-an_id-[0-9a-f\-]*.pklz')
assert expected_crashfile.match(actual_crashfile).group() == actual_crashfile
assert mock_pickle_dump.call_count == 1
'''
Can use the following code to test that a mapnode crash continues successfully
Need to put this into a nose-test with a timeout
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe
wf = pe.Workflow(name='test')
def func(arg1):
if arg1 == 2:
raise Exception('arg cannot be ' + str(arg1))
return arg1
funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'], output_names=['out']),
iterfield=['arg1'],
name = 'functor')
funkynode.inputs.arg1 = [1,2]
wf.add_nodes([funkynode])
wf.base_dir = '/tmp'
wf.run(plugin='MultiProc')
'''
|
mick-d/nipype
|
nipype/pipeline/plugins/tests/test_tools.py
|
Python
|
bsd-3-clause
| 1,756
|
# -*- coding: utf-8 -*-
#
# Diazo documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 2 18:58:07 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Diazo'
copyright = u'2011, Plone Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0b1'
# The full version, including alpha/beta/rc tags.
release = '1.0b1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Diazo"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = images/logo.jpg
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Diazodoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Diazo.tex', u'Diazo Documentation',
u'Plone Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'diazo', u'Diazo Documentation',
[u'Plone Foundation'], 1)
]
|
ebrehault/diazo
|
docs/conf.py
|
Python
|
bsd-3-clause
| 6,987
|
import time
import config
from ophyd import scaler
from ophyd.utils import enum
ScalerMode = enum(ONE_SHOT=0, AUTO_COUNT=1)
loggers = ('ophyd.signal',
'ophyd.scaler',
)
config.setup_loggers(loggers)
logger = config.logger
sca = scaler.EpicsScaler(config.scalers[0])
sca.preset_time.put(5.2, wait=True)
logger.info('Counting in One-Shot mode for %f s...', sca.preset_time.get())
sca.trigger()
logger.info('Sleeping...')
time.sleep(3)
logger.info('Done sleeping. Stopping counter...')
sca.count.put(0)
logger.info('Set mode to AutoCount')
sca.count_mode.put(ScalerMode.AUTO_COUNT, wait=True)
sca.trigger()
logger.info('Begin auto-counting (aka "background counting")...')
time.sleep(2)
logger.info('Set mode to OneShot')
sca.count_mode.put(ScalerMode.ONE_SHOT, wait=True)
time.sleep(1)
logger.info('Stopping (aborting) auto-counting.')
sca.count.put(0)
logger.info('read() all channels in one-shot mode...')
vals = sca.read()
logger.info(vals)
logger.info('sca.channels.get() shows: %s', sca.channels.get())
|
dchabot/ophyd
|
examples/scaler.py
|
Python
|
bsd-3-clause
| 1,039
|
import re
from os.path import join
from setuptools import find_packages
def get():
pkgnames = find_packages()
if len(pkgnames) == 0:
return "unknown"
pkgname = pkgnames[0]
content = open(join(pkgname, "__init__.py")).read()
c = re.compile(r"__version__ *= *('[^']+'|\"[^\"]+\")")
m = c.search(content)
if m is None:
return "unknown"
return m.groups()[0][1:-1]
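# Illustrative behaviour (an assumption, not part of the original module): for
# a package whose __init__.py contains
#   __version__ = "1.2.3"
# get() returns "1.2.3"; if no package or no __version__ assignment is found,
# it falls back to the string "unknown".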
|
limix/glimix-core
|
version.py
|
Python
|
mit
| 411
|
"""
Provide functionality to interact with Cast devices on the network.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.cast/
"""
# pylint: disable=import-error
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_STOP, MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pychromecast==0.7.4']
_LOGGER = logging.getLogger(__name__)
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
DEFAULT_PORT = 8009
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_STOP
KNOWN_HOSTS = []
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the cast platform."""
import pychromecast
# import CEC IGNORE attributes
ignore_cec = config.get(CONF_IGNORE_CEC, [])
if isinstance(ignore_cec, list):
pychromecast.IGNORE_CEC += ignore_cec
else:
_LOGGER.error('CEC config "%s" must be a list.', CONF_IGNORE_CEC)
hosts = []
if discovery_info and discovery_info in KNOWN_HOSTS:
return
elif discovery_info:
hosts = [discovery_info]
elif CONF_HOST in config:
hosts = [(config.get(CONF_HOST), DEFAULT_PORT)]
else:
hosts = [tuple(dev[:2]) for dev in pychromecast.discover_chromecasts()
if tuple(dev[:2]) not in KNOWN_HOSTS]
casts = []
for host in hosts:
try:
casts.append(CastDevice(*host))
KNOWN_HOSTS.append(host)
except pychromecast.ChromecastConnectionError:
pass
add_devices(casts)
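# Illustrative configuration.yaml entry for this platform (an assumption, not
# part of the original module):
#   media_player:
#     - platform: cast
#       host: 192.168.1.10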
class CastDevice(MediaPlayerDevice):
"""Representation of a Cast device on the network."""
# pylint: disable=abstract-method
# pylint: disable=too-many-public-methods
def __init__(self, host, port):
"""Initialize the Cast device."""
import pychromecast
self.cast = pychromecast.Chromecast(host, port)
self.cast.socket_client.receiver_controller.register_status_listener(
self)
self.cast.socket_client.media_controller.register_status_listener(self)
self.cast_status = self.cast.status
self.media_status = self.cast.media_controller.status
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self.cast.device.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
"""Return the state of the player."""
if self.media_status is None:
return STATE_UNKNOWN
elif self.media_status.player_is_playing:
return STATE_PLAYING
elif self.media_status.player_is_paused:
return STATE_PAUSED
elif self.media_status.player_is_idle:
return STATE_IDLE
elif self.cast.is_idle:
return STATE_OFF
else:
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.cast_status.volume_level if self.cast_status else None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.cast_status.volume_muted if self.cast_status else None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.media_status.content_id if self.media_status else None
@property
def media_content_type(self):
"""Content type of current playing media."""
if self.media_status is None:
return None
elif self.media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
elif self.media_status.media_is_movie:
return MEDIA_TYPE_VIDEO
elif self.media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.media_status is None:
return None
images = self.media_status.images
return images[0].url if images else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.title if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.media_status.artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media (Music track only)."""
return self.media_status.album_name if self.media_status else None
@property
def media_album_artist(self):
"""Album arist of current playing media (Music track only)."""
return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
return self.media_status.track if self.media_status else None
@property
def media_series_title(self):
"""The title of the series of current playing media (TV Show only)."""
return self.media_status.series_title if self.media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
return self.media_status.episode if self.media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self.cast.app_id
@property
def app_name(self):
"""Name of the current running app."""
return self.cast.app_display_name
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_CAST
def turn_on(self):
"""Turn on the ChromeCast."""
        # The only way we can turn the Chromecast on is by launching an app
if not self.cast.status or not self.cast.status.is_active_input:
import pychromecast
if self.cast.app_id:
self.cast.quit_app()
self.cast.play_media(
CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn Chromecast off."""
self.cast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cast.set_volume(volume)
def media_play(self):
"""Send play commmand."""
self.cast.media_controller.play()
def media_pause(self):
"""Send pause command."""
self.cast.media_controller.pause()
def media_stop(self):
"""Send stop command."""
self.cast.media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
self.cast.media_controller.rewind()
def media_next_track(self):
"""Send next track command."""
self.cast.media_controller.skip()
def media_seek(self, position):
"""Seek the media to a specific location."""
self.cast.media_controller.seek(position)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
self.cast.media_controller.play_media(media_id, media_type)
# Implementation of chromecast status_listener methods
def new_cast_status(self, status):
"""Called when a new cast status is received."""
self.cast_status = status
self.update_ha_state()
def new_media_status(self, status):
"""Called when a new media status is received."""
self.media_status = status
self.update_ha_state()
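# ---------------------------------------------------------------------------
# Editor's note: an illustrative configuration sketch, not part of the
# original component. Assuming CONF_HOST maps to the usual `host` option and
# the platform name matches this module (`cast`), a configuration.yaml entry
# would look roughly like (the IP below is a placeholder):
#
#   media_player:
#     - platform: cast
#       host: 192.168.1.10
#
# Omitting `host` falls back to the pychromecast discovery path used in the
# platform setup above.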
|
hexxter/home-assistant
|
homeassistant/components/media_player/cast.py
|
Python
|
mit
| 8,925
|
from . import resource
class AuthorizationsBase(resource.GitHubResource):
path = 'authorizations'
class Authorization(AuthorizationsBase):
pass
class Authorizations(AuthorizationsBase):
pass
|
ducksboard/libsaas
|
libsaas/services/github/authorizations.py
|
Python
|
mit
| 210
|
from allauth.socialaccount.tests import OAuth2TestsMixin
from allauth.tests import MockedResponse, TestCase
from .provider import MicrosoftGraphProvider
class MicrosoftGraphTests(OAuth2TestsMixin, TestCase):
provider_id = MicrosoftGraphProvider.id
def get_mocked_response(self):
response_data = """
{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users/$entity",
"id": "16f5a7b6-5a15-4568-aa5a-31bb117e9967",
"businessPhones": [],
"displayName": "Anne Weiler",
"givenName": "Anne",
"jobTitle": "Manufacturing Lead",
"mail": "annew@CIE493742.onmicrosoft.com",
"mobilePhone": "+1 3528700812",
"officeLocation": null,
"preferredLanguage": "en-US",
"surname": "Weiler",
"userPrincipalName": "annew@CIE493742.onmicrosoft.com"
}
""" # noqa
return MockedResponse(200, response_data)
|
AltSchool/django-allauth
|
allauth/socialaccount/providers/microsoft/tests.py
|
Python
|
mit
| 988
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Function/method decorators that provide timeout and retry logic.
"""
import functools
import itertools
import sys
from devil.android import device_errors
from devil.utils import cmd_helper
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
DEFAULT_TIMEOUT_ATTR = '_default_timeout'
DEFAULT_RETRIES_ATTR = '_default_retries'
def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
""" Wraps a funcion with timeout and retry handling logic.
Args:
f: The function to wrap.
timeout_func: A callable that returns the timeout value.
retries_func: A callable that returns the retries value.
pass_values: If True, passes the values returned by |timeout_func| and
|retries_func| to the wrapped function as 'timeout' and
'retries' kwargs, respectively.
Returns:
The wrapped function.
"""
@functools.wraps(f)
def timeout_retry_wrapper(*args, **kwargs):
timeout = timeout_func(*args, **kwargs)
retries = retries_func(*args, **kwargs)
if pass_values:
kwargs['timeout'] = timeout
kwargs['retries'] = retries
@functools.wraps(f)
def impl():
return f(*args, **kwargs)
try:
if timeout_retry.CurrentTimeoutThreadGroup():
# Don't wrap if there's already an outer timeout thread.
return impl()
else:
desc = '%s(%s)' % (f.__name__, ', '.join(itertools.chain(
(str(a) for a in args),
('%s=%s' % (k, str(v)) for k, v in kwargs.iteritems()))))
return timeout_retry.Run(impl, timeout, retries, desc=desc)
except reraiser_thread.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
except cmd_helper.TimeoutError as e:
raise device_errors.CommandTimeoutError(str(e)), None, (
sys.exc_info()[2])
return timeout_retry_wrapper
def WithTimeoutAndRetries(f):
"""A decorator that handles timeouts and retries.
'timeout' and 'retries' kwargs must be passed to the function.
Args:
f: The function to decorate.
Returns:
The decorated function.
"""
get_timeout = lambda *a, **kw: kw['timeout']
get_retries = lambda *a, **kw: kw['retries']
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
def WithExplicitTimeoutAndRetries(timeout, retries):
"""Returns a decorator that handles timeouts and retries.
The provided |timeout| and |retries| values are always used.
Args:
timeout: The number of seconds to wait for the decorated function to
return. Always used.
retries: The number of times the decorated function should be retried on
failure. Always used.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: timeout
get_retries = lambda *a, **kw: retries
return _TimeoutRetryWrapper(f, get_timeout, get_retries)
return decorator
def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout| and |default_retries| values are used only
if timeout and retries values are not provided.
Args:
default_timeout: The number of seconds to wait for the decorated function
to return. Only used if a 'timeout' kwarg is not passed
to the decorated function.
default_retries: The number of times the decorated function should be
retried on failure. Only used if a 'retries' kwarg is not
passed to the decorated function.
Returns:
The actual decorator.
"""
def decorator(f):
get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
get_retries = lambda *a, **kw: kw.get('retries', default_retries)
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
return decorator
def WithTimeoutAndRetriesFromInstance(
default_timeout_name=DEFAULT_TIMEOUT_ATTR,
default_retries_name=DEFAULT_RETRIES_ATTR,
min_default_timeout=None):
"""Returns a decorator that handles timeouts and retries.
The provided |default_timeout_name| and |default_retries_name| are used to
get the default timeout value and the default retries value from the object
instance if timeout and retries values are not provided.
Note that this should only be used to decorate methods, not functions.
Args:
default_timeout_name: The name of the default timeout attribute of the
instance.
default_retries_name: The name of the default retries attribute of the
instance.
    min_default_timeout: Minimum timeout to be used when using instance timeout.
Returns:
The actual decorator.
"""
def decorator(f):
def get_timeout(inst, *_args, **kwargs):
ret = getattr(inst, default_timeout_name)
if min_default_timeout is not None:
ret = max(min_default_timeout, ret)
return kwargs.get('timeout', ret)
def get_retries(inst, *_args, **kwargs):
return kwargs.get('retries', getattr(inst, default_retries_name))
return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
return decorator
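# ---------------------------------------------------------------------------
# Editor's note: a commented usage sketch, not part of the original module.
# The class and function names below are hypothetical; they only illustrate
# how the decorators defined above are intended to be applied.
#
#   class ExampleClient(object):
#     _default_timeout = 30
#     _default_retries = 3
#
#     @WithTimeoutAndRetriesFromInstance()
#     def Ping(self, timeout=None, retries=None):
#       # timeout/retries are filled from the instance defaults unless the
#       # caller overrides them (pass_values=True forwards them as kwargs).
#       pass
#
#   @WithExplicitTimeoutAndRetries(timeout=10, retries=2)
#   def FetchOnce():
#     pass  # always runs with a 10 second timeout and 2 retries
#
#   @WithTimeoutAndRetries
#   def FetchWithCallerValues(timeout=None, retries=None):
#     pass  # caller must pass timeout= and retries= as keyword arguments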
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/build/android/devil/android/decorators.py
|
Python
|
mit
| 5,401
|
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Q
from core.models import TimeStampedModel
from accounts.models import Account
class Board(models.Model):
def __str__(self):
return 'Board Name: ' + self.name
def get_absolute_url(self):
return reverse('board:post_list', args=[self.slug])
slug = models.CharField(default='', unique=True, max_length=100)
name = models.CharField(default='', max_length=100)
posts_chunk_size = models.IntegerField(default=10)
post_pages_nav_chunk_size = models.IntegerField(default=10)
comments_chunk_size = models.IntegerField(default=5)
comment_pages_nav_chunk_size = models.IntegerField(default=10)
class PostQuerySet(models.QuerySet):
def search(self, search_flag, query):
if search_flag == 'TITLE':
return self.filter(title__contains=query)
elif search_flag == 'CONTENT':
return self.filter(content__contains=query)
elif search_flag == 'BOTH':
return self.filter(Q(title__contains=query) | Q(content__contains=query))
else:
return self.all()
def remain(self):
return self.filter(is_deleted=False)
def board(self, board):
return self.filter(board=board)
class PostManager(models.Manager):
def get_queryset(self):
return PostQuerySet(self.model, using=self._db)
def search(self, search_flag, query):
return self.get_queryset().search(search_flag, query)
def remain(self):
return self.get_queryset().remain()
def board(self, board):
return self.get_queryset().board(board)
class Post(TimeStampedModel):
def __str__(self):
return 'Post Title: ' + self.title
SEARCH_FLAG = [
('TITLE', '제목'),
('CONTENT', '내용'),
('BOTH', '제목+내용')
]
objects = PostManager()
title = models.CharField(blank=False, max_length=100)
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
is_deleted = models.BooleanField(default=False)
page_view_count = models.IntegerField(default=0)
like_count = models.IntegerField(default=0)
account = models.ForeignKey(Account, null=True)
ip = models.GenericIPAddressField(null=True, default='')
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
class EditedPostHistory(TimeStampedModel):
post = models.ForeignKey(Post, null=False, default=None)
title = models.CharField(default='', max_length=100)
content = models.TextField(default='')
ip = models.GenericIPAddressField(null=True, default='')
class Attachment(models.Model):
post = models.ForeignKey(Post, null=True)
editedPostHistory = models.ForeignKey(EditedPostHistory, null=True, default=None)
attachment = models.FileField(blank=True, null=True)
class Comment(TimeStampedModel):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
is_deleted = models.BooleanField(default=False)
account = models.ForeignKey(Account, null=True)
ip = models.GenericIPAddressField(null=True, default='')
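# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch, not part of the original module.
# Assuming a Board instance `notice_board` already exists, the custom manager
# and queryset above are designed to be chained, e.g.:
#
#   posts = Post.objects.board(notice_board).remain().search('TITLE', 'hello')
#
# i.e. restrict to one board, drop soft-deleted posts, then filter by title.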
|
hyesun03/k-board
|
kboard/board/models.py
|
Python
|
mit
| 3,211
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'FeaturedResource'
db.create_table('resources_featuredresource', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('topic', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['resources.Topic'])),
('resource_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['resources.ResourceType'])),
('resource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['resources.Resource'])),
))
db.send_create_signal('resources', ['FeaturedResource'])
# Adding unique constraint on 'FeaturedResource', fields ['topic', 'resource_type']
db.create_unique('resources_featuredresource', ['topic_id', 'resource_type_id'])
def backwards(self, orm):
# Removing unique constraint on 'FeaturedResource', fields ['topic', 'resource_type']
db.delete_unique('resources_featuredresource', ['topic_id', 'resource_type_id'])
# Deleting model 'FeaturedResource'
db.delete_table('resources_featuredresource')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'resources.featuredresource': {
'Meta': {'unique_together': "(('topic', 'resource_type'),)", 'object_name': 'FeaturedResource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['resources.Resource']"}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['resources.ResourceType']"}),
'topic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['resources.Topic']"})
},
'resources.resource': {
'Meta': {'object_name': 'Resource'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'rating_score': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'rating_votes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['resources.ResourceType']"}),
'show': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'topics': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['resources.Topic']", 'symmetrical': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
},
'resources.resourcetype': {
'Meta': {'object_name': 'ResourceType'},
'color': ('django.db.models.fields.CharField', [], {'default': "'purple'", 'unique': 'True', 'max_length': '20'}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'resources.topic': {
'Meta': {'ordering': "['name']", 'object_name': 'Topic'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'official_website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['resources']
|
amitskwalia/codesters
|
resources/migrations/0004_auto__add_featuredresource__add_unique_featuredresource_topic_resource.py
|
Python
|
mit
| 8,260
|
import os
from home.models import ReplicaSet, WhatTorrent, WhatFulltext
def run_checks():
errors = []
warnings = []
# Check WhatFulltext integrity
def check_whatfulltext():
w_torrents = dict((w.id, w) for w in WhatTorrent.objects.defer('torrent_file').all())
w_fulltext = dict((w.id, w) for w in WhatFulltext.objects.all())
for id, w_t in w_torrents.items():
if id not in w_fulltext:
errors.append(u'{0} does not have a matching fulltext entry.'.format(w_t))
elif not w_fulltext[id].match(w_t):
errors.append(u'{0} does not match info with fulltext entry.'.format(w_t))
for id, w_f in w_fulltext.items():
if id not in w_torrents:
errors.append(u'{0} does not have a matching whattorrent entry.'.format(w_f))
check_whatfulltext()
for replica_set in ReplicaSet.objects.all():
m_torrents = {}
for instance in replica_set.transinstance_set.all():
i_m_torrents = instance.get_m_torrents_by_hash()
i_t_torrents = instance.get_t_torrents_by_hash(['id', 'hashString'])
for hash, m_torrent in i_m_torrents.items():
# Check if this torrent is already in another instance
if hash in m_torrents:
warnings.append(u'{0} is already in another instance of '
u'the same replica set: {1}'
.format(m_torrent, m_torrents[hash].instance))
# Check if the instance has the torrent
if hash not in i_t_torrents:
errors.append(u'{0} is in DB, but not in Transmission at instance {1}'
.format(m_torrent, instance))
m_torrents[hash] = m_torrent
# Check for the presence of metafiles if the instance is a master
if replica_set.is_master:
files_in_dir = os.listdir(m_torrent.path)
if not any('.torrent' in f for f in files_in_dir):
errors.append(u'Missing .torrent file for {0} at {1}'
.format(m_torrent, instance))
if not any('ReleaseInfo2.txt' == f for f in files_in_dir):
errors.append(u'Missing ReleaseInfo2.txt for {0} at {1}'
.format(m_torrent, instance))
for hash, t_torrent in i_t_torrents.items():
# Check if the database has the torrent
if hash not in i_m_torrents:
errors.append(u'{0} is in Transmission, but not in DB at instance {1}'
.format(t_torrent, instance))
return {
'errors': errors,
'warnings': warnings
}
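# ---------------------------------------------------------------------------
# Editor's note: an illustrative usage sketch, not part of the original
# module (e.g. from a management command or a shell session):
#
#   report = run_checks()
#   for line in report['errors']:
#       print(u'ERROR: {0}'.format(line))
#   for line in report['warnings']:
#       print(u'WARNING: {0}'.format(line))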
|
MADindustries/WhatManager2
|
WhatManager2/checks.py
|
Python
|
mit
| 2,864
|
"""
Some helper functions for workspace stuff
"""
import logging
import re
import biokbase
import biokbase.workspace
from biokbase.workspace import client as WorkspaceClient
g_log = logging.getLogger(__name__)
# regex for parsing out workspace_id and object_id from
# a "ws.{workspace}.{object}" string
ws_regex = re.compile('^ws\.(?P<wsid>\d+)\.obj\.(?P<objid>\d+)')
# regex for parsing out a user_id from a token
user_id_regex = re.compile('^un=(?P<user_id>\w+)\|')
# Exception for a malformed workspace ID see regex above
class BadWorkspaceID(Exception):
pass
# Exception for a workspace object not found see regex above
class BadWorkspaceObjectID(Exception):
pass
class PermissionsError(WorkspaceClient.ServerError):
"""Raised if user does not have permission to
access the workspace.
"""
@staticmethod
def is_permissions_error(err):
"""Try to guess if the error string is a permission-denied error
for the narrative (i.e. the workspace the narrative is in).
"""
pat = re.compile("\s*[Uu]ser \w+ may not \w+ workspace.*")
return pat.match(err) is not None
def __init__(self, name=None, code=None, message=None, **kw):
WorkspaceClient.ServerError.__init__(self, name, code,
message, **kw)
# List of fields returned by the list_workspace_objects function
list_ws_obj_fields = ['id','type','moddate','instance','command',
'lastmodifier','owner','workspace','ref','chsum',
'metadata','objid']
# The list_workspace_objects method has been deprecated, the
# list_objects method is the current primary method for fetching
# objects, and has a different field list
list_objects_fields = ['objid', 'name', 'type', 'save_date', 'ver', 'saved_by',
'wsid', 'workspace', 'chsum', 'size', 'meta']
obj_field = dict(zip(list_objects_fields,range(len(list_objects_fields))))
# object type for a project tag object
ws_tag_type = 'KBaseNarrative.Metadata'
# object type for a narrative object
ws_narrative_type = 'KBaseNarrative.Narrative'
# object name for project tag
ws_tag = {'project' : '_project'}
def get_wsobj_meta(wsclient, objtype=ws_narrative_type, ws_id=None):
"""
Takes an initialized workspace client. Defaults to searching for
Narrative types in any workspace that the token has at least read access to.
If the ws field is specified then it will return the workspace metadata
for only the workspace specified
Returns a dictionary of object descriptions - the key is a workspace id of
the form "ws.{workspace_id}.obj.{object_id}" and the values are dictionaries
keyed on the list_ws_obj_field list above.
Raises: PermissionsError, if access is denied
"""
try:
if ws_id is None:
res = wsclient.list_objects({'type' : objtype,
'includeMetadata' : 1})
else:
res = wsclient.list_objects({'type' : objtype,
'includeMetadata' : 1,
'ids' : [ws_id] })
except WorkspaceClient.ServerError, err:
if PermissionsError.is_permissions_error(err.message):
raise PermissionsError(name=err.name, code=err.code,
message=err.message, data=err.data)
my_narratives = {}
for obj in res:
my_narratives["ws.%s.obj.%s" % (obj[obj_field['wsid']],obj[obj_field['objid']])] = dict(zip(list_objects_fields,obj))
return my_narratives
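# Editor's note: an illustrative sketch, not part of the original module, of
# how get_wsobj_meta is typically consumed; `wsclient` is assumed to be an
# already-authenticated workspace client:
#
#   narratives = get_wsobj_meta(wsclient)
#   for ref, info in narratives.items():
#       # ref looks like "ws.{workspace_id}.obj.{object_id}"
#       print ref, info['name'], info['saved_by']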
def get_wsid(wsclient, workspace):
"""
When given a workspace name, returns the numeric ws_id
"""
try:
ws_meta = wsclient.get_workspace_info({'workspace' : workspace});
except WorkspaceClient.ServerError, e:
if e.message.find('not found') >= 0 or e.message.find('No workspace with name') >= 0:
return(None)
else:
raise e
return( ws_meta[0])
def alter_workspace_metadata(wsclient, ref, new_metadata={}, ws_id=None):
"""
    This is a wrapper for the workspace alter_workspace_metadata call.
    Takes an initialized workspace client plus either a numeric workspace id
    (ws_id) or an object reference (ref) of the form
    "ws.{ws_id}.obj.{object id}", from which the workspace id is parsed,
    and merges new_metadata into that workspace's metadata.
    Raises BadWorkspaceID if neither a usable reference nor a ws_id is given.
"""
if ws_id is None and ref is not None:
match = ws_regex.match(ref)
if not match:
raise BadWorkspaceID("%s does not match workspace ID format ws.{workspace id}.obj.{object id}" % ws_id)
ws_id = match.group(1)
elif ws_id is None and ref is None:
raise BadWorkspaceID("No workspace id or object reference given!")
wsclient.alter_workspace_metadata({'wsi':{'id':ws_id}, 'new':new_metadata})
def get_wsobj(wsclient, ws_id, objtype=None):
"""
This is just a wrapper for the workspace get_objects call.
Takes an initialized workspace client and a workspace ID
of the form "ws.{ws_id}.obj.{object id}" and returns the following:
{
'data' : {actual data contained in the object},
'metadata' : { a dictionary version of the object metadata },
... all the fields that are normally returned in a ws ObjectData type
}
if type is not specified then an extra lookup for object metadata
is required, this can be shortcut by passing in the object type
"""
match = ws_regex.match( ws_id)
if not match:
raise BadWorkspaceID("%s does not match workspace ID format ws.{workspace id}.obj.{object id}" % ws_id)
ws = match.group(1)
objid = match.group(2)
objs = wsclient.get_objects([dict( wsid=ws, objid=objid)])
if len(objs) < 1:
raise BadWorkspaceID( "%s could not be found" % ws_id)
elif len(objs) > 1:
raise BadWorkspaceID( "%s non-unique! Weird!!!" % ws_id)
res=objs[0]
res['metadata'] = dict(zip(list_objects_fields,objs[0]['info']))
return res
def delete_wsobj(wsclient, wsid, objid):
"""
Given a workspace client, and numeric workspace id and object id, delete it
returns true on success, false otherwise
"""
try:
wsclient.delete_objects( [{ 'wsid' : wsid,
'objid' : objid }] )
except WorkspaceClient.ServerError, e:
raise e
# return False
return True
# Write an object to the workspace, takes the workspace id, an object of the
# type workspace.ObjectSaveData
# typedef structure {
# type_string type;
# UnspecifiedObject data;
# obj_name name;
# obj_id objid;
# usermeta meta;
# list<ProvenanceAction> provenance;
# boolean hidden;
# } ObjectSaveData;
def rename_wsobj(wsclient, identity, new_name):
"""
Given an object's identity, change that object's name.
"""
try:
obj_info = wsclient.rename_object({ 'obj' : identity,
'new_name' : new_name })
except WorkspaceClient.ServerError, e:
raise e
return dict(zip(list_objects_fields, obj_info))
def put_wsobj(wsclient, ws_id, obj):
try:
ws_meta = wsclient.save_objects({ 'id' : ws_id,
'objects' : [obj] })
except:
raise
return dict(zip(list_objects_fields,ws_meta[0]))
# Tag a workspace as a project, if there is an error, let it propagate up
def check_project_tag(wsclient, ws_id):
try:
tag = wsclient.get_object_info( [{ 'wsid' : ws_id,
'name' : ws_tag['project'] }],
0);
except WorkspaceClient.ServerError, e:
# If it is a not found error, create it, otherwise reraise
if e.message.find('not found') >= 0 or e.message.find('No object with name') >= 0:
obj_save_data = { 'name' : ws_tag['project'],
'type' :ws_tag_type,
'data' : { 'description' : 'Tag! You\'re a project!'},
'meta' : {},
'provenance' : [],
'hidden' : 1}
ws_meta = wsclient.save_objects( { 'id' : ws_id,
'objects' : [obj_save_data]});
else:
raise e
return True
def get_user_id(wsclient):
"""Grab the userid from the token in the wsclient object
This is a pretty brittle way to do things, and will need to be
changed, eventually.
"""
try:
token = wsclient._headers.get('AUTHORIZATION', None)
if token is None:
g_log.error("auth.error No 'AUTHORIZATION' key found "
"in client headers: '{}'"
.format(wsclient._headers))
return None
match = user_id_regex.match(token)
if match:
return match.group(1)
else:
return None
except Exception, e:
g_log.error("Cannot get userid: {}".format(e))
raise e
def check_homews(wsclient, user_id = None):
"""
Helper routine to make sure that the user's home workspace is built. Putting it here
so that when/if it changes we only have a single place to change things.
Takes a wsclient, and if it is authenticated, extracts the user_id from the token
and will check for the existence of the home workspace and
create it if necessary. Will pass along any exceptions. Will also make sure that
it is tagged with a workspace_meta object named "_project"
returns the workspace name and workspace id as a tuple
Note that parsing the token from the wsclient object is brittle and should be changed!
"""
if user_id is None:
user_id = get_user_id(wsclient)
try:
homews = "%s:home" % user_id
workspace_identity = { 'workspace' : homews }
ws_meta = wsclient.get_workspace_info( workspace_identity)
except WorkspaceClient.ServerError, e:
# If it is a not found error, create it, otherwise reraise
if e.message.find('not found') >= 0 or e.message.find('No workspace with name') >= 0:
ws_meta = wsclient.create_workspace({ 'workspace' : homews,
'globalread' : 'n',
'description' : 'User home workspace'})
elif e.message.find('deleted') >= 0:
wsclient.undelete_workspace( { 'workspace' : homews})
ws_meta = wsclient.get_workspace_info( workspace_identity)
else:
raise e
if ws_meta:
# check_project_tag(wsclient, ws_meta[0])
# return the textual name and the numeric ws_id
return ws_meta[1],ws_meta[0]
else:
raise Exception('Unable to find or create or undelete home workspace: %s' % homews)
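# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch, not part of the original module.
# With an authenticated client, ensuring the home workspace exists and
# getting its name and numeric id back is a single call:
#
#   ws_name, ws_id = check_homews(wsclient)
#   print "home workspace %s has id %s" % (ws_name, ws_id)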
|
rsutormin/narrative
|
src/biokbase/narrative/ws_util.py
|
Python
|
mit
| 11,249
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gramps.gen.plug._pluginreg import *
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
MODULE_VERSION="5.1"
# this is the default in gen/plug/_pluginreg.py: plg.require_active = True
#------------------------------------------------------------------------
#
# Ancestor Tree
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ancestor_chart,BKI'
plg.name = _("Ancestor Chart")
plg.description = _("Produces a graphical ancestral chart")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'ancestortree.py'
plg.ptype = REPORT
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'AncestorTree'
plg.optionclass = 'AncestorTreeOptions'
plg.report_modes = [REPORT_MODE_BKI]
plg = newplugin()
plg.id = 'ancestor_chart'
plg.name = _("Ancestor Tree")
plg.description = _("Produces a graphical ancestral tree")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'ancestortree.py'
plg.ptype = REPORT
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'AncestorTree'
plg.optionclass = 'AncestorTreeOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_CLI]
#------------------------------------------------------------------------
#
# Calendar
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'calendar'
plg.name = _("Calendar")
plg.description = _("Produces a graphical calendar")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'calendarreport.py'
plg.ptype = REPORT
plg.authors = ["Douglas S. Blank"]
plg.authors_email = ["dblank@cs.brynmawr.edu"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'Calendar'
plg.optionclass = 'CalendarOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
#------------------------------------------------------------------------
#
# Descendant Tree
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'descend_chart,BKI'
plg.name = _("Descendant Chart")
plg.description = _("Produces a graphical descendant chart")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'descendtree.py'
plg.ptype = REPORT
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'DescendTree'
plg.optionclass = 'DescendTreeOptions'
plg.report_modes = [REPORT_MODE_BKI]
plg = newplugin()
plg.id = 'descend_chart'
plg.name = _("Descendant Tree")
plg.description = _("Produces a graphical descendant tree")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'descendtree.py'
plg.ptype = REPORT
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'DescendTree'
plg.optionclass = 'DescendTreeOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_CLI]
#------------------------------------------------------------------------
#
# Family Descendant Tree
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'family_descend_chart,BKI'
plg.name = _("Family Descendant Chart")
plg.description = _("Produces a graphical descendant chart around a family")
plg.version = '1.0'
plg.status = STABLE
plg.fname = 'descendtree.py'
plg.ptype = REPORT
plg.category = CATEGORY_DRAW
plg.gramps_target_version = MODULE_VERSION
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.require_active = True
plg.reportclass = 'DescendTree'
plg.optionclass = 'DescendTreeOptions'
plg.report_modes = [REPORT_MODE_BKI]
plg = newplugin()
plg.id = 'family_descend_chart'
plg.name = _("Family Descendant Tree")
plg.description = _("Produces a graphical descendant tree around a family")
plg.version = '1.0'
plg.status = STABLE
plg.fname = 'descendtree.py'
plg.ptype = REPORT
plg.category = CATEGORY_DRAW
plg.gramps_target_version = MODULE_VERSION
plg.authors = ["Craig J. Anderson"]
plg.authors_email = ["ander882@hotmail.com"]
plg.require_active = True
plg.reportclass = 'DescendTree'
plg.optionclass = 'DescendTreeOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_CLI]
#------------------------------------------------------------------------
#
# Fan Chart
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'fan_chart'
plg.name = _("Fan Chart")
plg.description = _("Produces fan charts")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'fanchart.py'
plg.ptype = REPORT
plg.authors = ["Donald N. Allingham"]
plg.authors_email = ["don@gramps-project.org"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'FanChart'
plg.optionclass = 'FanChartOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
#------------------------------------------------------------------------
#
# Statistics Charts
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'statistics_chart'
plg.name = _("Statistics Charts")
plg.description = _("Produces statistical bar and pie charts of the people "
"in the database")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'statisticschart.py'
plg.ptype = REPORT
plg.authors = ["Eero Tamminen"]
plg.authors_email = [""]
plg.category = CATEGORY_DRAW
plg.reportclass = 'StatisticsChart'
plg.optionclass = 'StatisticsChartOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
plg.require_active = False
#------------------------------------------------------------------------
#
# Timeline Chart
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'timeline'
plg.name = _("Timeline Chart")
plg.description = _("Produces a timeline chart.")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'timeline.py'
plg.ptype = REPORT
plg.authors = ["Donald N. Allingham"]
plg.authors_email = ["don@gramps-project.org"]
plg.category = CATEGORY_DRAW
plg.reportclass = 'TimeLine'
plg.optionclass = 'TimeLineOptions'
plg.report_modes = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
|
dermoth/gramps
|
gramps/plugins/drawreport/drawplugins.gpr.py
|
Python
|
gpl-2.0
| 7,435
|
import dns
import os
import socket
import struct
from recursortests import RecursorTest
class testKeepOpenTCP(RecursorTest):
_confdir = 'KeepOpenTCP'
_config_template = """dnssec=validate
packetcache-ttl=10
packetcache-servfail-ttl=10
auth-zones=authzone.example=configs/%s/authzone.zone""" % _confdir
@classmethod
def generateRecursorConfig(cls, confdir):
authzonepath = os.path.join(confdir, 'authzone.zone')
with open(authzonepath, 'w') as authzone:
authzone.write("""$ORIGIN authzone.example.
@ 3600 IN SOA {soa}
@ 3600 IN A 192.0.2.88
""".format(soa=cls._SOA))
super(testKeepOpenTCP, cls).generateRecursorConfig(confdir)
def sendTCPQueryKeepOpen(cls, sock, query, timeout=2.0):
try:
wire = query.to_wire()
sock.send(struct.pack("!H", len(wire)))
sock.send(wire)
data = sock.recv(2)
if data:
(datalen,) = struct.unpack("!H", data)
data = sock.recv(datalen)
except socket.timeout as e:
print("Timeout: %s" % (str(e)))
data = None
except socket.error as e:
print("Network error: %s" % (str(e)))
data = None
message = None
if data:
message = dns.message.from_wire(data)
return message
def testNoTrailingData(self):
count = 10
sock = [None] * count
expected = dns.rrset.from_text('ns.secure.example.', 0, dns.rdataclass.IN, 'A', '{prefix}.9'.format(prefix=self._PREFIX))
query = dns.message.make_query('ns.secure.example', 'A', want_dnssec=True)
query.flags |= dns.flags.AD
for i in range(count):
sock[i] = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock[i].settimeout(2.0)
sock[i].connect(("127.0.0.1", self._recursorPort))
res = self.sendTCPQueryKeepOpen(sock[i], query)
self.assertMessageIsAuthenticated(res)
self.assertRRsetInAnswer(res, expected)
self.assertMatchingRRSIGInAnswer(res, expected)
sock[i].settimeout(0.1)
try:
data = sock[i].recv(1)
self.assertTrue(False)
except socket.timeout as e:
print("ok")
for i in range(count):
sock[i].settimeout(2.0)
res = self.sendTCPQueryKeepOpen(sock[i], query)
self.assertMessageIsAuthenticated(res)
self.assertRRsetInAnswer(res, expected)
self.assertMatchingRRSIGInAnswer(res, expected)
sock[i].settimeout(0.1)
try:
data = sock[i].recv(1)
self.assertTrue(False)
except socket.timeout as e:
print("ok")
for i in range(count):
sock[i].close()
|
PowerDNS/pdns
|
regression-tests.recursor-dnssec/test_KeepOpenTCP.py
|
Python
|
gpl-2.0
| 2,856
|
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
from odoo import api, fields, models, _
from PIL import Image
from cStringIO import StringIO
import babel
from odoo.tools import html_escape as escape, posix_to_ldml, safe_eval, float_utils
from .qweb import unicodifier
import logging
_logger = logging.getLogger(__name__)
def nl2br(string):
""" Converts newlines to HTML linebreaks in ``string``. returns
the unicode result
:param str string:
:rtype: unicode
"""
return unicodifier(string).replace(u'\n', u'<br>\n')
def html_escape(string, options):
""" Automatically escapes content unless options['html-escape']
is set to False
:param str string:
:param dict options:
"""
return escape(string) if not options or options.get('html-escape', True) else string
#--------------------------------------------------------------------
# QWeb Fields converters
#--------------------------------------------------------------------
class FieldConverter(models.AbstractModel):
""" Used to convert a t-field specification into an output HTML field.
:meth:`~.to_html` is the entry point of this conversion from QWeb, it:
* converts the record value to html using :meth:`~.record_to_html`
* generates the metadata attributes (``data-oe-``) to set on the root
result node
* generates the root result node itself through :meth:`~.render_element`
"""
_name = 'ir.qweb.field'
@api.model
def attributes(self, record, field_name, options, values=None):
""" attributes(record, field_name, field, options, values)
Generates the metadata attributes (prefixed by ``data-oe-``) for the
root node of the field conversion.
The default attributes are:
* ``model``, the name of the record's model
* ``id`` the id of the record to which the field belongs
* ``type`` the logical field type (widget, may not match the field's
``type``, may not be any Field subclass name)
* ``translate``, a boolean flag (``0`` or ``1``) denoting whether the
field is translatable
* ``readonly``, has this attribute if the field is readonly
* ``expression``, the original expression
:returns: OrderedDict (attribute name, attribute value).
"""
data = OrderedDict()
field = record._fields[field_name]
if not options['inherit_branding'] and not options['translate']:
return data
data['data-oe-model'] = record._name
data['data-oe-id'] = record.id
data['data-oe-field'] = field.name
data['data-oe-type'] = options.get('type')
data['data-oe-expression'] = options.get('expression')
if field.readonly:
data['data-oe-readonly'] = 1
return data
@api.model
def value_to_html(self, value, options):
""" value_to_html(value, field, options=None)
Converts a single value to its HTML version/output
:rtype: unicode
"""
return html_escape(unicodifier(value) or u'', options)
@api.model
def record_to_html(self, record, field_name, options):
""" record_to_html(record, field_name, options)
Converts the specified field of the browse_record ``record`` to HTML
:rtype: unicode
"""
if not record:
return False
value = record[field_name]
return False if value is False else record.env[self._name].value_to_html(value, options=options)
@api.model
def user_lang(self):
""" user_lang()
Fetches the res.lang record corresponding to the language code stored
in the user's context. Fallbacks to en_US if no lang is present in the
context *or the language code is not valid*.
:returns: res.lang browse_record
"""
lang_code = self._context.get('lang') or 'en_US'
return self.env['res.lang']._lang_get(lang_code)
class IntegerConverter(models.AbstractModel):
_name = 'ir.qweb.field.integer'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
return unicodifier(self.user_lang().format('%d', value, grouping=True))
class FloatConverter(models.AbstractModel):
_name = 'ir.qweb.field.float'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if 'decimal_precision' in options:
precision = self.env['decimal.precision'].search([('name', '=', options['decimal_precision'])]).digits
else:
precision = options['precision']
if precision is None:
fmt = '%f'
else:
value = float_utils.float_round(value, precision_digits=precision)
fmt = '%.{precision}f'.format(precision=precision)
formatted = self.user_lang().format(fmt, value, grouping=True)
# %f does not strip trailing zeroes. %g does but its precision causes
# it to switch to scientific notation starting at a million *and* to
# strip decimals. So use %f and if no precision was specified manually
# strip trailing 0.
if precision is None:
formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)
return unicodifier(formatted)
@api.model
def record_to_html(self, record, field_name, options):
if 'precision' not in options and 'decimal_precision' not in options:
_, precision = record._fields[field_name].digits or (None, None)
options = dict(options, precision=precision)
return super(FloatConverter, self).record_to_html(record, field_name, options)
class DateConverter(models.AbstractModel):
_name = 'ir.qweb.field.date'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value or len(value) < 10:
return ''
lang = self.user_lang()
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value[:10])
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = lang.date_format
pattern = posix_to_ldml(strftime_pattern, locale=locale)
return babel.dates.format_date(value, format=pattern, locale=locale)
class DateTimeConverter(models.AbstractModel):
_name = 'ir.qweb.field.datetime'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return ''
lang = self.user_lang()
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value)
value = fields.Datetime.context_timestamp(self, value)
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = (u"%s %s" % (lang.date_format, lang.time_format))
pattern = posix_to_ldml(strftime_pattern, locale=locale)
if options and options.get('hide_seconds'):
pattern = pattern.replace(":ss", "").replace(":s", "")
return unicodifier(babel.dates.format_datetime(value, format=pattern, locale=locale))
class TextConverter(models.AbstractModel):
_name = 'ir.qweb.field.text'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
"""
        Escapes the value and converts newlines to <br> tags.
"""
return nl2br(html_escape(value, options)) if value else ''
class SelectionConverter(models.AbstractModel):
_name = 'ir.qweb.field.selection'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return ''
return html_escape(unicodifier(options['selection'][value]) or u'', options)
@api.model
def record_to_html(self, record, field_name, options):
if 'selection' not in options:
options = dict(options, selection=dict(record._fields[field_name].get_description(self.env)['selection']))
return super(SelectionConverter, self).record_to_html(record, field_name, options)
class ManyToOneConverter(models.AbstractModel):
_name = 'ir.qweb.field.many2one'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return False
value = value.sudo().display_name
if not value:
return False
return nl2br(html_escape(value, options)) if value else ''
class HTMLConverter(models.AbstractModel):
_name = 'ir.qweb.field.html'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
return unicodifier(value) or u''
class ImageConverter(models.AbstractModel):
""" ``image`` widget rendering, inserts a data:uri-using image tag in the
document. May be overridden by e.g. the website module to generate links
instead.
.. todo:: what happens if different output need different converters? e.g.
reports may need embedded images or FS links whereas website
needs website-aware
"""
_name = 'ir.qweb.field.image'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
try:
image = Image.open(StringIO(value.decode('base64')))
image.verify()
except IOError:
raise ValueError("Non-image binary fields can not be converted to HTML")
except: # image.verify() throws "suitable exceptions", I have no idea what they are
raise ValueError("Invalid image content")
return unicodifier('<img src="data:%s;base64,%s">' % (Image.MIME[image.format], value))
class MonetaryConverter(models.AbstractModel):
""" ``monetary`` converter, has a mandatory option
``display_currency`` only if field is not of type Monetary.
Otherwise, if we are in presence of a monetary field, the field definition must
have a currency_field attribute set.
The currency is used for formatting *and rounding* of the float value. It
is assumed that the linked res_currency has a non-empty rounding value and
res.currency's ``round`` method is used to perform rounding.
.. note:: the monetary converter internally adds the qweb context to its
options mapping, so that the context is available to callees.
It's set under the ``_values`` key.
"""
_name = 'ir.qweb.field.monetary'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
display_currency = options['display_currency']
# lang.format mandates a sprintf-style format. These formats are non-
# minimal (they have a default fixed precision instead), and
# lang.format will not set one by default. currency.round will not
# provide one either. So we need to generate a precision value
# (integer > 0) from the currency's rounding (a float generally < 1.0).
fmt = "%.{0}f".format(display_currency.decimal_places)
if options.get('from_currency'):
value = options['from_currency'].compute(value, display_currency)
lang = self.user_lang()
formatted_amount = lang.format(fmt, display_currency.round(value),
grouping=True, monetary=True).replace(r' ', u'\N{NO-BREAK SPACE}')
pre = post = u''
if display_currency.position == 'before':
pre = u'{symbol}\N{NO-BREAK SPACE}'.format(symbol=display_currency.symbol or '')
else:
post = u'\N{NO-BREAK SPACE}{symbol}'.format(symbol=display_currency.symbol or '')
return u'{pre}<span class="oe_currency_value">{0}</span>{post}'.format(formatted_amount, pre=pre, post=post)
@api.model
def record_to_html(self, record, field_name, options):
options = dict(options)
#currency should be specified by monetary field
field = record._fields[field_name]
if not options.get('display_currency') and field.type == 'monetary' and field.currency_field:
options['display_currency'] = record[field.currency_field]
return self.value_to_html(record[field_name], options)
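# Editor's note: an illustrative sketch, not part of the original module.
# For a non-Monetary field, display_currency must be supplied explicitly
# (base.USD is just an example currency record reference):
#
#   options = {'display_currency': env.ref('base.USD')}
#   env['ir.qweb.field.monetary'].value_to_html(1234.5, options)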
TIMEDELTA_UNITS = (
('year', 3600 * 24 * 365),
('month', 3600 * 24 * 30),
('week', 3600 * 24 * 7),
('day', 3600 * 24),
('hour', 3600),
('minute', 60),
('second', 1)
)
class DurationConverter(models.AbstractModel):
""" ``duration`` converter, to display integral or fractional values as
human-readable time spans (e.g. 1.5 as "1 hour 30 minutes").
Can be used on any numerical field.
Has a mandatory option ``unit`` which can be one of ``second``, ``minute``,
``hour``, ``day``, ``week`` or ``year``, used to interpret the numerical
field value before converting it.
Sub-second values will be ignored.
"""
_name = 'ir.qweb.field.duration'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
units = dict(TIMEDELTA_UNITS)
if value < 0:
raise ValueError(_("Durations can't be negative"))
if not options or options.get('unit') not in units:
raise ValueError(_("A unit must be provided to duration widgets"))
locale = babel.Locale.parse(self.user_lang().code)
factor = units[options['unit']]
sections = []
r = value * factor
if options.get('round') in units:
round_to = units[options['round']]
r = round(r / round_to) * round_to
for unit, secs_per_unit in TIMEDELTA_UNITS:
v, r = divmod(r, secs_per_unit)
if not v:
continue
section = babel.dates.format_timedelta(
v*secs_per_unit, threshold=1, locale=locale)
if section:
sections.append(section)
return u' '.join(sections)
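# Editor's note: an illustrative sketch, not part of the original module.
# Matching the docstring above, rendering 1.5 with unit 'hour' yields
# "1 hour 30 minutes" for an en_US user language:
#
#   env['ir.qweb.field.duration'].value_to_html(1.5, {'unit': 'hour'})
#   # -> u'1 hour 30 minutes'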
class RelativeDatetimeConverter(models.AbstractModel):
_name = 'ir.qweb.field.relative'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
locale = babel.Locale.parse(self.user_lang().code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value)
# value should be a naive datetime in UTC. So is fields.Datetime.now()
reference = fields.Datetime.from_string(options['now'])
return unicodifier(babel.dates.format_timedelta(value - reference, add_direction=True, locale=locale))
@api.model
def record_to_html(self, record, field_name, options):
if 'now' not in options:
options = dict(options, now=record._fields[field_name].now())
return super(RelativeDatetimeConverter, self).record_to_html(record, field_name, options)
class Contact(models.AbstractModel):
_name = 'ir.qweb.field.contact'
_inherit = 'ir.qweb.field.many2one'
@api.model
def value_to_html(self, value, options):
if not value.exists():
return False
opf = options and options.get('fields') or ["name", "address", "phone", "mobile", "fax", "email"]
value = value.sudo().with_context(show_address=True)
name_get = value.name_get()[0][1]
val = {
'name': name_get.split("\n")[0],
'address': escape("\n".join(name_get.split("\n")[1:])).strip(),
'phone': value.phone,
'mobile': value.mobile,
'fax': value.fax,
'city': value.city,
'country_id': value.country_id.display_name,
'website': value.website,
'email': value.email,
'fields': opf,
'object': value,
'options': options
}
return self.env['ir.qweb'].render('base.contact', val)
class QwebView(models.AbstractModel):
_name = 'ir.qweb.field.qweb'
_inherit = 'ir.qweb.field.many2one'
@api.model
def record_to_html(self, record, field_name, options):
if not getattr(record, field_name):
return None
view = getattr(record, field_name)
if view._name != "ir.ui.view":
_logger.warning("%s.%s must be a 'ir.ui.view' model." % (record, field_name))
return None
view = view.with_context(object=record)
return unicodifier(view.render(view._context, engine='ir.qweb'))
|
chienlieu2017/it_management
|
odoo/odoo/addons/base/ir/ir_qweb/fields.py
|
Python
|
gpl-3.0
| 16,571
|
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **GUI Test Cases.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'marco@opengis.ch'
__revision__ = '$Format:%H$'
__date__ = '19/05/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import unittest
import os
import logging
from qgis.core import QgsMapLayerRegistry
from safe.impact_functions import register_impact_functions
from safe.test.utilities import (
set_canvas_crs,
set_jakarta_extent,
GEOCRS,
load_standard_layers,
setup_scenario,
canvas_list,
get_qgis_app)
# AG: get_qgis_app() should be called before importing modules from
# safe.gui.widgets.dock
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
from safe.gui.widgets.dock import Dock
DOCK = Dock(IFACE)
LOGGER = logging.getLogger('InaSAFE')
# noinspection PyArgumentList
class PostprocessorManagerTest(unittest.TestCase):
"""Test the postprocessor manager"""
# noinspection PyPep8Naming
def setUp(self):
"""Fixture run before all tests"""
os.environ['LANG'] = 'en'
DOCK.show_only_visible_layers_flag = True
load_standard_layers(DOCK)
DOCK.cboHazard.setCurrentIndex(0)
DOCK.cboExposure.setCurrentIndex(0)
DOCK.cboFunction.setCurrentIndex(0)
DOCK.run_in_thread_flag = False
DOCK.show_only_visible_layers_flag = False
DOCK.set_layer_from_title_flag = False
DOCK.zoom_to_impact_flag = False
DOCK.hide_exposure_flag = False
DOCK.show_intermediate_layers = False
set_jakarta_extent()
register_impact_functions()
def tearDown(self):
"""Run after each test."""
# Let's use a fresh registry, canvas, and dock for each test!
QgsMapLayerRegistry.instance().removeAllMapLayers()
DOCK.cboHazard.clear()
DOCK.cboExposure.clear()
# noinspection PyMethodMayBeStatic
def test_check_postprocessing_layers_visibility(self):
"""Generated layers are not added to the map registry."""
# Explicitly disable showing intermediate layers
DOCK.show_intermediate_layers = False
# with KAB_NAME aggregation attribute defined in .keyword using
# kabupaten_jakarta_singlepart.shp
result, message = setup_scenario(
DOCK,
hazard='Continuous Flood',
exposure='Population',
function_id='FloodEvacuationRasterHazardFunction',
aggregation_layer=u"Dístríct's of Jakarta")
set_jakarta_extent(dock=DOCK)
assert result, message
# LOGGER.info("Registry list before:\n%s" %
# QgsMapLayerRegistry.instance().mapLayers())
# one layer (the impact) should have been added
expected_count = len(CANVAS.layers()) + 1
#
# Press RUN
DOCK.accept()
        # no KW dialog will popup due to complete keywords
after_count = len(CANVAS.layers())
# LOGGER.info("Registry list after:\n%s" %
# QgsMapLayerRegistry.instance().mapLayers())
message = (
'Expected %s items in canvas, got %s' %
(expected_count, after_count))
assert expected_count == after_count, message
# Now run again showing intermediate layers
DOCK.show_intermediate_layers = True
# Press RUN
DOCK.accept()
# no KW dialog will popup due to complete keywords
        # two more layers (impact + intermediate) should have been added
expected_count += 2
after_count = len(CANVAS.layers())
LOGGER.info("Canvas list after:\n %s" % canvas_list())
message = (
'Expected %s items in canvas, got %s' %
(expected_count, after_count))
# We expect two more since we enabled showing intermediate layers
assert expected_count == after_count, message
# noinspection PyMethodMayBeStatic
def test_post_processor_output(self):
"""Check that the post processor does not add spurious report rows."""
# with KAB_NAME aggregation attribute defined in .keyword using
# kabupaten_jakarta_singlepart.shp
result, message = setup_scenario(
DOCK,
hazard='Continuous Flood',
exposure='Population',
function_id='FloodEvacuationRasterHazardFunction')
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
set_jakarta_extent()
assert result, message
# Press RUN
DOCK.accept()
message = 'Spurious 0 filled rows added to post processing report.'
result = DOCK.wvResults.page().currentFrame().toPlainText()
for line in result.split('\n'):
if 'Entire area' in line:
tokens = str(line).split('\t')
tokens = tokens[1:]
total = 0
for token in tokens:
total += float(token.replace(',', ''))
assert total != 0, message
if __name__ == '__main__':
suite = unittest.makeSuite(PostprocessorManagerTest)
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
|
wonder-sk/inasafe
|
safe/impact_statistics/test/test_postprocessor_manager.py
|
Python
|
gpl-3.0
| 5,518
|
# (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import base64
import imp
import json
import os
import shlex
import zipfile
from io import BytesIO
# from Ansible
from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.unicode import to_bytes, to_unicode
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
from ansible.plugins import strategy
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
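# Placeholder markers that modules may embed in their source; they are detected
# and substituted in _find_snippet_imports() below when the module payload is
# assembled.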
REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = b"# POWERSHELL_COMMON"
REPLACER_JSONARGS = b"<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
REPLACER_SELINUX = b"<<SELINUX_SPECIAL_FILESYSTEMS>>"
# We could end up writing out parameters with unicode characters so we need to
# specify an encoding for the python source file
ENCODING_STRING = u'# -*- coding: utf-8 -*-'
# we've moved the module_common relative to the snippets, so fix the path
_SNIPPET_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils')
# ******************************************************************************
ZIPLOADER_TEMPLATE = u'''%(shebang)s
%(coding)s
ZIPLOADER_WRAPPER = True # For test-module script to tell this is a ZIPLOADER_WRAPPER
# This code is part of Ansible, but is an independent component.
# The code in this particular templatable string, and this templatable string
# only, is BSD licensed. Modules which end up using this snippet, which is
# dynamically combined together by Ansible still belong to the author of the
# module, and they may assign their own license to the complete work.
#
# Copyright (c), James Cammarata, 2016
# Copyright (c), Toshio Kuratomi, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import base64
import shutil
import zipfile
import tempfile
import subprocess
if sys.version_info < (3,):
bytes = str
PY3 = False
else:
unicode = str
PY3 = True
try:
# Python-2.6+
from io import BytesIO as IOStream
except ImportError:
# Python < 2.6
from StringIO import StringIO as IOStream
ZIPDATA = """%(zipdata)s"""
def invoke_module(module, modlib_path, json_params):
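    # Run the extracted module in a subprocess with the module_utils zip on
    # PYTHONPATH, feed it the JSON parameters on stdin, relay its output and
    # return its exit code.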
pythonpath = os.environ.get('PYTHONPATH')
if pythonpath:
os.environ['PYTHONPATH'] = ':'.join((modlib_path, pythonpath))
else:
os.environ['PYTHONPATH'] = modlib_path
p = subprocess.Popen([%(interpreter)s, module], env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate(json_params)
if not isinstance(stderr, (bytes, unicode)):
stderr = stderr.read()
if not isinstance(stdout, (bytes, unicode)):
stdout = stdout.read()
if PY3:
sys.stderr.buffer.write(stderr)
sys.stdout.buffer.write(stdout)
else:
sys.stderr.write(stderr)
sys.stdout.write(stdout)
return p.returncode
def debug(command, zipped_mod, json_params):
# The code here normally doesn't run. It's only used for debugging on the
# remote machine.
#
# The subcommands in this function make it easier to debug ziploader
# modules. Here's the basic steps:
#
# Run ansible with the environment variable: ANSIBLE_KEEP_REMOTE_FILES=1 and -vvv
# to save the module file remotely::
# $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible host1 -m ping -a 'data=october' -vvv
#
# Part of the verbose output will tell you where on the remote machine the
# module was written to::
# [...]
# <host1> SSH: EXEC ssh -C -q -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o
# PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o ConnectTimeout=10 -o
# ControlPath=/home/badger/.ansible/cp/ansible-ssh-%%h-%%p-%%r -tt rhel7 '/bin/sh -c '"'"'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
# LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping'"'"''
# [...]
#
    # Login to the remote machine and run the module file from the previous
# step with the explode subcommand to extract the module payload into
# source files::
# $ ssh host1
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping explode
# Module expanded into:
# /home/badger/.ansible/tmp/ansible-tmp-1461173408.08-279692652635227/ansible
#
# You can now edit the source files to instrument the code or experiment with
# different parameter values. When you're ready to run the code you've modified
# (instead of the code from the actual zipped module), use the execute subcommand like this::
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping execute
# Okay to use __file__ here because we're running from a kept file
basedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'debug_dir')
args_path = os.path.join(basedir, 'args')
script_path = os.path.join(basedir, 'ansible_module_%(ansible_module)s.py')
if command == 'explode':
# transform the ZIPDATA into an exploded directory of code and then
# print the path to the code. This is an easy way for people to look
# at the code on the remote machine for debugging it in that
# environment
z = zipfile.ZipFile(zipped_mod)
for filename in z.namelist():
if filename.startswith('/'):
raise Exception('Something wrong with this module zip file: should not contain absolute paths')
dest_filename = os.path.join(basedir, filename)
if dest_filename.endswith(os.path.sep) and not os.path.exists(dest_filename):
os.makedirs(dest_filename)
else:
directory = os.path.dirname(dest_filename)
if not os.path.exists(directory):
os.makedirs(directory)
f = open(dest_filename, 'w')
f.write(z.read(filename))
f.close()
# write the args file
f = open(args_path, 'w')
f.write(json_params)
f.close()
print('Module expanded into:')
print('%%s' %% basedir)
exitcode = 0
elif command == 'execute':
# Execute the exploded code instead of executing the module from the
# embedded ZIPDATA. This allows people to easily run their modified
# code on the remote machine to see how changes will affect it.
# This differs slightly from default Ansible execution of Python modules
# as it passes the arguments to the module via a file instead of stdin.
# Set pythonpath to the debug dir
pythonpath = os.environ.get('PYTHONPATH')
if pythonpath:
os.environ['PYTHONPATH'] = ':'.join((basedir, pythonpath))
else:
os.environ['PYTHONPATH'] = basedir
p = subprocess.Popen([%(interpreter)s, script_path, args_path], env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if not isinstance(stderr, (bytes, unicode)):
stderr = stderr.read()
if not isinstance(stdout, (bytes, unicode)):
stdout = stdout.read()
if PY3:
sys.stderr.buffer.write(stderr)
sys.stdout.buffer.write(stdout)
else:
sys.stderr.write(stderr)
sys.stdout.write(stdout)
return p.returncode
elif command == 'excommunicate':
# This attempts to run the module in-process (by importing a main
# function and then calling it). It is not the way ansible generally
# invokes the module so it won't work in every case. It is here to
# aid certain debuggers which work better when the code doesn't change
# from one process to another but there may be problems that occur
# when using this that are only artifacts of how we're invoking here,
# not actual bugs (as they don't affect the real way that we invoke
# ansible modules)
# stub the args and python path
sys.argv = ['%(ansible_module)s', args_path]
sys.path.insert(0, basedir)
from ansible_module_%(ansible_module)s import main
main()
print('WARNING: Module returned to wrapper instead of exiting')
sys.exit(1)
else:
print('WARNING: Unknown debug command. Doing nothing.')
exitcode = 0
return exitcode
if __name__ == '__main__':
#
# See comments in the debug() method for information on debugging
#
ZIPLOADER_PARAMS = %(params)s
if PY3:
ZIPLOADER_PARAMS = ZIPLOADER_PARAMS.encode('utf-8')
try:
# There's a race condition with the controller removing the
# remote_tmpdir and this module executing under async. So we cannot
# store this in remote_tmpdir (use system tempdir instead)
temp_path = tempfile.mkdtemp(prefix='ansible_')
zipped_mod = os.path.join(temp_path, 'ansible_modlib.zip')
modlib = open(zipped_mod, 'wb')
modlib.write(base64.b64decode(ZIPDATA))
modlib.close()
if len(sys.argv) == 2:
exitcode = debug(sys.argv[1], zipped_mod, ZIPLOADER_PARAMS)
else:
z = zipfile.ZipFile(zipped_mod)
module = os.path.join(temp_path, 'ansible_module_%(ansible_module)s.py')
f = open(module, 'wb')
f.write(z.read('ansible_module_%(ansible_module)s.py'))
f.close()
exitcode = invoke_module(module, zipped_mod, ZIPLOADER_PARAMS)
finally:
try:
shutil.rmtree(temp_path)
except OSError:
# tempdir creation probably failed
pass
sys.exit(exitcode)
'''
def _strip_comments(source):
# Strip comments and blank lines from the wrapper
buf = []
for line in source.splitlines():
l = line.strip()
if not l or l.startswith(u'#'):
continue
buf.append(line)
return u'\n'.join(buf)
if C.DEFAULT_KEEP_REMOTE_FILES:
# Keep comments when KEEP_REMOTE_FILES is set. That way users will see
# the comments with some nice usage instructions
ACTIVE_ZIPLOADER_TEMPLATE = ZIPLOADER_TEMPLATE
else:
# ZIPLOADER_TEMPLATE stripped of comments for smaller over the wire size
ACTIVE_ZIPLOADER_TEMPLATE = _strip_comments(ZIPLOADER_TEMPLATE)
class ModuleDepFinder(ast.NodeVisitor):
# Caveats:
# This code currently does not handle:
    # * relative imports from py2.6+, e.g. "from . import urls"
IMPORT_PREFIX_SIZE = len('ansible.module_utils.')
def __init__(self, *args, **kwargs):
"""
Walk the ast tree for the python module.
Save submodule[.submoduleN][.identifier] into self.submodules
self.submodules will end up with tuples like:
- ('basic',)
- ('urls', 'fetch_url')
- ('database', 'postgres')
- ('database', 'postgres', 'quote')
        It's up to calling code to determine whether the final element of the
        dotted strings is a module name or something else (a function, class, or
        variable name)
"""
super(ModuleDepFinder, self).__init__(*args, **kwargs)
self.submodules = set()
def visit_Import(self, node):
# import ansible.module_utils.MODLIB[.MODLIBn] [as asname]
for alias in (a for a in node.names if a.name.startswith('ansible.module_utils.')):
py_mod = alias.name[self.IMPORT_PREFIX_SIZE:]
self.submodules.add((py_mod,))
self.generic_visit(node)
def visit_ImportFrom(self, node):
if node.module.startswith('ansible.module_utils'):
where_from = node.module[self.IMPORT_PREFIX_SIZE:]
if where_from:
# from ansible.module_utils.MODULE1[.MODULEn] import IDENTIFIER [as asname]
# from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [as asname]
# from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [,IDENTIFIER] [as asname]
py_mod = tuple(where_from.split('.'))
for alias in node.names:
self.submodules.add(py_mod + (alias.name,))
else:
# from ansible.module_utils import MODLIB [,MODLIB2] [as asname]
for alias in node.names:
self.submodules.add((alias.name,))
self.generic_visit(node)
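# Read a module_utils support file into memory, raising AnsibleError if the
# file does not exist.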
def _slurp(path):
if not os.path.exists(path):
raise AnsibleError("imported module support code does not exist at %s" % os.path.abspath(path))
fd = open(path, 'rb')
data = fd.read()
fd.close()
return data
def _get_shebang(interpreter, task_vars, args=tuple()):
"""
Note not stellar API:
Returns None instead of always returning a shebang line. Doing it this
way allows the caller to decide to use the shebang it read from the
file rather than trust that we reformatted what they already have
correctly.
"""
interpreter_config = u'ansible_%s_interpreter' % os.path.basename(interpreter).strip()
if interpreter_config not in task_vars:
return (None, interpreter)
interpreter = task_vars[interpreter_config].strip()
shebang = u'#!' + interpreter
if args:
shebang = shebang + u' ' + u' '.join(args)
return (shebang, interpreter)
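# Worked example (hypothetical task_vars, for illustration only):
#   _get_shebang(u'/usr/bin/python',
#                {'ansible_python_interpreter': u'/usr/bin/python3'})
#   returns (u'#!/usr/bin/python3', u'/usr/bin/python3')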
def recursive_finder(name, data, py_module_names, py_module_cache, zf):
"""
    Using ModuleDepFinder, make sure we have all of the module_utils files that
    the module and its module_utils files need.
"""
# Parse the module and find the imports of ansible.module_utils
tree = ast.parse(data)
finder = ModuleDepFinder()
finder.visit(tree)
#
    # Determine which of the imports we've found are modules (vs. class,
    # function, or variable names) for packages
#
normalized_modules = set()
# Loop through the imports that we've found to normalize them
# Exclude paths that match with paths we've already processed
# (Have to exclude them a second time once the paths are processed)
for py_module_name in finder.submodules.difference(py_module_names):
module_info = None
# Check whether either the last or the second to last identifier is
# a module name
for idx in (1, 2):
if len(py_module_name) < idx:
break
try:
module_info = imp.find_module(py_module_name[-idx],
[os.path.join(_SNIPPET_PATH, *py_module_name[:-idx])])
break
except ImportError:
continue
# Could not find the module. Construct a helpful error message.
if module_info is None:
msg = ['Could not find imported module support code for %s. Looked for' % name]
if idx == 2:
msg.append('either %s or %s' % (py_module_name[-1], py_module_name[-2]))
else:
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
if idx == 2:
# We've determined that the last portion was an identifier and
# thus, not part of the module name
py_module_name = py_module_name[:-1]
# If not already processed then we've got work to do
if py_module_name not in py_module_names:
# If not in the cache, then read the file into the cache
# We already have a file handle for the module open so it makes
# sense to read it now
if py_module_name not in py_module_cache:
if module_info[2][2] == imp.PKG_DIRECTORY:
# Read the __init__.py instead of the module file as this is
# a python package
py_module_cache[py_module_name + ('__init__',)] = _slurp(os.path.join(os.path.join(_SNIPPET_PATH, *py_module_name), '__init__.py'))
normalized_modules.add(py_module_name + ('__init__',))
else:
py_module_cache[py_module_name] = module_info[0].read()
module_info[0].close()
normalized_modules.add(py_module_name)
# Make sure that all the packages that this module is a part of
# are also added
for i in range(1, len(py_module_name)):
py_pkg_name = py_module_name[:-i] + ('__init__',)
if py_pkg_name not in py_module_names:
normalized_modules.add(py_pkg_name)
py_module_cache[py_pkg_name] = _slurp('%s.py' % os.path.join(_SNIPPET_PATH, *py_pkg_name))
#
# iterate through all of the ansible.module_utils* imports that we haven't
# already checked for new imports
#
# set of modules that we haven't added to the zipfile
unprocessed_py_module_names = normalized_modules.difference(py_module_names)
for py_module_name in unprocessed_py_module_names:
py_module_path = os.path.join(*py_module_name)
py_module_file_name = '%s.py' % py_module_path
zf.writestr(os.path.join("ansible/module_utils",
py_module_file_name), py_module_cache[py_module_name])
# Add the names of the files we're scheduling to examine in the loop to
# py_module_names so that we don't re-examine them in the next pass
# through recursive_finder()
py_module_names.update(unprocessed_py_module_names)
for py_module_file in unprocessed_py_module_names:
recursive_finder(py_module_file, py_module_cache[py_module_file], py_module_names, py_module_cache, zf)
# Save memory; the file won't have to be read again for this ansible module.
del py_module_cache[py_module_file]
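# Heuristic binary check: report True if the first 1024 bytes contain any
# characters outside a typical text-byte range.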
def _is_binary(module_data):
textchars = bytearray(set([7, 8, 9, 10, 12, 13, 27]) | set(range(0x20, 0x100)) - set([0x7f]))
start = module_data[:1024]
return bool(start.translate(None, textchars))
def _find_snippet_imports(module_name, module_data, module_path, module_args, task_vars, module_compression):
"""
Given the source of the module, convert it to a Jinja2 template to insert
module code and return whether it's a new or old style module.
"""
module_substyle = module_style = 'old'
# module_style is something important to calling code (ActionBase). It
# determines how arguments are formatted (json vs k=v) and whether
# a separate arguments file needs to be sent over the wire.
# module_substyle is extra information that's useful internally. It tells
    # us what we have to look for to substitute in the module files and whether
# we're using module replacer or ziploader to format the module itself.
if _is_binary(module_data):
module_substyle = module_style = 'binary'
elif REPLACER in module_data:
        # Do REPLACER before from ansible.module_utils because we need to make
        # sure we substitute "from ansible.module_utils.basic import *" for REPLACER
module_style = 'new'
module_substyle = 'python'
module_data = module_data.replace(REPLACER, b'from ansible.module_utils.basic import *')
elif b'from ansible.module_utils.' in module_data:
module_style = 'new'
module_substyle = 'python'
elif REPLACER_WINDOWS in module_data:
module_style = 'new'
module_substyle = 'powershell'
elif REPLACER_JSONARGS in module_data:
module_style = 'new'
module_substyle = 'jsonargs'
elif b'WANT_JSON' in module_data:
module_substyle = module_style = 'non_native_want_json'
shebang = None
# Neither old-style, non_native_want_json nor binary modules should be modified
# except for the shebang line (Done by modify_module)
if module_style in ('old', 'non_native_want_json', 'binary'):
return module_data, module_style, shebang
output = BytesIO()
py_module_names = set()
if module_substyle == 'python':
params = dict(ANSIBLE_MODULE_ARGS=module_args,)
python_repred_params = to_bytes(repr(json.dumps(params)), errors='strict')
try:
compression_method = getattr(zipfile, module_compression)
except AttributeError:
display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
compression_method = zipfile.ZIP_STORED
lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ziploader_cache')
cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))
zipdata = None
# Optimization -- don't lock if the module has already been cached
if os.path.exists(cached_module_filename):
display.debug('ZIPLOADER: using cached module: %s' % cached_module_filename)
zipdata = open(cached_module_filename, 'rb').read()
# Fool the check later... I think we should just remove the check
py_module_names.add(('basic',))
else:
if module_name in strategy.action_write_locks:
display.debug('ZIPLOADER: Using lock for %s' % module_name)
lock = strategy.action_write_locks[module_name]
else:
# If the action plugin directly invokes the module (instead of
# going through a strategy) then we don't have a cross-process
# Lock specifically for this module. Use the "unexpected
# module" lock instead
display.debug('ZIPLOADER: Using generic lock for %s' % module_name)
lock = strategy.action_write_locks[None]
display.debug('ZIPLOADER: Acquiring lock')
with lock:
display.debug('ZIPLOADER: Lock acquired: %s' % id(lock))
# Check that no other process has created this while we were
# waiting for the lock
if not os.path.exists(cached_module_filename):
display.debug('ZIPLOADER: Creating module')
# Create the module zip data
zipoutput = BytesIO()
zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
zf.writestr('ansible/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\ntry:\n from ansible.release import __version__,__author__\nexcept ImportError:\n __version__="' + to_bytes(__version__) + b'"\n __author__="' + to_bytes(__author__) + b'"\n')
zf.writestr('ansible/module_utils/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n')
zf.writestr('ansible_module_%s.py' % module_name, module_data)
py_module_cache = { ('__init__',): b'' }
recursive_finder(module_name, module_data, py_module_names, py_module_cache, zf)
zf.close()
zipdata = base64.b64encode(zipoutput.getvalue())
# Write the assembled module to a temp file (write to temp
# so that no one looking for the file reads a partially
# written file)
if not os.path.exists(lookup_path):
# Note -- if we have a global function to setup, that would
# be a better place to run this
os.mkdir(lookup_path)
display.debug('ZIPLOADER: Writing module')
with open(cached_module_filename + '-part', 'w') as f:
f.write(zipdata)
# Rename the file into its final position in the cache so
# future users of this module can read it off the
# filesystem instead of constructing from scratch.
display.debug('ZIPLOADER: Renaming module')
os.rename(cached_module_filename + '-part', cached_module_filename)
display.debug('ZIPLOADER: Done creating module')
if zipdata is None:
display.debug('ZIPLOADER: Reading module after lock')
# Another process wrote the file while we were waiting for
# the write lock. Go ahead and read the data from disk
# instead of re-creating it.
try:
zipdata = open(cached_module_filename, 'rb').read()
except IOError:
raise AnsibleError('A different worker process failed to create module file. Look at traceback for that process for debugging information.')
# Fool the check later... I think we should just remove the check
py_module_names.add(('basic',))
shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars)
if shebang is None:
shebang = u'#!/usr/bin/python'
executable = interpreter.split(u' ', 1)
if len(executable) == 2 and executable[0].endswith(u'env'):
# Handle /usr/bin/env python style interpreter settings
interpreter = u"'{0}', '{1}'".format(*executable)
else:
# Still have to enclose the parts of the interpreter in quotes
# because we're substituting it into the template as a python
# string
interpreter = u"'{0}'".format(interpreter)
output.write(to_bytes(ACTIVE_ZIPLOADER_TEMPLATE % dict(
zipdata=zipdata,
ansible_module=module_name,
params=python_repred_params,
shebang=shebang,
interpreter=interpreter,
coding=ENCODING_STRING,
)))
module_data = output.getvalue()
# Sanity check from 1.x days. Maybe too strict. Some custom python
# modules that use ziploader may implement their own helpers and not
# need basic.py. All the constants that we substituted into basic.py
# for module_replacer are now available in other, better ways.
if ('basic',) not in py_module_names:
raise AnsibleError("missing required import in %s: Did not import ansible.module_utils.basic for boilerplate helper code" % module_path)
elif module_substyle == 'powershell':
# Module replacer for jsonargs and windows
lines = module_data.split(b'\n')
for line in lines:
if REPLACER_WINDOWS in line:
ps_data = _slurp(os.path.join(_SNIPPET_PATH, "powershell.ps1"))
output.write(ps_data)
py_module_names.add((b'powershell',))
continue
output.write(line + b'\n')
module_data = output.getvalue()
module_args_json = to_bytes(json.dumps(module_args))
module_data = module_data.replace(REPLACER_JSONARGS, module_args_json)
# Sanity check from 1.x days. This is currently useless as we only
# get here if we are going to substitute powershell.ps1 into the
# module anyway. Leaving it for when/if we add other powershell
# module_utils files.
if (b'powershell',) not in py_module_names:
raise AnsibleError("missing required import in %s: # POWERSHELL_COMMON" % module_path)
elif module_substyle == 'jsonargs':
module_args_json = to_bytes(json.dumps(module_args))
# these strings could be included in a third-party module but
# officially they were included in the 'basic' snippet for new-style
# python modules (which has been replaced with something else in
        # ziploader). If we remove them from jsonargs-style module replacer
# then we can remove them everywhere.
python_repred_args = to_bytes(repr(module_args_json))
module_data = module_data.replace(REPLACER_VERSION, to_bytes(repr(__version__)))
module_data = module_data.replace(REPLACER_COMPLEX, python_repred_args)
module_data = module_data.replace(REPLACER_SELINUX, to_bytes(','.join(C.DEFAULT_SELINUX_SPECIAL_FS)))
# The main event -- substitute the JSON args string into the module
module_data = module_data.replace(REPLACER_JSONARGS, module_args_json)
facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='strict')
module_data = module_data.replace(b'syslog.LOG_USER', facility)
return (module_data, module_style, shebang)
# ******************************************************************************
def modify_module(module_name, module_path, module_args, task_vars=dict(), module_compression='ZIP_STORED'):
"""
Used to insert chunks of code into modules before transfer rather than
doing regular python imports. This allows for more efficient transfer in
a non-bootstrapping scenario by not moving extra files over the wire and
also takes care of embedding arguments in the transferred modules.
This version is done in such a way that local imports can still be
used in the module code, so IDEs don't have to be aware of what is going on.
Example:
from ansible.module_utils.basic import *
... will result in the insertion of basic.py into the module
from the module_utils/ directory in the source tree.
All modules are required to import at least basic, though there will also
be other snippets.
    For powershell, there are equivalent conventions like this:
# POWERSHELL_COMMON
which results in the inclusion of the common code from powershell.ps1
"""
with open(module_path, 'rb') as f:
# read in the module source
module_data = f.read()
(module_data, module_style, shebang) = _find_snippet_imports(module_name, module_data, module_path, module_args, task_vars, module_compression)
if module_style == 'binary':
return (module_data, module_style, shebang)
elif shebang is None:
lines = module_data.split(b"\n", 1)
if lines[0].startswith(b"#!"):
shebang = lines[0].strip()
args = shlex.split(str(shebang[2:]))
interpreter = args[0]
interpreter = to_bytes(interpreter)
new_shebang = to_bytes(_get_shebang(interpreter, task_vars, args[1:])[0], errors='strict', nonstring='passthru')
if new_shebang:
lines[0] = shebang = new_shebang
if os.path.basename(interpreter).startswith(b'python'):
lines.insert(1, to_bytes(ENCODING_STRING))
else:
# No shebang, assume a binary module?
pass
module_data = b"\n".join(lines)
else:
shebang = to_bytes(shebang, errors='strict')
return (module_data, module_style, shebang)
|
cmvelo/ansible
|
lib/ansible/executor/module_common.py
|
Python
|
gpl-3.0
| 33,348
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_vcmp_guest import Parameters
from library.modules.bigip_vcmp_guest import ModuleManager
from library.modules.bigip_vcmp_guest import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_vcmp_guest import Parameters
from ansible.modules.network.f5.bigip_vcmp_guest import ModuleManager
from ansible.modules.network.f5.bigip_vcmp_guest import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
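# Load a fixture by name from the fixtures directory, caching the (optionally
# JSON-decoded) contents so repeated calls do not re-read the file.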
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso',
mgmt_network='bridged',
mgmt_address='1.2.3.4/24',
vlans=[
'vlan1',
'vlan2'
]
)
p = Parameters(params=args)
assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso'
assert p.mgmt_network == 'bridged'
def test_module_parameters_mgmt_bridged_without_subnet(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4'
)
p = Parameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/32'
def test_module_parameters_mgmt_address_cidr(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4/24'
)
p = Parameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/24'
def test_module_parameters_mgmt_address_subnet(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4/255.255.255.0'
)
p = Parameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/24'
def test_module_parameters_mgmt_route(self):
args = dict(
mgmt_route='1.2.3.4'
)
p = Parameters(params=args)
assert p.mgmt_route == '1.2.3.4'
def test_module_parameters_vcmp_software_image_facts(self):
# vCMP images may include a forward slash in their names. This is probably
# related to the slots on the system, but it is not a valid value to specify
# that slot when providing an initial image
args = dict(
initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso/1',
)
p = Parameters(params=args)
assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso/1'
def test_api_parameters(self):
args = dict(
initialImage="BIGIP-tmos-tier2-13.1.0.0.0.931.iso",
managementGw="2.2.2.2",
managementIp="1.1.1.1/24",
managementNetwork="bridged",
state="deployed",
vlans=[
"/Common/vlan1",
"/Common/vlan2"
]
)
p = Parameters(params=args)
assert p.initial_image == 'BIGIP-tmos-tier2-13.1.0.0.0.931.iso'
assert p.mgmt_route == '2.2.2.2'
assert p.mgmt_address == '1.1.1.1/24'
assert '/Common/vlan1' in p.vlans
assert '/Common/vlan2' in p.vlans
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.patcher1 = patch('time.sleep')
self.patcher1.start()
def tearDown(self):
self.patcher1.stop()
def test_create_vlan(self, *args):
set_module_args(dict(
name="guest1",
mgmt_network="bridged",
mgmt_address="10.10.10.10/24",
initial_image="BIGIP-13.1.0.0.0.931.iso",
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.create_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=False)
mm.is_deployed = Mock(side_effect=[False, True, True, True, True])
mm.deploy_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['name'] == 'guest1'
|
alexlo03/ansible
|
test/units/modules/network/f5/test_bigip_vcmp_guest.py
|
Python
|
gpl-3.0
| 5,742
|
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import json, traceback
from PyQt4.Qt import QDialogButtonBox
from calibre.gui2 import error_dialog, warning_dialog
from calibre.gui2.preferences import ConfigWidgetBase, test_widget
from calibre.gui2.preferences.template_functions_ui import Ui_Form
from calibre.gui2.widgets import PythonHighlighter
from calibre.utils.formatter_functions import (formatter_functions,
compile_user_function, load_user_template_functions)
class ConfigWidget(ConfigWidgetBase, Ui_Form):
def genesis(self, gui):
self.gui = gui
self.db = gui.library_view.model().db
help_text = _('''
<p>Here you can add and remove functions used in template processing. A
template function is written in python. It takes information from the
book, processes it in some way, then returns a string result. Functions
defined here are usable in templates in the same way that builtin
functions are usable. The function must be named <b>evaluate</b>, and
must have the signature shown below.</p>
<p><code>evaluate(self, formatter, kwargs, mi, locals, your parameters)
→ returning a unicode string</code></p>
<p>The parameters of the evaluate function are:
<ul>
<li><b>formatter</b>: the instance of the formatter being used to
evaluate the current template. You can use this to do recursive
template evaluation.</li>
<li><b>kwargs</b>: a dictionary of metadata. Field values are in this
dictionary.
<li><b>mi</b>: a Metadata instance. Used to get field information.
This parameter can be None in some cases, such as when evaluating
non-book templates.</li>
<li><b>locals</b>: the local variables assigned to by the current
template program.</li>
<li><b>your parameters</b>: You must supply one or more formal
parameters. The number must match the arg count box, unless arg count is
        -1 (variable number of arguments), in which case the last argument must
be *args. At least one argument is required, and is usually the value of
the field being operated upon. Note that when writing in basic template
mode, the user does not provide this first argument. Instead it is
supplied by the formatter.</li>
</ul></p>
<p>
The following example function checks the value of the field. If the
field is not empty, the field's value is returned, otherwise the value
EMPTY is returned.
<pre>
name: my_ifempty
arg count: 1
doc: my_ifempty(val) -- return val if it is not empty, otherwise the string 'EMPTY'
program code:
def evaluate(self, formatter, kwargs, mi, locals, val):
if val:
return val
else:
return 'EMPTY'</pre>
This function can be called in any of the three template program modes:
<ul>
<li>single-function mode: {tags:my_ifempty()}</li>
<li>template program mode: {tags:'my_ifempty($)'}</li>
<li>general program mode: program: my_ifempty(field('tags'))</li>
        </ul></p>
''')
self.textBrowser.setHtml(help_text)
def initialize(self):
try:
self.builtin_source_dict = json.loads(P('template-functions.json', data=True,
allow_user_override=False).decode('utf-8'))
except:
traceback.print_exc()
self.builtin_source_dict = {}
self.funcs = formatter_functions().get_functions()
self.builtins = formatter_functions().get_builtins_and_aliases()
self.build_function_names_box()
self.function_name.currentIndexChanged[str].connect(self.function_index_changed)
self.function_name.editTextChanged.connect(self.function_name_edited)
self.argument_count.valueChanged.connect(self.enable_replace_button)
self.documentation.textChanged.connect(self.enable_replace_button)
self.program.textChanged.connect(self.enable_replace_button)
self.create_button.clicked.connect(self.create_button_clicked)
self.delete_button.clicked.connect(self.delete_button_clicked)
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
self.replace_button.setEnabled(False)
self.clear_button.clicked.connect(self.clear_button_clicked)
self.replace_button.clicked.connect(self.replace_button_clicked)
self.program.setTabStopWidth(20)
self.highlighter = PythonHighlighter(self.program.document())
def enable_replace_button(self):
self.replace_button.setEnabled(self.delete_button.isEnabled())
def clear_button_clicked(self):
self.build_function_names_box()
self.program.clear()
self.documentation.clear()
self.argument_count.clear()
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
def build_function_names_box(self, scroll_to='', set_to=''):
self.function_name.blockSignals(True)
func_names = sorted(self.funcs)
self.function_name.clear()
self.function_name.addItem('')
self.function_name.addItems(func_names)
self.function_name.setCurrentIndex(0)
if set_to:
self.function_name.setEditText(set_to)
self.create_button.setEnabled(True)
self.function_name.blockSignals(False)
if scroll_to:
idx = self.function_name.findText(scroll_to)
if idx >= 0:
self.function_name.setCurrentIndex(idx)
if scroll_to not in self.builtins:
self.delete_button.setEnabled(True)
def delete_button_clicked(self):
name = unicode(self.function_name.currentText())
if name in self.builtins:
error_dialog(self.gui, _('Template functions'),
_('You cannot delete a built-in function'), show=True)
if name in self.funcs:
del self.funcs[name]
self.changed_signal.emit()
self.create_button.setEnabled(True)
self.delete_button.setEnabled(False)
self.build_function_names_box(set_to=name)
self.program.setReadOnly(False)
else:
error_dialog(self.gui, _('Template functions'),
_('Function not defined'), show=True)
def create_button_clicked(self):
self.changed_signal.emit()
name = unicode(self.function_name.currentText())
if name in self.funcs:
error_dialog(self.gui, _('Template functions'),
_('Name %s already used')%(name,), show=True)
return
if self.argument_count.value() == 0:
box = warning_dialog(self.gui, _('Template functions'),
_('Argument count should be -1 or greater than zero. '
'Setting it to zero means that this function cannot '
'be used in single function mode.'), det_msg = '',
show=False)
box.bb.setStandardButtons(box.bb.standardButtons() | QDialogButtonBox.Cancel)
box.det_msg_toggle.setVisible(False)
if not box.exec_():
return
try:
prog = unicode(self.program.toPlainText())
cls = compile_user_function(name, unicode(self.documentation.toPlainText()),
self.argument_count.value(), prog)
self.funcs[name] = cls
self.build_function_names_box(scroll_to=name)
except:
error_dialog(self.gui, _('Template functions'),
_('Exception while compiling function'), show=True,
det_msg=traceback.format_exc())
def function_name_edited(self, txt):
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
self.create_button.setEnabled(True)
self.replace_button.setEnabled(False)
self.program.setReadOnly(False)
def function_index_changed(self, txt):
txt = unicode(txt)
self.create_button.setEnabled(False)
if not txt:
self.argument_count.clear()
self.documentation.clear()
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
return
func = self.funcs[txt]
self.argument_count.setValue(func.arg_count)
self.documentation.setText(func.doc)
if txt in self.builtins:
if hasattr(func, 'program_text') and func.program_text:
self.program.setPlainText(func.program_text)
elif txt in self.builtin_source_dict:
self.program.setPlainText(self.builtin_source_dict[txt])
else:
self.program.setPlainText(_('function source code not available'))
self.documentation.setReadOnly(True)
self.argument_count.setReadOnly(True)
self.program.setReadOnly(True)
self.delete_button.setEnabled(False)
else:
self.program.setPlainText(func.program_text)
self.delete_button.setEnabled(True)
self.program.setReadOnly(False)
self.replace_button.setEnabled(False)
def replace_button_clicked(self):
self.delete_button_clicked()
self.create_button_clicked()
def refresh_gui(self, gui):
pass
def commit(self):
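        # Persist the user-defined functions (built-ins excluded) to the library
        # preferences, then reload them so the changes take effect immediately.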
# formatter_functions().reset_to_builtins()
pref_value = []
for name, cls in self.funcs.iteritems():
if name not in self.builtins:
pref_value.append((cls.name, cls.doc, cls.arg_count, cls.program_text))
self.db.prefs.set('user_template_functions', pref_value)
load_user_template_functions(self.db.library_id, pref_value)
return False
if __name__ == '__main__':
from PyQt4.Qt import QApplication
app = QApplication([])
test_widget('Advanced', 'TemplateFunctions')
|
insomnia-lab/calibre
|
src/calibre/gui2/preferences/template_functions.py
|
Python
|
gpl-3.0
| 10,331
|
# --------------------------------------------------------------------------- #
# CUBECOLOURDIALOG Widget wxPython IMPLEMENTATION
#
# Python Code By:
#
# Andrea Gavana, @ 16 Aug 2007
# Latest Revision: 14 Apr 2010, 12.00 GMT
#
#
# TODO List
#
# 1. Find A Way To Reduce Flickering On The 2 ColourPanels;
#
# 2. See Why wx.GCDC Doesn't Work As I Thought (!). It Looks Slow As A Turtle,
# But Probably I Am Doing Something Wrong While Painting The Alpha Textures.
#
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# andrea.gavana@gmail.com
# gavana@kpo.kz
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------- #
"""
CubeColourDialog is an alternative implementation of `wx.ColourDialog`.
Description
===========
The CubeColourDialog is an alternative implementation of `wx.ColourDialog`, and it
offers different functionalities with respect to the default wxPython one. It
can be used as a replacement of `wx.ColourDialog` with exactly the same syntax and
methods.
Some features:
- RGB components may be controlled using spin controls or with mouse gestures
on a 3D RGB cube, with the 3 components laying on the X, Y, Z axes;
- HSB components may be controlled using spin controls or with mouse gestures
on a 2D colour wheel;
- Brightness has its own vertical slider to play with;
- The colour alpha channel can be controlled using another vertical slider, or
via spin control;
- The colour alpha channel controls can be completely hidden at startup or the
choice to use the alpha channel can be left to the user while playing with the
dialog, via a simple `wx.CheckBox`;
- The "old colour" and "new colour" are displayed in two small custom panels,
which support alpha transparency and texture;
- CubeColourDialog displays also the HTML colour code in hexadecimal format;
- When available, a corresponding "Web Safe" colour is generated using a 500
web colours "database" (a dictionary inside the widget source code). Web Safe
colours are recognized by all the browsers;
- When available, a corresponding "HTML name" for the selected colour is displayed,
by using the same 500 web colours "database";
- When available, a corresponding "Microsoft Access Code" for the selected colour
is displayed, by using the same 500 web colours "database".
And much more.
Window Styles
=============
This class supports the following window styles:
================== =========== ==================================================
Window Styles Hex Value Description
================== =========== ==================================================
``CCD_SHOW_ALPHA`` 0x1 Show the widget used to control colour alpha channels in `CubeColourDialog`.
================== =========== ==================================================
Events Processing
=================
`No custom events are available for this class.`
License And Version
===================
CubeColourDialog is distributed under the wxPython license.
Latest Revision: Andrea Gavana @ 14 Apr 2010, 12.00 GMT
Version 0.3.
"""
__docformat__ = "epytext"
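# A minimal usage sketch (an assumption based on the wx.ColourDialog protocol
# the docstring above says this class follows; not taken from this module):
#
#     dlg = CubeColourDialog(parent)
#     if dlg.ShowModal() == wx.ID_OK:
#         colour = dlg.GetColourData().GetColour()
#     dlg.Destroy()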
#----------------------------------------------------------------------
# Beginning Of CUBECOLOURDIALOG wxPython Code
#----------------------------------------------------------------------
import wx
import colorsys
from math import pi, sin, cos, sqrt, atan2
from wx.lib.embeddedimage import PyEmbeddedImage
# Define a translation string
_ = wx.GetTranslation
# Show the alpha control in the dialog
CCD_SHOW_ALPHA = 1
""" Show the widget used to control colour alpha channels in `CubeColourDialog`. """
# Radius of the HSB colour wheel
RADIUS = 100
""" Radius of the HSB colour wheel. """
# Width of the mouse-controlled colour pointer
RECT_WIDTH = 5
""" Width of the mouse-controlled colour pointer. """
# Dictionary keys for the RGB colour cube
RED, GREEN, BLUE = 0, 1, 2
""" Dictionary keys for the RGB colour cube. """
Vertex = wx.Point(95, 109)
Top = wx.Point(95, 10)
Left = wx.Point(16, 148)
Right = wx.Point(174, 148)
colourAttributes = ["r", "g", "b", "h", "s", "v"]
colourMaxValues = [255, 255, 255, 359, 255, 255]
checkColour = wx.Colour(200, 200, 200)
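# Web colours "database": maps an HTML hex code to [colour name,
# Microsoft Access code, web safe equivalent ('' when no web safe match)].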
HTMLCodes = {'#B0171F': ['Indian red', '2037680', ''],
'#DC143C': ['Crimson', '3937500', '#CC0033'],
'#FFB6C1': ['Lightpink', '12695295', '#FFCCCC'],
'#FFAEB9': ['Lightpink 1', '12168959', ''],
'#EEA2AD': ['Lightpink 2', '11379438', ''],
'#CD8C95': ['Lightpink 3', '9800909', ''],
'#8B5F65': ['Lightpink 4', '6643595', ''],
'#FFC0CB': ['Pink', '13353215', '#FFCCCC'],
'#FFB5C5': ['Pink 1', '12957183', ''],
'#EEA9B8': ['Pink 2', '12102126', ''],
'#CD919E': ['Pink 3', '10392013', ''],
'#8B636C': ['Pink 4', '7103371', ''],
'#DB7093': ['Palevioletred', '9662683', '#CC6699'],
'#FF82AB': ['Palevioletred 1', '11240191', ''],
'#EE799F': ['Palevioletred 2', '10451438', ''],
'#CD6889': ['Palevioletred 3', '9005261', ''],
'#8B475D': ['Palevioletred 4', '6113163', ''],
'#FFF0F5': ['Lavenderblush 1 (lavenderblush)', '16118015', '#FFFFFF'],
'#EEE0E5': ['Lavenderblush 2', '15065326', ''],
'#CDC1C5': ['Lavenderblush 3', '12960205', ''],
'#8B8386': ['Lavenderblush 4', '8815499', ''],
'#FF3E96': ['Violetred 1', '9846527', ''],
'#EE3A8C': ['Violetred 2', '9190126', ''],
'#CD3278': ['Violetred 3', '7877325', ''],
'#8B2252': ['Violetred 4', '5382795', ''],
'#FF69B4': ['Hotpink', '11823615', '#FF66CC'],
'#FF6EB4': ['Hotpink 1', '11824895', ''],
'#EE6AA7': ['Hotpink 2', '10971886', ''],
'#CD6090': ['Hotpink 3', '9461965', ''],
'#8B3A62': ['Hotpink 4', '6437515', ''],
'#872657': ['Raspberry', '5711495', ''],
'#FF1493': ['Deeppink 1 (deeppink)', '9639167', '#FF0099'],
'#EE1289': ['Deeppink 2', '8983278', ''],
'#CD1076': ['Deeppink 3', '7737549', ''],
'#8B0A50': ['Deeppink 4', '5245579', ''],
'#FF34B3': ['Maroon 1', '11744511', ''],
'#EE30A7': ['Maroon 2', '10957038', ''],
'#CD2990': ['Maroon 3', '9447885', ''],
'#8B1C62': ['Maroon 4', '6429835', ''],
'#C71585': ['Mediumvioletred', '8721863', '#CC0066'],
'#D02090': ['Violetred', '9445584', ''],
'#DA70D6': ['Orchid', '14053594', '#CC66CC'],
'#FF83FA': ['Orchid 1', '16417791', ''],
'#EE7AE9': ['Orchid 2', '15301358', ''],
'#CD69C9': ['Orchid 3', '13199821', ''],
'#8B4789': ['Orchid 4', '8996747', ''],
'#D8BFD8': ['Thistle', '14204888', '#CCCCCC'],
'#FFE1FF': ['Thistle 1', '16769535', ''],
'#EED2EE': ['Thistle 2', '15651566', ''],
'#CDB5CD': ['Thistle 3', '13481421', ''],
'#8B7B8B': ['Thistle 4', '9141131', ''],
'#FFBBFF': ['Plum 1', '16759807', ''],
'#EEAEEE': ['Plum 2', '15642350', ''],
'#CD96CD': ['Plum 3', '13473485', ''],
'#8B668B': ['Plum 4', '9135755', ''],
'#DDA0DD': ['Plum', '14524637', '#CC99CC'],
'#EE82EE': ['Violet', '15631086', '#FF99FF'],
'#FF00FF': ['Magenta (fuchsia)', '16711935', '#FF00FF'],
'#EE00EE': ['Magenta 2', '15597806', ''],
'#CD00CD': ['Magenta 3', '13435085', ''],
'#8B008B': ['Magenta 4 (darkmagenta)', '9109643', '#990099'],
'#800080': ['Purple', '8388736', '#990099'],
'#BA55D3': ['Mediumorchid', '13850042', '#CC66CC'],
'#E066FF': ['Mediumorchid 1', '16738016', ''],
'#D15FEE': ['Mediumorchid 2', '15622097', ''],
'#B452CD': ['Mediumorchid 3', '13456052', ''],
'#7A378B': ['Mediumorchid 4', '9123706', ''],
'#9400D3': ['Darkviolet', '13828244', '#9900CC'],
'#9932CC': ['Darkorchid', '13382297', '#9933CC'],
'#BF3EFF': ['Darkorchid 1', '16727743', ''],
'#B23AEE': ['Darkorchid 2', '15612594', ''],
'#9A32CD': ['Darkorchid 3', '13447834', ''],
'#68228B': ['Darkorchid 4', '9118312', ''],
'#4B0082': ['Indigo', '8519755', '#330099'],
'#8A2BE2': ['Blueviolet', '14822282', '#9933FF'],
'#9B30FF': ['Purple 1', '16724123', ''],
'#912CEE': ['Purple 2', '15608977', ''],
'#7D26CD': ['Purple 3', '13444733', ''],
'#551A8B': ['Purple 4', '9116245', ''],
'#9370DB': ['Mediumpurple', '14381203', '#9966CC'],
'#AB82FF': ['Mediumpurple 1', '16745131', ''],
'#9F79EE': ['Mediumpurple 2', '15628703', ''],
'#8968CD': ['Mediumpurple 3', '13461641', ''],
'#5D478B': ['Mediumpurple 4', '9127773', ''],
'#483D8B': ['Darkslateblue', '9125192', '#333399'],
'#8470FF': ['Lightslateblue', '16740484', ''],
'#7B68EE': ['Mediumslateblue', '15624315', '#6666FF'],
'#6A5ACD': ['Slateblue', '13458026', '#6666CC'],
'#836FFF': ['Slateblue 1', '16740227', ''],
'#7A67EE': ['Slateblue 2', '15624058', ''],
'#6959CD': ['Slateblue 3', '13457769', ''],
'#473C8B': ['Slateblue 4', '9124935', ''],
'#F8F8FF': ['Ghostwhite', '16775416', '#FFFFFF'],
'#E6E6FA': ['Lavender', '16443110', '#FFFFFF'],
'#0000FF': ['Blue', '16711680', '#0000FF'],
'#0000EE': ['Blue 2', '15597568', ''],
'#0000CD': ['Blue 3 (mediumblue)', '13434880', '#0000CC'],
'#00008B': ['Blue 4 (darkblue)', '9109504', '#000099'],
'#000080': ['Navy', '8388608', '#000099'],
'#191970': ['Midnightblue', '7346457', '#000066'],
'#3D59AB': ['Cobalt', '11229501', ''],
'#4169E1': ['Royalblue', '14772545', '#3366CC'],
'#4876FF': ['Royalblue 1', '16741960', ''],
'#436EEE': ['Royalblue 2', '15625795', ''],
'#3A5FCD': ['Royalblue 3', '13459258', ''],
'#27408B': ['Royalblue 4', '9125927', ''],
'#6495ED': ['Cornflowerblue', '15570276', '#6699FF'],
'#B0C4DE': ['Lightsteelblue', '14599344', '#99CCCC'],
'#CAE1FF': ['Lightsteelblue 1', '16769482', ''],
'#BCD2EE': ['Lightsteelblue 2', '15651516', ''],
'#A2B5CD': ['Lightsteelblue 3', '13481378', ''],
'#6E7B8B': ['Lightsteelblue 4', '9141102', ''],
'#778899': ['Lightslategray', '10061943', '#669999'],
'#708090': ['Slategray', '9470064', '#669999'],
'#C6E2FF': ['Slategray 1', '16769734', ''],
'#B9D3EE': ['Slategray 2', '15651769', ''],
'#9FB6CD': ['Slategray 3', '13481631', ''],
'#6C7B8B': ['Slategray 4', '9141100', ''],
'#1E90FF': ['Dodgerblue 1 (dodgerblue)', '16748574', '#3399FF'],
'#1C86EE': ['Dodgerblue 2', '15631900', ''],
'#1874CD': ['Dodgerblue 3', '13464600', ''],
'#104E8B': ['Dodgerblue 4', '9129488', ''],
'#F0F8FF': ['Aliceblue', '16775408', '#FFFFFF'],
'#4682B4': ['Steelblue', '11829830', '#3399CC'],
'#63B8FF': ['Steelblue 1', '16758883', ''],
'#5CACEE': ['Steelblue 2', '15641692', ''],
'#4F94CD': ['Steelblue 3', '13472847', ''],
'#36648B': ['Steelblue 4', '9135158', ''],
'#87CEFA': ['Lightskyblue', '16436871', '#99CCFF'],
'#B0E2FF': ['Lightskyblue 1', '16769712', ''],
'#A4D3EE': ['Lightskyblue 2', '15651748', ''],
'#8DB6CD': ['Lightskyblue 3', '13481613', ''],
'#607B8B': ['Lightskyblue 4', '9141088', ''],
'#87CEFF': ['Skyblue 1', '16764551', ''],
'#7EC0EE': ['Skyblue 2', '15646846', ''],
'#6CA6CD': ['Skyblue 3', '13477484', ''],
'#4A708B': ['Skyblue 4', '9138250', ''],
'#87CEEB': ['Skyblue', '15453831', '#99CCFF'],
'#00BFFF': ['Deepskyblue 1 (deepskyblue)', '16760576', '#00CCFF'],
'#00B2EE': ['Deepskyblue 2', '15643136', ''],
'#009ACD': ['Deepskyblue 3', '13474304', ''],
'#00688B': ['Deepskyblue 4', '9136128', ''],
'#33A1C9': ['Peacock', '13214003', ''],
'#ADD8E6': ['Lightblue', '15128749', '#99CCFF'],
'#BFEFFF': ['Lightblue 1', '16773055', ''],
'#B2DFEE': ['Lightblue 2', '15654834', ''],
'#9AC0CD': ['Lightblue 3', '13484186', ''],
'#68838B': ['Lightblue 4', '9143144', ''],
'#B0E0E6': ['Powderblue', '15130800', '#CCCCFF'],
'#98F5FF': ['Cadetblue 1', '16774552', ''],
'#8EE5EE': ['Cadetblue 2', '15656334', ''],
'#7AC5CD': ['Cadetblue 3', '13485434', ''],
'#53868B': ['Cadetblue 4', '9143891', ''],
'#00F5FF': ['Turquoise 1', '16774400', ''],
'#00E5EE': ['Turquoise 2', '15656192', ''],
'#00C5CD': ['Turquoise 3', '13485312', ''],
'#00868B': ['Turquoise 4', '9143808', ''],
'#5F9EA0': ['Cadetblue', '10526303', '#669999'],
'#00CED1': ['Darkturquoise', '13749760', '#00CCCC'],
'#F0FFFF': ['Azure 1 (azure)', '16777200', '#FFFFFF'],
'#E0EEEE': ['Azure 2', '15658720', ''],
'#C1CDCD': ['Azure 3', '13487553', ''],
'#838B8B': ['Azure 4', '9145219', ''],
'#E0FFFF': ['Lightcyan 1 (lightcyan)', '16777184', '#CCFFFF'],
'#D1EEEE': ['Lightcyan 2', '15658705', ''],
'#B4CDCD': ['Lightcyan 3', '13487540', ''],
'#7A8B8B': ['Lightcyan 4', '9145210', ''],
'#BBFFFF': ['Paleturquoise 1', '16777147', ''],
'#AEEEEE': ['Paleturquoise 2 (paleturquoise)', '15658670', ''],
'#96CDCD': ['Paleturquoise 3', '13487510', ''],
'#668B8B': ['Paleturquoise 4', '9145190', ''],
'#2F4F4F': ['Darkslategray', '5197615', '#336666'],
'#97FFFF': ['Darkslategray 1', '16777111', ''],
'#8DEEEE': ['Darkslategray 2', '15658637', ''],
'#79CDCD': ['Darkslategray 3', '13487481', ''],
'#528B8B': ['Darkslategray 4', '9145170', ''],
'#00FFFF': ['Cyan / aqua', '16776960', '#00FFFF'],
'#00EEEE': ['Cyan 2', '15658496', ''],
'#00CDCD': ['Cyan 3', '13487360', ''],
'#008B8B': ['Cyan 4 (darkcyan)', '9145088', '#009999'],
'#008080': ['Teal', '8421376', '#009999'],
'#48D1CC': ['Mediumturquoise', '13422920', '#33CCCC'],
'#20B2AA': ['Lightseagreen', '11186720', '#339999'],
'#03A89E': ['Manganeseblue', '10397699', ''],
'#40E0D0': ['Turquoise', '13688896', '#33CCCC'],
'#808A87': ['Coldgrey', '8882816', ''],
'#00C78C': ['Turquoiseblue', '9225984', ''],
'#7FFFD4': ['Aquamarine 1 (aquamarine)', '13959039', '#66FFCC'],
'#76EEC6': ['Aquamarine 2', '13037174', ''],
'#66CDAA': ['Aquamarine 3 (mediumaquamarine)', '11193702', '#66CC99'],
'#458B74': ['Aquamarine 4', '7637829', ''],
'#00FA9A': ['Mediumspringgreen', '10156544', '#00FF99'],
'#F5FFFA': ['Mintcream', '16449525', '#FFFFFF'],
'#00FF7F': ['Springgreen', '8388352', '#00FF66'],
'#00EE76': ['Springgreen 1', '7794176', ''],
'#00CD66': ['Springgreen 2', '6737152', ''],
'#008B45': ['Springgreen 3', '4557568', ''],
'#3CB371': ['Mediumseagreen', '7451452', '#33CC66'],
'#54FF9F': ['Seagreen 1', '10485588', ''],
'#4EEE94': ['Seagreen 2', '9760334', ''],
'#43CD80': ['Seagreen 3', '8441155', ''],
'#2E8B57': ['Seagreen 4 (seagreen)', '5737262', '#339966'],
'#00C957': ['Emeraldgreen', '5753088', ''],
'#BDFCC9': ['Mint', '13237437', ''],
'#3D9140': ['Cobaltgreen', '4231485', ''],
'#F0FFF0': ['Honeydew 1 (honeydew)', '15794160', '#FFFFFF'],
'#E0EEE0': ['Honeydew 2', '14741216', ''],
'#C1CDC1': ['Honeydew 3', '12701121', ''],
'#838B83': ['Honeydew 4', '8620931', ''],
'#8FBC8F': ['Darkseagreen', '9419919', '#99CC99'],
'#C1FFC1': ['Darkseagreen 1', '12713921', ''],
'#B4EEB4': ['Darkseagreen 2', '11857588', ''],
'#9BCD9B': ['Darkseagreen 3', '10210715', ''],
'#698B69': ['Darkseagreen 4', '6916969', ''],
'#98FB98': ['Palegreen', '10025880', '#99FF99'],
'#9AFF9A': ['Palegreen 1', '10157978', ''],
'#90EE90': ['Palegreen 2 (lightgreen)', '9498256', '#99FF99'],
'#7CCD7C': ['Palegreen 3', '8179068', ''],
'#548B54': ['Palegreen 4', '5540692', ''],
'#32CD32': ['Limegreen', '3329330', '#33CC33'],
'#228B22': ['Forestgreen', '2263842', '#339933'],
'#00FF00': ['Green 1 (lime)', '65280', '#00FF00'],
'#00EE00': ['Green 2', '60928', ''],
'#00CD00': ['Green 3', '52480', ''],
'#008B00': ['Green 4', '35584', ''],
'#008000': ['Green', '32768', '#009900'],
'#006400': ['Darkgreen', '25600', '#006600'],
'#308014': ['Sapgreen', '1343536', ''],
'#7CFC00': ['Lawngreen', '64636', '#66FF00'],
'#7FFF00': ['Chartreuse 1 (chartreuse)', '65407', '#66FF00'],
'#76EE00': ['Chartreuse 2', '61046', ''],
'#66CD00': ['Chartreuse 3', '52582', ''],
'#458B00': ['Chartreuse 4', '35653', ''],
'#ADFF2F': ['Greenyellow', '3145645', '#99FF33'],
'#CAFF70': ['Darkolivegreen 1', '7405514', ''],
'#BCEE68': ['Darkolivegreen 2', '6876860', ''],
'#A2CD5A': ['Darkolivegreen 3', '5950882', ''],
'#6E8B3D': ['Darkolivegreen 4', '4033390', ''],
'#556B2F': ['Darkolivegreen', '3107669', '#666633'],
'#6B8E23': ['Olivedrab', '2330219', '#669933'],
'#C0FF3E': ['Olivedrab 1', '4128704', ''],
'#B3EE3A': ['Olivedrab 2', '3862195', ''],
'#9ACD32': ['Olivedrab 3 (yellowgreen)', '3329434', '#99CC33'],
'#698B22': ['Olivedrab 4', '2263913', ''],
'#FFFFF0': ['Ivory 1 (ivory)', '15794175', '#FFFFFF'],
'#EEEEE0': ['Ivory 2', '14741230', ''],
'#CDCDC1': ['Ivory 3', '12701133', ''],
'#8B8B83': ['Ivory 4', '8620939', ''],
'#F5F5DC': ['Beige', '14480885', '#FFFFCC'],
'#FFFFE0': ['Lightyellow 1 (lightyellow)', '14745599', '#FFFFFF'],
'#EEEED1': ['Lightyellow 2', '13758190', ''],
'#CDCDB4': ['Lightyellow 3', '11849165', ''],
'#8B8B7A': ['Lightyellow 4', '8031115', ''],
'#FAFAD2': ['Lightgoldenrodyellow', '13826810', '#FFFFCC'],
'#FFFF00': ['Yellow 1 (yellow)', '65535', '#FFFF00'],
'#EEEE00': ['Yellow 2', '61166', ''],
'#CDCD00': ['Yellow 3', '52685', ''],
'#8B8B00': ['Yellow 4', '35723', ''],
'#808069': ['Warmgrey', '6914176', ''],
'#808000': ['Olive', '32896', '#999900'],
'#BDB76B': ['Darkkhaki', '7059389', '#CCCC66'],
'#FFF68F': ['Khaki 1', '9434879', ''],
'#EEE685': ['Khaki 2', '8775406', ''],
'#CDC673': ['Khaki 3', '7587533', ''],
'#8B864E': ['Khaki 4', '5146251', ''],
'#F0E68C': ['Khaki', '9234160', ''],
'#EEE8AA': ['Palegoldenrod', '11200750', '#FFFF99'],
'#FFFACD': ['Lemonchiffon 1 (lemonchiffon)', '13499135', '#FFFFCC'],
'#EEE9BF': ['Lemonchiffon 2', '12577262', ''],
'#CDC9A5': ['Lemonchiffon 3', '10865101', ''],
'#8B8970': ['Lemonchiffon 4', '7375243', ''],
'#FFEC8B': ['Lightgoldenrod 1', '9170175', ''],
'#EEDC82': ['Lightgoldenrod 2', '8576238', ''],
'#CDBE70': ['Lightgoldenrod 3', '7388877', ''],
'#8B814C': ['Lightgoldenrod 4', '5013899', ''],
'#E3CF57': ['Banana', '5754851', ''],
'#FFD700': ['Gold 1 (gold)', '55295', '#FFCC00'],
'#EEC900': ['Gold 2', '51694', ''],
'#CDAD00': ['Gold 3', '44493', ''],
'#8B7500': ['Gold 4', '30091', ''],
'#FFF8DC': ['Cornsilk 1 (cornsilk)', '14481663', '#FFFFCC'],
'#EEE8CD': ['Cornsilk 2', '13494510', ''],
'#CDC8B1': ['Cornsilk 3', '11651277', ''],
'#8B8878': ['Cornsilk 4', '7899275', ''],
'#DAA520': ['Goldenrod', '2139610', '#CC9933'],
'#FFC125': ['Goldenrod 1', '2474495', ''],
'#EEB422': ['Goldenrod 2', '2274542', ''],
'#CD9B1D': ['Goldenrod 3', '1940429', ''],
'#8B6914': ['Goldenrod 4', '1337739', ''],
'#B8860B': ['Darkgoldenrod', '755384', '#CC9900'],
'#FFB90F': ['Darkgoldenrod 1', '1030655', ''],
'#EEAD0E': ['Darkgoldenrod 2', '962030', ''],
'#CD950C': ['Darkgoldenrod 3', '824781', ''],
'#8B6508': ['Darkgoldenrod 4', '550283', ''],
'#FFA500': ['Orange 1 (orange)', '42495', '#FF9900'],
'#EE9A00': ['Orange 2', '39662', ''],
'#CD8500': ['Orange 3', '34253', ''],
'#8B5A00': ['Orange 4', '23179', ''],
'#FFFAF0': ['Floralwhite', '15792895', '#FFFFFF'],
'#FDF5E6': ['Oldlace', '15136253', '#FFFFFF'],
'#F5DEB3': ['Wheat', '11788021', '#FFCCCC'],
'#FFE7BA': ['Wheat 1', '12249087', ''],
'#EED8AE': ['Wheat 2', '11458798', ''],
'#CDBA96': ['Wheat 3', '9878221', ''],
'#8B7E66': ['Wheat 4', '6717067', ''],
'#FFE4B5': ['Moccasin', '11920639', '#FFCCCC'],
'#FFEFD5': ['Papayawhip', '14020607', '#FFFFCC'],
'#FFEBCD': ['Blanchedalmond', '13495295', '#FFFFCC'],
'#FFDEAD': ['Navajowhite 1 (navajowhite)', '11394815', '#FFCC99'],
'#EECFA1': ['Navajowhite 2', '10604526', ''],
'#CDB38B': ['Navajowhite 3', '9155533', ''],
'#8B795E': ['Navajowhite 4', '6191499', ''],
'#FCE6C9': ['Eggshell', '13231868', ''],
'#D2B48C': ['Tan', '9221330', '#CCCC99'],
'#9C661F': ['Brick', '2057884', ''],
'#FF9912': ['Cadmiumyellow', '1219071', ''],
'#FAEBD7': ['Antiquewhite', '14150650', '#FFFFCC'],
'#FFEFDB': ['Antiquewhite 1', '14413823', ''],
'#EEDFCC': ['Antiquewhite 2', '13426670', ''],
'#CDC0B0': ['Antiquewhite 3', '11583693', ''],
'#8B8378': ['Antiquewhite 4', '7897995', ''],
'#DEB887': ['Burlywood', '8894686', '#CCCC99'],
'#FFD39B': ['Burlywood 1', '10212351', ''],
'#EEC591': ['Burlywood 2', '9553390', ''],
'#CDAA7D': ['Burlywood 3', '8235725', ''],
'#8B7355': ['Burlywood 4', '5600139', ''],
'#FFE4C4': ['Bisque 1 (bisque)', '12903679', '#FFFFCC'],
'#EED5B7': ['Bisque 2', '12047854', ''],
'#CDB79E': ['Bisque 3', '10401741', ''],
'#8B7D6B': ['Bisque 4', '7044491', ''],
'#E3A869': ['Melon', '6924515', ''],
'#ED9121': ['Carrot', '2200045', ''],
'#FF8C00': ['Darkorange', '36095', '#FF9900'],
'#FF7F00': ['Darkorange 1', '32767', ''],
'#EE7600': ['Darkorange 2', '30446', ''],
'#CD6600': ['Darkorange 3', '26317', ''],
'#8B4500': ['Darkorange 4', '17803', ''],
'#FF8000': ['Orange', '33023', ''],
'#FFA54F': ['Tan 1', '5219839', ''],
'#EE9A49': ['Tan 2', '4823790', ''],
'#CD853F': ['Tan 3 (peru)', '4163021', '#CC9933'],
'#8B5A2B': ['Tan 4', '2841227', ''],
'#FAF0E6': ['Linen', '15134970', '#FFFFFF'],
'#FFDAB9': ['Peachpuff 1 (peachpuff)', '12180223', '#FFCCCC'],
'#EECBAD': ['Peachpuff 2', '11389934', ''],
'#CDAF95': ['Peachpuff 3', '9809869', ''],
'#8B7765': ['Peachpuff 4', '6649739', ''],
'#FFF5EE': ['Seashell 1 (seashell)', '15660543', '#FFFFFF'],
'#EEE5DE': ['Seashell 2', '14607854', ''],
'#CDC5BF': ['Seashell 3', '12568013', ''],
'#8B8682': ['Seashell 4', '8554123', ''],
'#F4A460': ['Sandybrown', '6333684', '#FF9966'],
'#C76114': ['Rawsienna', '1335751', ''],
'#D2691E': ['Chocolate', '1993170', '#CC6633'],
'#FF7F24': ['Chocolate 1', '2392063', ''],
'#EE7621': ['Chocolate 2', '2193134', ''],
'#CD661D': ['Chocolate 3', '1926861', ''],
'#8B4513': ['Chocolate 4 (saddlebrown)', '1262987', '#993300'],
'#292421': ['Ivoryblack', '2171945', ''],
'#FF7D40': ['Flesh', '4226559', ''],
'#FF6103': ['Cadmiumorange', '221695', ''],
'#8A360F': ['Burntsienna', '997002', ''],
'#A0522D': ['Sienna', '2970272', '#996633'],
'#FF8247': ['Sienna 1', '4686591', ''],
'#EE7942': ['Sienna 2', '4356590', ''],
'#CD6839': ['Sienna 3', '3762381', ''],
'#8B4726': ['Sienna 4', '2508683', ''],
'#FFA07A': ['Lightsalmon 1 (lightsalmon)', '8036607', '#FF9966'],
'#EE9572': ['Lightsalmon 2', '7509486', ''],
'#CD8162': ['Lightsalmon 3', '6455757', ''],
'#8B5742': ['Lightsalmon 4', '4347787', ''],
'#FF7F50': ['Coral', '5275647', '#FF6666'],
'#FF4500': ['Orangered 1 (orangered)', '17919', '#FF3300'],
'#EE4000': ['Orangered 2', '16622', ''],
'#CD3700': ['Orangered 3', '14285', ''],
'#8B2500': ['Orangered 4', '9611', ''],
'#5E2612': ['Sepia', '1189470', ''],
'#E9967A': ['Darksalmon', '8034025', '#FF9966'],
'#FF8C69': ['Salmon 1', '6917375', ''],
'#EE8262': ['Salmon 2', '6456046', ''],
'#CD7054': ['Salmon 3', '5533901', ''],
'#8B4C39': ['Salmon 4', '3755147', ''],
'#FF7256': ['Coral 1', '5665535', ''],
'#EE6A50': ['Coral 2', '5270254', ''],
'#CD5B45': ['Coral 3', '4545485', ''],
'#8B3E2F': ['Coral 4', '3096203', ''],
'#8A3324': ['Burntumber', '2372490', ''],
'#FF6347': ['Tomato 1 (tomato)', '4678655', '#FF6633'],
'#EE5C42': ['Tomato 2', '4349166', ''],
'#CD4F39': ['Tomato 3', '3755981', ''],
'#8B3626': ['Tomato 4', '2504331', ''],
'#FA8072': ['Salmon', '7504122', '#FF9966'],
'#FFE4E1': ['Mistyrose 1 (mistyrose)', '14804223', '#FFCCFF'],
'#EED5D2': ['Mistyrose 2', '13817326', ''],
'#CDB7B5': ['Mistyrose 3', '11909069', ''],
'#8B7D7B': ['Mistyrose 4', '8093067', ''],
'#FFFAFA': ['Snow 1 (snow)', '16448255', '#FFFFFF'],
'#EEE9E9': ['Snow 2', '15329774', ''],
'#CDC9C9': ['Snow 3', '13224397', ''],
'#8B8989': ['Snow 4', '9013643', ''],
'#BC8F8F': ['Rosybrown', '9408444', '#CC9999'],
'#FFC1C1': ['Rosybrown 1', '12698111', ''],
'#EEB4B4': ['Rosybrown 2', '11842798', ''],
'#CD9B9B': ['Rosybrown 3', '10197965', ''],
'#8B6969': ['Rosybrown 4', '6908299', ''],
'#F08080': ['Lightcoral', '8421616', '#FF9999'],
'#CD5C5C': ['Indianred', '6053069', '#CC6666'],
'#FF6A6A': ['Indianred 1', '6974207', ''],
'#EE6363': ['Indianred 2', '6513646', ''],
'#8B3A3A': ['Indianred 4', '3816075', ''],
'#CD5555': ['Indianred 3', '5592525', ''],
'#A52A2A': ['Brown', '2763429', '#993333'],
'#FF4040': ['Brown 1', '4210943', ''],
'#EE3B3B': ['Brown 2', '3881966', ''],
'#CD3333': ['Brown 3', '3355597', ''],
'#8B2323': ['Brown 4', '2302859', ''],
'#B22222': ['Firebrick', '2237106', '#993333'],
'#FF3030': ['Firebrick 1', '3158271', ''],
'#EE2C2C': ['Firebrick 2', '2895086', ''],
'#CD2626': ['Firebrick 3', '2500301', ''],
'#8B1A1A': ['Firebrick 4', '1710731', ''],
'#FF0000': ['Red 1 (red)', '255', '#FF0000'],
'#EE0000': ['Red 2', '238', ''],
'#CD0000': ['Red 3', '205', ''],
'#8B0000': ['Red 4 (darkred)', '139', '#990000'],
'#800000': ['Maroon', '128', '#990000'],
'#8E388E': ['Sgi beet', '9320590', ''],
'#7171C6': ['Sgi slateblue', '13005169', ''],
'#7D9EC0': ['Sgi lightblue', '12623485', ''],
'#388E8E': ['Sgi teal', '9342520', ''],
'#71C671': ['Sgi chartreuse', '7456369', ''],
'#8E8E38': ['Sgi olivedrab', '3706510', ''],
'#C5C1AA': ['Sgi brightgray', '11190725', ''],
'#C67171': ['Sgi salmon', '7434694', ''],
'#555555': ['Sgi darkgray', '5592405', ''],
'#1E1E1E': ['Sgi gray 12', '1973790', ''],
'#282828': ['Sgi gray 16', '2631720', ''],
'#515151': ['Sgi gray 32', '5329233', ''],
'#5B5B5B': ['Sgi gray 36', '5987163', ''],
'#848484': ['Sgi gray 52', '8684676', ''],
'#8E8E8E': ['Sgi gray 56', '9342606', ''],
'#AAAAAA': ['Sgi lightgray', '11184810', ''],
'#B7B7B7': ['Sgi gray 72', '12040119', ''],
'#C1C1C1': ['Sgi gray 76', '12698049', ''],
'#EAEAEA': ['Sgi gray 92', '15395562', ''],
'#F4F4F4': ['Sgi gray 96', '16053492', ''],
'#FFFFFF': ['White', '16777215', '#FFFFFF'],
'#F5F5F5': ['White smoke (gray)', '16119285', '#FFFFFF'],
'#DCDCDC': ['Gainsboro', '14474460', '#CCCCCC'],
'#D3D3D3': ['Lightgrey', '13882323', '#CCCCCC'],
'#C0C0C0': ['Silver', '12632256', '#CCCCCC'],
'#A9A9A9': ['Darkgray', '11119017', '#999999'],
'#808080': ['Gray', '8421504', ''],
'#696969': ['Dimgray (gray 42)', '6908265', '#666666'],
'#000000': ['Black', '0', '#000000'],
'#FCFCFC': ['Gray 99', '16579836', ''],
'#FAFAFA': ['Gray 98', '16448250', ''],
'#F7F7F7': ['Gray 97', '16250871', ''],
'#F2F2F2': ['Gray 95', '15921906', ''],
'#F0F0F0': ['Gray 94', '15790320', ''],
'#EDEDED': ['Gray 93', '15592941', ''],
'#EBEBEB': ['Gray 92', '15461355', ''],
'#E8E8E8': ['Gray 91', '15263976', ''],
'#E5E5E5': ['Gray 90', '15066597', ''],
'#E3E3E3': ['Gray 89', '14935011', ''],
'#E0E0E0': ['Gray 88', '14737632', ''],
'#DEDEDE': ['Gray 87', '14606046', ''],
'#DBDBDB': ['Gray 86', '14408667', ''],
'#D9D9D9': ['Gray 85', '14277081', ''],
'#D6D6D6': ['Gray 84', '14079702', ''],
'#D4D4D4': ['Gray 83', '13948116', ''],
'#D1D1D1': ['Gray 82', '13750737', ''],
'#CFCFCF': ['Gray 81', '13619151', ''],
'#CCCCCC': ['Gray 80', '13421772', ''],
'#C9C9C9': ['Gray 79', '13224393', ''],
'#C7C7C7': ['Gray 78', '13092807', ''],
'#C4C4C4': ['Gray 77', '12895428', ''],
'#C2C2C2': ['Gray 76', '12763842', ''],
'#BFBFBF': ['Gray 75', '12566463', ''],
'#BDBDBD': ['Gray 74', '12434877', ''],
'#BABABA': ['Gray 73', '12237498', ''],
'#B8B8B8': ['Gray 72', '12105912', ''],
'#B5B5B5': ['Gray 71', '11908533', ''],
'#B3B3B3': ['Gray 70', '11776947', ''],
'#B0B0B0': ['Gray 69', '11579568', ''],
'#ADADAD': ['Gray 68', '11382189', ''],
'#ABABAB': ['Gray 67', '11250603', ''],
'#A8A8A8': ['Gray 66', '11053224', ''],
'#A6A6A6': ['Gray 65', '10921638', ''],
'#A3A3A3': ['Gray 64', '10724259', ''],
'#A1A1A1': ['Gray 63', '10592673', ''],
'#9E9E9E': ['Gray 62', '10395294', ''],
'#9C9C9C': ['Gray 61', '10263708', ''],
'#999999': ['Gray 60', '10066329', ''],
'#969696': ['Gray 59', '9868950', ''],
'#949494': ['Gray 58', '9737364', ''],
'#919191': ['Gray 57', '9539985', ''],
'#8F8F8F': ['Gray 56', '9408399', ''],
'#8C8C8C': ['Gray 55', '9211020', ''],
'#8A8A8A': ['Gray 54', '9079434', ''],
'#878787': ['Gray 53', '8882055', ''],
'#858585': ['Gray 52', '8750469', ''],
'#828282': ['Gray 51', '8553090', ''],
'#7F7F7F': ['Gray 50', '8355711', ''],
'#7D7D7D': ['Gray 49', '8224125', ''],
'#7A7A7A': ['Gray 48', '8026746', ''],
'#787878': ['Gray 47', '7895160', ''],
'#757575': ['Gray 46', '7697781', ''],
'#737373': ['Gray 45', '7566195', ''],
'#707070': ['Gray 44', '7368816', ''],
'#6E6E6E': ['Gray 43', '7237230', ''],
'#6B6B6B': ['Gray 42', '7039851', ''],
'#696969': ['Dimgray (gray 42)', '6908265', '#666666'],
'#666666': ['Gray 40', '6710886', ''],
'#636363': ['Gray 39', '6513507', ''],
'#616161': ['Gray 38', '6381921', ''],
'#5E5E5E': ['Gray 37', '6184542', ''],
'#5C5C5C': ['Gray 36', '6052956', ''],
'#595959': ['Gray 35', '5855577', ''],
'#575757': ['Gray 34', '5723991', ''],
'#545454': ['Gray 33', '5526612', ''],
'#525252': ['Gray 32', '5395026', ''],
'#4F4F4F': ['Gray 31', '5197647', ''],
'#4D4D4D': ['Gray 30', '5066061', ''],
'#4A4A4A': ['Gray 29', '4868682', ''],
'#474747': ['Gray 28', '4671303', ''],
'#454545': ['Gray 27', '4539717', ''],
'#424242': ['Gray 26', '4342338', ''],
'#404040': ['Gray 25', '4210752', ''],
'#3D3D3D': ['Gray 24', '4013373', ''],
'#3B3B3B': ['Gray 23', '3881787', ''],
'#383838': ['Gray 22', '3684408', ''],
'#363636': ['Gray 21', '3552822', ''],
'#333333': ['Gray 20', '3355443', ''],
'#303030': ['Gray 19', '3158064', ''],
'#2E2E2E': ['Gray 18', '3026478', ''],
'#2B2B2B': ['Gray 17', '2829099', ''],
'#292929': ['Gray 16', '2697513', ''],
'#262626': ['Gray 15', '2500134', ''],
'#242424': ['Gray 14', '2368548', ''],
'#212121': ['Gray 13', '2171169', ''],
'#1F1F1F': ['Gray 12', '2039583', ''],
'#1C1C1C': ['Gray 11', '1842204', ''],
'#1A1A1A': ['Gray 10', '1710618', ''],
'#171717': ['Gray 9', '1513239', ''],
'#141414': ['Gray 8', '1315860', ''],
'#121212': ['Gray 7', '1184274', ''],
'#0F0F0F': ['Gray 6', '986895', ''],
'#0D0D0D': ['Gray 5', '855309', ''],
'#0A0A0A': ['Gray 4', '657930', ''],
'#080808': ['Gray 3', '526344', ''],
'#050505': ['Gray 2', '328965', ''],
'#030303': ['Gray 1', '197379', ''],
}
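# Note on the dictionary above: each entry maps an "#RRGGBB" hex string to a list of
# [colour name, colour value as a decimal string (stored in BGR byte order, e.g.
# "#87CEFA" -> 0xFACE87 == 16436871), and what appears to be the nearest web-safe
# hex colour, or an empty string when none is given].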
HSVWheelImage = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAMoAAADJCAIAAADtkzpRAAAAA3NCSVQICAjb4U/gAAAgAElE"
"QVR4nO2decgsz3fWn6q5MWLcgsaoqAiKRBM14gJuNCj0T9SIuEVRiEIwSDSCElFJBPegKGqI"
"a1QigiYuqFHQEeU3RtSgRjQ/0ASXgBqXuGBihPh7u45/VJ1znnOqet737vd+vzZz5/b09PQs"
"/ennPPVUdb/lox/9KP7/NE1/ddsrUIELwDPLhz/qdn3bn/cdnZ697Q/wNqcv3/YSWCkVYg+B"
"Akhfs9zdzse2/YzC7/fhJu/DhdcfIJ4uKBWlQApQUAtQgIJyQa2QAikoFejrF5qpNKPLRV/e"
"bx1KKcB/3XZm7rt+yGj74OP1O+MOrkQSEVONGABAqQMjQw3pVlczypkogvZCKQAg/4c/zIcA"
"tQ8mXl+07cUlysWps6U7/lJdbAroWSMGSmGZFCuyFTBK8xFQ0XsBofYdH1DUPlB4fWHwUh2p"
"XgGdrV4K6yQ/3WwV4mmpVcuHzBlp2HhISJnCSULtuyhq/+uDxdkHAa8v2HYMrapa1IrVO8SC"
"aPMYDzscJULDy2Wug5OeSYJskjShNZk2CXoG+WRtJfzXDwRn7zdenzcO+nJBKa49Nm8qVVS9"
"2G+xShUlrJj8KCt9axmyOnGWwJpkbKYtsGVSB0iBfL9tvwD/4T2H7L3E61eQXFE5qxGvoVDk"
"t4wnLoKdp6KK1TDm+wsby9iZA5u9fPL1NepZHUsWbPFTQPtBKmb/5v3k7D3D65cOz97lyg3W"
"dDPpKlC/VRQROFgueBXA8PulogINQMWFKHnUk61FS99FCKzCYJGS+ZIyVhti9sO2/QL8y/cN"
"svcGr1+odbAGfeoy47e5RCaGdE2k0AuQilrHHkXFpasXF9NYNOeayA/npuLa1BfHsUUxk1nM"
"Pn3bL8C/eH8gew/w+rnaP6MKFG59JxZNrBSRZMKG/CAs7I7KoGy6/VZRFbJnBU1V5FTAuNn4"
"mPcKNbEGti4EFrQuG3NjBaB95rZfgH/6PkD2TuP1szS+wgSWOqpagysflisVyhpnVLeKalXR"
"GKybrWcF0msrRn1c+K1owpK1ytlEnUQrJhSt4KLS2FTPSvH7PAO0n7DtF+AfvtuQvaN47dsO"
"4JkSoykDsxWqJLkrhsmNv+UU+vBiu0pTDClOZyWGvNlYI2RpPvn38yqZwrAOetawdKs0o35O"
"gPaTt/0C/L13FbL6+CpvfNq3nwnUilr8Vgou/VbxrISn+u1CM12HLiXf14JL9flaUCs+wdaB"
"vkv1V10KatW3ps+QHvJbX+ztani7S0WtY2v8kWz7NX02XTiW18VvMpzA9lP3t73T1tO7pV77"
"9nMwpKVC7Y7mVW62pgyiFpccbzYWFz9vKvbmoYqc0Ba6KoyaeHHjHwRsKovB4MdsIgRdNeuZ"
"mffQhGSDX9FUZU3Vmq7jVRIQQLaftKPh9o/eLRl7V/Dat58PoAzfw7EQusTa8VqASFiNuZf3"
"/Bhn9HKptNocpery7utr9bJ152bNSV/znC2HCQu2rFAK2a95BloiW0VFA1qDCKRtP37HgdvX"
"vSuQvRN47dtnY3QwLw9os1k1YpSNF2kVNx5BAuZUEYh93lSnGh9wrxa817KbiIdLMEyRvGZa"
"GK29ffd0dLXkwLrxwlCy0qmCVIhACqQBsv3o/fbP3wnC3j5e+/ZLVflhyXVsohsoNeZbhXDp"
"hc/vuXrGyL6S6+fuSFgfpb324t1KbdatabhEkqi5A7u3T/n4AbUw/Itzi9JqYkUrmrLWEaMI"
"RCCAiN4wZOwz9tvH3j5hbxOvffscaCOOG1NxB/QffSgWAljcYGQZQySsL0kRvzGUZkL3UZ8i"
"lC0JGHNWc1lEiSTN6Vd1sFizuUUplAAPyIbxkjZEqzU0QWuQijZkbPvhOw7cvvFtQvbW8Nq3"
"zyWnFX76pGEFwCCsQxPMFht/NvIIUT6rFEsaqFb6vMX6FaWqbqUY7MRmGW3By9OSJGbLh4tI"
"gqyYrWw1UYACaYMzljG07Yfut3/91gh7O3jt2+dp4y7gpYep/bLdZduu6h6/wCELYFGzcR2x"
"1iBOVUcomLvySgr3XhdiJcgVx2Ap96KvVsqKrQrUIYSJJ3SBrKMCZvulkDW0ApGgW2a/3I0B"
"TbYfvN++6e0Q9qbx2rfPRyyI6cieHiaPXzGk6H7LMekZyLE5ScUlyrsUi4pWRblot2MsgukW"
"4JvHpnLho29nvgoUN3TmLojtxAmyVlAhgmZmq6jHF7VnHbthy7YfuOPA7ZvfNGRvFK99+7Xw"
"3o9gfvngnrwX1PEUKGEUKt65mVANwWOJ6u0v3VSXutbDiJ6N9c9c8ayiUbCeG48TZ6lWSuRs"
"yVb3+8UqYPT4nrpVtNFybAyTUPtR7dcAC/4U2vap++2/vFHC3hxe+/bry0q0av7p54dCVqlY"
"0FoHNJxZLAnz1iW1IosNkbAt0zqiLv6iKJwKWDxOWLQWSqZCZfGV9TaG4mhyZcYfzrdMYUR/"
"CLf5XiURsJO2fe/99t/eHGFvCK99+w32O841MaIWCoq+5KK1w6GBO30wYamFWL16mhsbq1Hf"
"diWfVIoOl9Amxbo/e1q+8F7ktJJoDbdOvY0uZvaSHsrryjpowmEi1BrQuFy2geUolC5m2yfv"
"t//5hgh7E3jt22/up+VEmBZ+i/jzp5S5WnzGyhnbdnf0dl8BOJFppiivtnGup50SHk54lqmy"
"95qzCW7rJbb4KWvTFBpyaBUzhRF1iFYXqg7WkCshxRrWfkDWmqdi332/feubIOy147VvX0xj"
"3sdBOctYfGitKnsqeRGjJ6DGtBUALmkWTNj5jJU2BSWpf4AaEekGn8vc2Y1zYK/yKeuqC7aG"
"blUSnHiDKZwSo0KVwSJr38kD+zCbadsn7bdvf+2EvV689u23UiNxeHZOuWqEjFDz2IIFjwhb"
"jABTrRpmizTM8zCyXyABM0Gyl7tWwV+VpYvzCBp4PXsvifxlerj3WstodvcFgAhaGa1Codxr"
"VqwG4o/xamijkm6fuN++4/US9hrx2rffUcfYhGY/t3GmS1oUrcRTWshHc4ETVnXX+kzJ914u"
"CSzudkQfYq9G3kIyHw89e69Zq2YfdiJjpljD4EeSRrilLkIqWhmJ/HkY0TSMACPFLcru5XzJ"
"9mzHgZu8LsheC1779iUYfnyETFQTTRi6klkQxd6r6C/r9ZSPZjXFxbQKFFJMwwxDuxKOGqDG"
"33C5aIksgK7ZXaN3B02Q2Sf0b0EJRWiyRMWyGLnwt2PRYuNFMNl8h6YLEiG1Viwz+1Nz8thv"
"l9dC2GtSLwu3kpsx72W/uw1PSNbehix7GY14AdRHFJFahhQsYF4l4YUStKYUX6eUcdYQD2Fd"
"GHy6P6uPrs1PCSPo2RRGyPBVLmMnYUSrOiSMlQwQDCgtEvu/++07vXrCXj1e+/b7Uyif7HzN"
"cjXfz63IFveWiZzfVMlGbDGd0LFuP5KCdjlsZvu06dAqPsGy+5WAcQVMhVKIV4nLGTsLI7wm"
"khmwBqOKVjdevag2NvWsTzLqYCfMlKxplTRVU8JeOQl45Xjt2x/koKgueAqcxZkZL08lJncP"
"OrenEE+euJ4M/2I7z0+J1VkNBSrQKp5hnDt0Fnd5WYzqlewXZxYtMmdhhEcS7O4nF2/OnYug"
"BHosj/CQDEoYzZPObf9nv32XVyxgrxKvffuylG9xdbCHE1U5D2Plm70XYefjCjFm7OEcgM08"
"gQLYsMTCsN5QmEdJ3PFe0weG1jtDzbRqroPBeBlkaGX0W3tvT3FTH5xW17OZKlFfH1ELGla2"
"b9tv3+1VEvbK8Nq3P8pj5eoJT2dVkvHih7S3Go+nSCHF1PlYAaQwbB5jCBczQ00uI8EHRpFN"
"hS97LzZbVM1nsEzGiq6WNCyFEeMlqlKUO5iMNfZYsvJYMrzXwtebmPGStv3P/fbJr4ywV4PX"
"vv0JeCOfdct2SeIpiZatYKaqRchCIyBKHdJQaR7RWkfcgBSGmbUn5vrHuERcSskYZQGj3Iut"
"2Lo+EnDzOWcmV9Cuaynmt7J/l9Edb1qVW4sUfc1UtYZW0Io6M2tdCkS2b9lvn/JqCHsFeO3b"
"n8KoiQ7W7L1mGSM+OLYYMKlisXgkMRh7C45XmW6I2oZJybhiQuts1qc6cCx1FYNFhuwYC/GE"
"fdkIk6Tv0nuBoKsthtKvx0Sk+6nxKMuyqO3HNmnYf95v3/cVEPayeO3bV1gMoT9rmaCxheua"
"SGyxtpmj9w6iyeCbZrivT1WSC6IhBWfOJK1/jEKfLY0D6ze7piEY/fveK1pGbpr0bzdzZq1F"
"lqsuSNRa9B6hyFOLhInFEy0q1qpW6sLtP+y3H/CyhL28el1mTeKfft4NBJ+YwvFeiUomego1"
"athbYVchMMSnEiGd/3ju9MVCV4OsoABSRo9TOJvjzHstNaxmmwUe5lUgddUX5DZrjIZI9msK"
"IzjKahRDBMKIp0FnVez0Nn7gVxBVvBRe+/bn9JRU8x+FKuOiOE6O6pGiqd0mQe1ID7hWMl7r"
"Kkn3gHZTcipB5JmYCXcWzVSxl6/hGOD5kpTM5DzWSih5RFXoBVrFpx5l2XItgrDyxxSq99JS"
"uFCv1tDK9k377Qe/lIC9OF779lWg8SqxQGSkzr18bkuWvGaNbIUWJXN2UhxzPME1kQWs0rzp"
"GYB+vriegJ/DVZoxa5XKoj+s4dMub06hF75GMpYUy+zX3AsklZKIqGHzzXSxS5eR147tX++3"
"H/rihL2Mel0iIpgfxsLBbM3MneEVXAt7OHIqtiQR5qcSVdUt5OKIBBy8kvb3HaMgoe3K2XvN"
"9ZFom9niKhlO1qj6O6jRTqZ+ik+z/RLSJ85RqdvR0ZHhwxhBwKtqewUl8gXx2re/Es9yXv7Q"
"C/KibuWyuJKxUGVi63LWMGPFQ4qpFYm4pCh8oHFpUEtktHm39+y9uCDGh/yxWdKCXOk7NrVc"
"hYZtyQSWjDro8Sl1+GRfbyUyNAz195szMBkPoeW41e1f7bdPe0EBexG89u2rEa7dLXqyTS6L"
"CbUSOLOf3imcZSxCFsriKqQwGuZ+7mzCqBPJeLKGpC1p1YVNElWkx3w4zWwlpPgDd/IKvyr3"
"Xjf4mIg7YcQikVcFalMj0eqm+bCmNdEeSrVnt4/tt894EcJeTL3sWlmwtk88ppN6Zb+VFCsK"
"m69cwsvTfRCGSN4gCYu2JCbUQmCRGpJk3Vq8DgVW3zd4L/7kRJuFETA3zko2DbkRwDx79Pgm"
"SK2FdDQD1MbHCVXPPJbWypHE2kOJUisvxMnz47Vvf9POnGGPVc8f1gVns7W3JS5U6dniW/AO"
"IqKN99OyFZkIQ42c0bDVXrM4rXhGVWzhvayyp/nEVqUwQkXrJIyQpFhssGIYwbY9jJKYwq2G"
"abnWSrE8jHBsWjFb3f7Zfvsxzy1gz4fXvv1tuL0Va+lQyQhSz8IWbS+XgyRswYFNVbI/G5qT"
"0ZMZYU86C7JGzth4Ff2bMdW/y+Pei+s+s0VUpVsxJVG73XuvU2txiuPlbnwaHNW0sLFQnT/0"
"5Q1Stn+833788xH2vOrFSeDsQnLoRZYrAMRH+VL22FotOUveK3ImKlT38WLCWLegnHEAZlbs"
"nveKVZLBmqlC1C0UGyfo5wKtxnI1c/fnGmbGq/GbDEpC4tU4hphcl7Vbx/LnLpHPgde+3bi1"
"yId1jbQtTf0sVCfeKwjVrF7xYWCLF04kjZCCBw/etV+ITzV4bV0YfP7wDNmqzWhhRG8e6YnX"
"PL55cKYFsSWPRf3W3OibdSsnpVXbhkLVUF0XyHXZbTRFR76//YP99pOeQ8Ceite+/X3kJCKg"
"w45kablmB5bqJm+K9OyOeq2VzGolFvbLrDqKN1C4Iem6pSJdAGHvFQ8wPoQSRksHNswDDUy1"
"y3TNYYTxtBrF1fd6zk5TPMHR11xYG/1g2gjw1zJnpmfYbvtteyphT1evuvxZJ9TckHHRnMrH"
"rF5SJhyXMkZe3ncqiRYXypmqfEv5Kp1aUvTTWg99JXeVBCzVRCuICSzhQlnJeGWbxbUphxEc"
"n8qkT1QErajNvoq9fxMdki+hXNtqvLzV5wpan4TXvn0twmDwkESwo4qtKl+Symj13eAvrycw"
"EXyL+lhoR04GH4wRiRlOmpDAwKh/PJM3P1qW3osONgY9lOx4AiPYE5F5L9QLlMbRk2g1yYrl"
"+nRWH6PNkph1SQsWza39lPJLgZTt7+y3n/4kAXuievlF4WM5SIfvApdJ25YSyK1CWyGXRWo3"
"BK+zgsx2KhO2Dils4KFSyN9uwHcZQJxCVsJhM7OVjsym725jnfUyXX3c6RycWinMALWsTyGG"
"0ALnpo2qoT+bKqDyNJ6Fr/OcHv9xvPbt67Doa/PdPNULscpiyyMcS3PGo8RkekkooJG2cB9p"
"M1jvtCJhV8ckEyZkwqSgAFJ1pFd0manoS6RteYG4ABlEgBRrpSESFJ8yVcKd1mqkbGygpakh"
"3LK25xRuCVt+az/qiJ3EXCvb39xvP+NxAXuKel0qAIdmWQTTDHuUXEz52bn0JLzOauK0JEPG"
"C58SUlQqkUm6IjqLGzkt+3aMERdr7r0uRfQEjbMwIg8lncdyreLT5KtafGhNRf5o1uEdlqsE"
"+kiKovr6JAF7BK99+xjoIhGrupBtFpyMwntltmgrQ8Z7kc2ZC1sqiyxmtl9JyWw3B+O1OjvN"
"YILO+HUlqi9sq28t8QM3OqJ4IKGl9tZ7neogdfjw2MDg6FOgyt7o3Fflh73RQKZq2QLw0lmp"
"y2hscPvq/fZZjwjYo+plf3Zgvok22hNqWa4SWJMhSwimoomJpOXCpG18ABrrJZ0FqTeDDMxc"
"P6tWn2p65Cwu98V+CxC+0JIJ2BRGIHcj6p8+CHFUaCfykAeWqNRg5Jf3H4AechuQnTumxCt4"
"/IMgg5bRxwXsHl779g3Rdcn0sz5uyOI+YCWYYVrwRBVWrLfgzHglvKLvcZVCaEjmeILv9a86"
"diu2GA8dayKTDaJKathvFkbUABbm8H0aEDG3GeduxEZplg9MjYFWqw7TkC7QvPkthUwiZH2m"
"le0v7beff0/A7qvXRaVrYTvqhI6JVkStzFxOdTDjSxVndvRggJYyRqix72GYels4XDynz5Oe"
"2QcrBQJdfz5sGHcGa6qPyzDibBSXgZUGRFg15DRBY6qFzeIuRQXFTzdiMSOFawUCNCNJaXNz"
"VsfFEF9Mvfbt3wIV/d8pT65DE3xZw6K7Yh0KRXPCaGztRN4WJixBlgij0x5z43E6v8PZquPv"
"djf+OgQT/yYz3FYT7U9scNf18gQNxCTiLD6dfT03Hpp/9WTCeChOslkeQOhLEGvioO3oh8n2"
"5/bbLzkVsDvqNUbdYFSljJTNc6My1ousVSvUwvKEJmnDndKZ3dhdDStYj6RgwrzBSOMpAA+r"
"0jHDkAnJGGNtZr//kanUC7SKT4VD+TSyeaqPfs4PtwpPAtJFy5EHR+iYVS+OhzIXX3VgIPgC"
"6oWphxFBexyFGb4zuXpsodio1yVS8U1Z2IQsWrZlETJMYC0SihhP2MN6oZbj9JWXYA22qvde"
"l9ClOOvWMpRPA0oFHm4FHWpxhXTGR9QnQ8qliLIJJozX51eNr1CeG699+2YeM2gzGCYX1mCE"
"/75BkM5TiSxapG3ZuvHDGTtaOJszUdoYMitVj4QU+gGKvsU4fwRj/fng4WMsWK74Nxmt95pT"
"U3P3J+NqchGcKiOHW6FLUTIc5ugbidMIVOGne4BJQlTBA1IU7m4WsX3FfvucdX08Uy8/w8wA"
"gv6gWDGXDujZjREuYVjY8oVLQ2YUTjVxbfaT9yIN4/NsC84H59RHvtfsvYQWFmvZmFTHExXN"
"1HOUFcOtfPURxMRLf1F7iOrPckY6h14BMm08Qt0VKo5jrHMQUkIKOvK5csfgL/Dat2/B8L/5"
"B40kZXVZ8eQqdf/ZGvQgvWrsKq6Sk4bdx4sNvkzVcEEYCVghcU2ZapBP3tsURoB3sqqF9TPG"
"OH6dcqWklKshsmcKPYO8MDYbA3mqUlYlm37Rxt+AWpqD+wLpeGH74/vtVy4EbKlefDkrAdFg"
"PytcusISWi1bsfOjP+wtY+vckC2anAzT7PFZ7YwwupTXCCkqWXu7VyvWD+10oa+kW6aOiGHE"
"6L1Wd8zGy6x9jCGWJ5MtGo9FRS6dFMT5woSU56LUTkza1mnzdmIE19e35HZt8Jd42WnyGZ0k"
"ab1RqYZM5tVWPOVAPy1PLo3lai5J9PLQFIjqBSuOHFJAQ/xVv9CYIe6fJelK3qvQW9PXsZk2"
"eq+NoWzqDTIsvFc8q4Iaj2abKHBvxSkZ/LUoPyva3Gal7NQ9lgb9tJqxeCzpmvDat28DXR3Z"
"ftAkV0zS0pDRFgpXt7k4cgY21c1g9u+qV3L9iyppB7KicNZ4zBqm756akwl0ro9cUfqNdWsV"
"RuQhpnT9rXZis4J5is7dqq1lFsmHtahDh7Uc2blzFW7jWWiaag1M3fL2pfvt1+T6OKuXjU5x"
"jJYMFV/HMEolI/m2oEn3C2jyVWWxMitWKJ0lzMx4QX8zxCbkMp6wY6N/jKd4r8GWXj2gldFa"
"PIkhzrLTWcmWNktStdJaFgpftFlcH9lmDZ+ubzGexcKWia0MHFZqFwZ/xsvTVJ0pEbjMRDJk"
"y5kSeVqhliQhg1gDTFnDZuaiFXNJIw0Turbq4s8skLu3YdA1frzZe81hxPxHpjiViB3V5p+4"
"J3F12jRZey2R8aSxmG+tXH87lDCs3JUWxHGvbcYDKmB2tMLL6H289u3/9gKYdnBaArqPrHil"
"w6ReSyLTflqVSNMefgsnqUTUmKcYf7j3IockS/vFlwRTv28vYUNmn9zuz8KI6gWxaRiRTq32"
"UVwsNuc2KzT94uB6lh+jxG2W8SdhRkjbWsVBWsgdAxbii6rXuC/b791vXxjqY1Kv8bN2tw6g"
"QKwgIvysvmTJX3pJxCK4+yV/hUC5I2YqS/5UjFt5O08KKaY+IhOwQrWV2eK3e1oY4SXvfnwa"
"wy2ZbVZy9DFKXSYU7sl0yweXTnsX0zZY36LVeb8/uD524c71MeHlDW9M9/PDBBNjBD2msUIk"
"vUsSgzOqmCSWveTPIlvZfiUl0+1YQQyneJDZ768qWv74A8g006z3GqKm3oekxvh0vrSp++XJ"
"deUKSObd65rNk7syX+UGKyIYHJVpGLcZ9dKbwtVTE5tO4dR+zOoFMrNlaJjoD+24nAE3K9xM"
"Ekvasr+SeU0kLTVMGfLoNTUqywoyDinKGM41UtaTq391D3yp4fMzpqyIUpH+yBRbe2swJvMe"
"O3zYdbmYTdaeQbECZ6xYTQxpFsVavjL0vcTLqBm1A8QoW7Te8OxF8xyvfXuWFAsrONKSSuvr"
"ci+s3Jys00tOJC24KyxIdYcXXxu0bWpL5qiCUFuUyIkw2HWEE+LRfnkYUVCo9zqGET7WeVkZ"
"hR6SKxqC1CbVqWvaHDWKHoSGPyRirMaNhzIajLyaIAihqJofR29Cbr9tv/0Wt1+sXuPifYZI"
"gSDcL1DDk72/gYIARKF1cu2bmHPLlWhLKYaVUa6eyyqpePUvXpYDDOl7yXTheym+taeFEevQ"
"IZ1pyAzF6KtR4Ztt1kxbyC/g9/ZeoiMZoQrUJjm222FbjpB1i34cyX0xXhfa05mSM2ISN4SF"
"7TOpsbAutTAaMjNwkpLV9JK6XoGRSjKWNYwgS83GNPzL2F14L2OLi6PWMm0t5sE2y/IXbdbY"
"/WSz8nB4QpBtVoCJ/RYPueGqF+tdM+dODUNR0A+xrsaxkePwnu9ov4J6wdt0BRGImYlHsTt7"
"4WpNCzltly/WXyUCjNqCpxrICI3HhFeBnPcR2WdrJauXea/C+9x1qxXNt2b1usNZHxXDwJFE"
"jXmyWRJtVovuyp464OwaSdCHBxZEWp3tyjS+VPOXC903oybitW/fPeFyBhbOn2UiFZfTwvpo"
"ulEyT4G8+96fX1LotWTLPLYgyLBK8KFLpE7n5MX2QdN7PvMnjQ08GZGM2WbNEqUr54yUaqXn"
"UmazLFOwtzsGB0fMTrlheMDf+lD1OpShIwZmBtYBCLbfuN++5JrUy/uCQDPz/R3sztQOdx/2"
"Qx+uSd4a4JewbzMiST8WzCX1ipGY1dNFSJEG3ds5jwjX92LRAs9DZGhVDiNOwi0vVdzbzTaL"
"Ay0E2lyHSHWkZFZSo8+SrQbyW6Z2ALT3OqBTcVh+axvUca39VUdj+2V4XUiYHmFL24ZC0WuI"
"TGcdwurhff6iMi1UDSdiVsNeDxp2FlWQhvUt1xhPLC4LPTUbtcEoRalixbI+nKkPcXnadHJa"
"ByvQFD1kJZtubPa5r7AhbsEclerZoV2KI9ZqqmFF6y+0jakQ9/bBhFcehDPP38Hu/grzBk2o"
"kg4RYc7ufVITc1PxuqNhoTJySEHXwmQZs5SOkSWwhus6j09zFk9pVj5pLDkt6qVm527N0sSH"
"YzqXP3NIdXg+M+8HYjqvuJhnz7rV30WIVG27THhd4DuysJDYlFx/slYg3Xph7OYbphmmR3+n"
"vAWqs0HD4om4uS1p8xjfLp2dJglfBQvKQmeFwwj19TmGsC6apZFaqg7ZrGZDmXnQFb3qONzI"
"h35G+DYDoFaOjVTxTRlbHaOuxweVb1iDVFFjvPbt+zMKEamw59I8r3a25hlbS+xMt7ic4VTh"
"5iAtMMc4xjzC255zHqYUcs+j1Uc+z1HU87kJm+LTk1FcQ5Dma4AX2tMnNsv5m0QraFUa+te7"
"qJVICxeoQ1qNWgv6xJrX1zkkADq08AJgvPbA9vn77cuuUPUy4yX3gVhSdR/BO8+eSdqdhcxN"
"vLlKnbcuQwugrmSM7NQ8/AtTxiZ8ozCC8y0aHvOozWIBW9qsAFPJEJiKjDJX9QQke0gRvJU/"
"az+ywzvU1yc7b6JlR8WD4GijUxVVR30NBet41eVOPZteCYLwFkDoJ6CaC6rXa1jTU+dVMmjY"
"iSFLGob5b0FarEqwNr0XrSk0cH5uJJrlarPNshpHLbhkswwyLmdi8sFkG4cAACAASURBVNZc"
"TlzeSKXYkD0oebDzHLk6K3wP4vP9h2x6hDw0HN1pXcb+PB4GglofB14vQ9ISxEdXfnm1myWt"
"tyXvojZ0a5a9KaSQ1EekuFeSz6Z/dB1jiPDihB+yWXNXtJc8GkflQhXP4clyNd/PRc2aeKo9"
"BqL36ojDd0jcSBnwjQIqOGQE96OyA60AFNIewsH9KI6rPS3UISjnICWeRv4e5+3l0u8LwPpE"
"Ly+mWE/EbgZuaciYpEnMhB5aJMYlEjqSwv1WHQuldt2SOYyYbFZbIXXHZnkcdfi+dBuEsHDR"
"uCPBc/07cEiom6FF2S1aPOEMFw9++721Bga1DaDmZ9Md/qh6MT1ne33e3+fw3bt/ysqznsUl"
"woDOt7piLrJlS3JIQYplSmmu6058al2H8czV+zbL7rPZAi1JM/AWXzLjfZ3eq3NoEfeV9eN1"
"fRofCf6OB8lSEzxIOD2kqYC5q6s4Dsbrwu013VvFIfT9Lqt5pkISAXfmnwvBO8CljS/bmDOI"
"CbWkYbRCPq2jEIjq6GlEV7P4lJ37+SmEs81aO/c0r2RYWOAJlg2914euTA8eW3TzxOsw5fZe"
"nIoNU1UhgoeHEMUdOtAIdTgwwmutXk8H5Q40d6YnbE1sq+fYiQV1PJ9KISJ5JS+RVbk0B1aI"
"M8smTLrUaQ2qprEPPjqPzvfKzUAaDsrCdsDv7VkrW0FCOP1ibqiRONjitxbKJmSEDod6LNGa"
"29lCRRMcB442WqnWkCw6Nt++7KN4vdj0vDjeh/L+FnD3nh/WvBGZB8rWCS+KZ/mPWzFb2rfY"
"zsIt62BufqyHZhrIXVlh0hkpOk7hcMKcNl2hWcTQW3B2X0YjsZHMNGWoce6FcRaQdSr0D2CJ"
"PC44HnAIcIGIX3hC1H6NdxQcsKsyPdu3z4w7hsUg64TWxMK6Qk/hZMn87MzQaAfoPIcUmBoZ"
"/sI7CN4Bjm81qpoxR61ITH96F+PnbzqW61Gb5dozOfdG6HAz0LuiuYuQ7FcXlYMSCg5I+2c4"
"qGd6ABQHjTmaBcehGwRBI4ObBwAVDw+jSo572j690faL9ttXXZ89XboeVYvnun/h6SmKeGfN"
"c+AMqWUeBvtDySpvJXZdFzVeNl5+OejqLE04uU/q0iwypXEyQvz1cIFHzoQ6yy7KGOripBuE"
"9vkModWW4AGUio8/qJeveqmmlr/XkEAAeNZ9Pe2Ref7OzPyq56Dkhe+fsvE7hTgtmRMyUrUU"
"Uri80Tmx5rTioCspoahx4UuZgomHS5ruWpYZt1zAAw20agUPBxU+2qZ5qX74mIVCj6+0I3K0"
"FTDEiYMxMYltY53DhE3fl/keHm7siOdQr3dnehkcl9hpT+XitCXuqVQHRmGEO3EeKpiCAytb"
"cwOQVxBv5QUFAhWj4r00Pa3t5czVi9J8TuofDsBqq+Ch+THATPSmycPHx0v6EnNUjbbcQezb"
"fziILTtUBO8pXs87PRFEnqmRsOqEoQA0MkItV2LlLFOw7CDlDp4vCI2y6h2IlE4dHx+xp7fs"
"ONqoOB5oVBYgXY3UffsSXd7lx0Tx0EilZxZu7cUpbNPW2Bfa+lQceeLiiKc9tVyegv47D5fz"
"KejHqmFxv2Q/33QGXHHvbyMp+GQyZULIU3t1O5uvIz032RA1VRZB9R35YL2BID+kKRfU2g+O"
"+wY1eTIIgvsmP26yZ3wMLqflASChhbHdwBqmeH3w1etlJrZoI3b2v1vG7b4oYKlY+D4TPIia"
"J2h/sw46EKuMbazTmXtogPotL2SKo5WwxvNTg8DkJ1slZkicsyRIrq/H9KoViAMvWdjlmoz7"
"S+yYl5yewv6dN6qr+dQ8nJfcubmxbcrEU+8l1LVhulukoZGRaqHlH+55nWkfe/1NS4reN5o/"
"uz9b53jCa4FjVJJnZvJ5alPJfHRHzvv1TthVnjDPn4QMeLjnglonFvtnMDIEuKx4vU+w6Mco"
"wOqvPb8XNzzP/dOnx7fzDO2R3zr5qXkXLueNrTo9u7RIjAvP8/r87k0DFQtFGbLy/D+VvYVE"
"pHq9qjLi8dKHBEJewua9jkleaP7+krNpdthn90m9eM+XUSKF5gG0gos66SJjN1d96lk02aXv"
"mwKIvrBABKUAQjgWNMEz3bhYH04Z+5G9PQoKUAS13FM+tuf2nWSFkSAEWFBjfVG2IChlQFbQ"
"PZa1GXumIfn+paZ5C4s9d3fJ0+fPltyfnrJ+L47tVHW4RDbgE3S+xJULrckogPTGXsK9y/aq"
"Eoup6H79hChF/SW2Zn95o1fxs4JADH/BGbK098YHkHFc9YZ/758SQbE2oJ2L0dMB1AhcvXt/"
"ie8Z9kqcP9uXb1g/XwTcZ2hoGLrCU4tuppwbMvup5qLWt/Ms+rCqKFxoC0IYgaitUXuYwvRG"
"6SsuiVmuYLXOsHY6RSErA6QCFO6Q9pvqGaCjzlUVIf6UTwekapG1+3kJ3b+did+XpeNR99HV"
"6wAmR5XUAnGXsFYZK8/icpCKsD23VyV7NPPHK/NqEmfmeX6j+T59kUIzojar0esLUApa8RXQ"
"UEF9iByuijqzxtgyJfrQR34+sZ7Sy8M966UBWvPyoAxnAvliUnpv/Wf9Jz+o63G5/+YNszjZ"
"TIk7uGjhM73hHdzoNEvEp8r07ne+2Z1faKlhplIXzUftILEP2frOkjGW3sgbL2+lCPVVdzfW"
"CDJACyXdS1xolRTi63ORzfPzz3A2P/1IcsTSjGl+vj9/9vE3HdOz67+4feRHbIj2WYBSIBK8"
"EU5aZELttbY6TGy3HVELMZW/MRVAsoez5cuvNbPFnkxIn2xJWC4Rsv7uggpIUWbsVaIOTOwc"
"RqESmS5AEg3Z3BTAuZ7Nqha0sNI88vyrn+jXdTG2e4n41tv1K9Hlo7TFxlicGvlokOTM++lC"
"r7XdfCH1AsmbxIfz97gvTvNqEt9U4kKDg2tijcttu6Xf+7jBIWmFACjGEHcgskqJGX/aJebM"
"HAUOO1IlRVjTVrb59TQXYgnu0E7HSMvDHpjn50gC07PBGD8D0NpAp8aXGgQXqnr8DvNAvOlb"
"+m62JWZink1FU+Iun7cGJsZERXx9IbZA22GkLHGoNkBcv07TFxQJbA3v1ZuQOhKnCCE1ekLE"
"G5W9+wV0TkdfE3oqhExFENE2AdPCKW+zjbue6Wo2n+2r3U+/Lhfi9XwlZEGIzwc+MJQoBvdc"
"U6xeFIXszDgzHxfalMT1G9VZrkfP6K2DqEzbn5ekjwH6JNy2yBUQLkWmW0ZYwfgTCaK+vgFV"
"TRhkQOYhhY0ObaDzzJQ50dGK4+9rmmDYedst7kt91dIA2X4980lpcgSR4ZBpy887+bFOCOr0"
"rO/PRgY87d07OXja07YRdlSJ50ov4WOqxSRMIuUzRuBfTNPXRFW6ndksmG7BCXO2rET24lhG"
"YWld4TikCPVRwE8Je/9YPZMzy/teZ2z/wXbn8h6PrTD3N4j/f69hUf0TLiSwz+ewaOAF8We4"
"RM4ykFazdZZBgyhtz3RhstgtyiFjMX8Jfnjm3JOMDbc0HTO23B6OPKL3GQhKHaJVKu3oQuVY"
"NKTwEXzMGV1xPvyJg2jIzIdxVMYe2QTMta1/Sls5acacTaQSKavVkOcfmVJjouZ53cIojkzG"
"MXkX0xKGk/foWUruu3nl3yXOy7QCpo9hW7Nil9qDiAdDpeNf2GbxVaiondhEXVd/YYXoiM6C"
"YaiK4jU226AhhTYb56y1MVicwZJF6wHuMG3n3p8lJDUeYV9V8rz/lLK+T9O9lWP7dz2NX5fU"
"q4zZbnhR9DjuzkN/dFuTP0o/pvkde3se3QjH3d83DlrOX2oubbPgXeLWeJ0mQaUYqRKNPL99"
"x0tkWEwPI6z10N+iEXnit2KDoedC2aZ2pbmxRuVmoEYwheoZd/Zy39vHcuzIfQSx0U+PuAIk"
"3jPWcWXWirCyvYSLo6Dpr171vmCxnyrNpLdK+5uViRmaNQ/xXVJmsVQv+63u2CzjKS2stJDb"
"if1+BF2cR1BkLxh4QHTjejmc4g1G3bSnX9M1w9mQOWoJqZOAo+/LVvK+N0YzduoFs5HinzIl"
"vayLtPfG250UX+H9LI7X9T/d9k/ZCJjsVCx3SJTw3l2mCemhDUNggBIiZ2zZzNJmjUJZ6DAm"
"hkpczd7DHH0xy1W0hbhkq6C18S4NRJiPpEhG3lqRChA3KtmNpQKKSgyxA1MJBGK6oTt+FiHH"
"It2DXsjzUfZstSc2NgUAbtcvdbyAYL/STp3np62NGdsNBh+3HzmO4vn0FrwFxKpXoAVrhZrf"
"otPyVmF/qBa+31XTrd6iLMMVBLYKHU5FP4Nt5zSkSM3JqVEpBBxoOdR+ITYCgiEjLLj7PLRD"
"7SUMhNHD2jMR4/zN2kY7LX2SIGCA4xWDe2YilUh7NrXdViSPdRahPBuyMkzP/C5LmxWoWpU/"
"UAABWjP7enX0NmNhxJAJiiT8TSkltR3aBGi4YKIqyJhVTMrGDLgz78/OLC0Je72Eh6ZwSRch"
"Cwph25fhevLCGT4rsjxv/PkOd7yaBEfV4oAZkD7ZPDPEq83Kx8SUM9WZSEKUokVNfILNCuGW"
"zctoxJiRAsZfJrK4qy7rY9ca814M2VlIkTQsNSrNnDWaN0PWf4LwR7Jpxhyb7WZGIaWybsL4"
"uJzqox/9U5F1Gu6uQPOkXhJ2YTLsBsdc1JitFrHDJH4hG2MNi9A8glSnYVrTY09ajb/yHJ+a"
"qe9Nwm68Olt9CGGwbiZpvHeEIBshhYb1c9zKumWjXpdgCW0BEbUEEzcw01AzUyzrm/KHxl+E"
"DPat7hdZTKUWvjDhdf3ft498543BGzRQ5WLzPssPouBZ69KQWioWE5nC22XhC1uYVsg2K60T"
"FWtQJRDRvzXV96w4W4Eqro9s7SXcCkvU4gITJdbX82GJbZKoYOqjIeMKldKNxt7faqgCMWa4"
"AUjreAk27OZGaMKu3q6/O+MF2DVzsiU32hoN7ZrrJssYJxe878dnYIGhDubnslkGXKfkzGaF"
"L2ABBMYxXE/CiFa1GVHym46g1dSxH3t2HRMQYc2SsBVk4TapWi6CjzkzjtBgO54Nmf2CLEvx"
"oS0MuNg9r5OaBbpwGsFB4/nCH4MZb9Srw6BNKwXXzWV2Jb5J3+vzOrzQn5W1zvkenXNX/YQW"
"OlQl2MWM8dJ7UbtZ+y4rBFDnrEfKhaSLUda3KKI1tIcUIdyq+tep9bPOVKWGpEcbBBa7NJM9"
"6DGBElFbwkTFzr2/fgdulqYhQ6ZkYSGUYJNAGZSc4QVxFbEfFKRMupvWoCC+JKSvk81KDJ0i"
"dUJb4izVU6OKzVbAq4+J0DDCA/rEVlSvpoMmbL/Y3qmiYiZKGFt7fzj95dhE3jIS8xYoG3zq"
"I+enDCYTFQs+bAamRpSysv5ZDxUL1cCIK2k7T1YjXtd222WzAQuG1LCA2uxK4ercsc0MIT5k"
"m2U1cbZZa9qIMyvfC4z6Q4pP7SnBOO2nQjnoYyRrjiQCWyV8ntZQUnAfHVgPKaroaOlFK5Iu"
"Dd8mCnOaP6mX0ZYxmuJZBm4O0oKRUm4YoGaytNRCrIwXbtcvXuMFjIFfEnehIcKTwZT8OBIc"
"FKa/vM2qar2xslmWvgbCSLTmMKICUoGmspSSCE0onOm+X2bvpZWBr+JbBK5bi7h1/iMuyYFF"
"CoMbM4y62qkIheQiNiOMkjk/a6ZkESBEOXSq+GEM1aYp4tV8j/qeLqOVNOihtjqTx6wsyxym"
"mUdsFnTnRYjzEFMJ61d2YFZDJSwZ9cu4L6gV7fAGo9XEVB97j1CjAau+nARMb6X4X0GLJAmn"
"EojlMjqw5OhDGNvf0oxdtE2BJDKCWYSstWhognjVly/HCM2hWk7QE17ihPGyJR+Ixa6eyI9u"
"1VdYRAz2I0h+iqFZxhb3wq1JsURbi0X8D1/nMMI0DB6G+VvT+bT2AS4nhBWGJkjRSZdRSMhY"
"hMpEFTMq/k0Whiw6fRZCaPABJdW4aRLqoOGLO97fuDjB6/qJt/3bt3R2q72Qf+UOwTyAYlaj"
"pv1CTQdpSdz4MtA6816I4TumcKuDNVfG1FocYQS8wTjCiDrY8kiCCEPVv4sN3Zq+aYWOOmne"
"hBRYSJEgo6jC41ZSMiMsq445M0MnXUA/tQlqRCo6tkd6Ng1BZIWzYWr2RhCg3q6//h5eAHDQ"
"r0mZglmxxKfQCkXCOmkmMYfJ1KcK2Ohd2Fd5a1HoQ+hkYKXK6Ol8f3HRMMIu8XcEa+8ZGL/j"
"gRIzUXddqli4E1KEqGLSrVCh2I0ZczwClnUreX9qhJ4hNZjgGdIzJ5g2yxIVFAv+zadpwkt9"
"SVhStMhOqBkZ8wDXpD1pV83Asakq0wuxcl05PlX+lroFtfNN+3yq7RG9BqA3GzEG57gP6xrc"
"xvhKYXdIkHFI0d/xJKSYR+ywx5eJOQTBCGeLrFqLNtNOkDqrnsLRCzmwuVcKCFCupozX9Xvc"
"9v++BU0iaDCO/AAB7MPMDukMqUhSI31icNnd+6ZiSQpKdoKXcBihJ15bJBmsi7LF9dG/QtH9"
"Qjra6F1Yxka7quESEodlEkachWZjXBgwtaoXwWpT2QqKyM/SzCx1lpBxy3GdbgzCbtdf/The"
"gyl9CxOMS/yh4XszIBhApMLHxNhescAiaCHLFUGGO+HWymwhxaf6VA8jejsxJPWM1In3cgHT"
"5aZbIQmzPqL+1BhJMWlVvp8S1xSGLRuVXklPyqtrUsRuvobUbMjA9VTOiynmNuM5XjJ+QUQy"
"AiV6+DJ59WzlqUUZVo4lr9jBw7ol4ePZDNv8JFqGF4cR/RQgkZFID8hUwELDUNOvrL62m8xj"
"sW5Bl0cNk9zVHZUsRxXcwEz2axmPUR0Mzin68fmEpUcNWSN3L5SQpSIrvFcew+v6fW77f9yg"
"fitBxgJmS2YszpqQSRSxIg+zzWIt5KfIwvtMHC0oFEaU4udbFB4/aDaLHf259+LDzOu1auQl"
"EdYgmAbjy0lIwSWMy2KwWYlUClo5jM0FNFq3eaEVfn8qpf9cT/VbAABu1899Kl59t5mA5Taj"
"jJmzmsg4NspFK+0k5smpumOzELe+GhsIa5FMY7mGbln3ItXEpgO8ZrYW3gtD2Ibg6SHt30i0"
"3GvvkIcUi65uqKjMBotEK9e+uIIBFAKwqElr7594iiWY1e7eULMyXNrJtMbr+gNu+zdtEoUK"
"9EM3uzKg6KhOyTvDkYqQhb1FaRZjtAi3piQidyyu4lObgRBb9HfvalEXmIyXccNaRR1ciLlX"
"IsxRE4D7uZPf8pCCFCvpFsetQZxmQxbHU8jK5jfmKUpUI2WVSNLakHVJw+36Oc+HV0fJKwL8"
"MqoMxxI72+sJskVp6xsXnzeGluHW3E4M3mt1X9UhoTPE1eAApqTeqqSVRTb142FVZS3hxCFM"
"Z6dlyJoStoZs2dVtKmXCM9uvFInNYytmm6+/I4dnoFeF5IxmwnDFe6b+MbxkXDmHa5+xAiWJ"
"NQlRlnLtsxXmsQ+ptchNwhg6jIVn7cSTMKIYW9Z7rb/5ki0rgo6amYEKOfQYKN7jaWVxTVg/"
"VsdICiIshxS2a1cVM91Sy3FpyFrUpLUhm5NYylfXEkgvfzG8rj/ktn/DJp2zSbFaGoqjjakZ"
"qbJCrUYlK/Ehk2Si5ctV3k7xojBCZHRejQGG/fCzVAJxp8Cbjf0eCJ9cgHbkQWCOEZywtgwp"
"Rom0/Y1QEIUVq5GEJImKhixU1WjIgvww0KmeTja/xbZkqq3WqEQBLrfrZ78IXv2nlUY00G9i"
"rj83D2NpSy9htpJtMm6YPNthYPulq6UGY1IsaGuxdiEvoxRW0UCVHS1UyaCqBl0YD4yiDgyF"
"+h/tW0drP8cTg7BmmrnK7gNn9Czrlpc/qnpz9BUMGdXEBFZYQUZs4zwlyDphl0kJnhOv66fd"
"9q/frBRagbA9vaiJyklo9E1WPXeqJH2KH5stfOGNCABCKo7lstZiKXoZkhqSiMxWqo+xJlpl"
"HJlqVTtVQ4MxnZ3WhJTMqqR7/Kod0lhY+2zzT8ZQcGYWDFnsWVo4+tQU4GQfoeVoL/HaWvvw"
"itv15704Xn2Pdmfai4s1JIPhjZUx2CxuVaUKuLJZQecItVmrgGC2kOJTrpL2k/KwwRIiCSOm"
"TPUxSFfDpTtgHVEoh37xBtiPIIE5CylAnD0+Hp/N2bpczolr1K0W3RKPgF0Or0g84axEUhPy"
"sekRvK4/6rZ/nQtY352Wiwbh0WclLqyxMuY0i5v3Rkl/IbcxhRqkkSfTRaH41HTOB3XZ4arD"
"BnMkcQBs6kv4dv75j3GBoNBgpMK9PDvNadNf59K0WudAa0ofUkNSCClXI2R0kiFzZWJPUBxZ"
"sD5hkjqupMPD3q6f9bJ49R1pDgzJZpXAU7JZYWFEx/hglUoFdHgs7QnAFG5B542nTpj5ng6B"
"yZVBNthS0ZJUFslytRIPjE5e09NrVa7CfAMs3z/hDDJakbnYJci6osy27F5zcmKulYBFcGDV"
"L7q5tvx1/Ild3mDPJu42GJ8Dr+uPve1fuxk3eIJzZ6v+qM0q9DCUS8NOwgqNroAqGAYLpFsu"
"Y6mdGGVimC3J1j55r/k4qeq3gnSRte8/yMWak1NI0aSfVFJKMD0zbdX/dmRmq4aaaLFWzsPI"
"hEk0Z7JyY0HtkvenAgrcrh95NXgBQHPj1eYxM08ZqjUJFa+Q/hDGOj41HK32nYQRfOL1cI2U"
"1EtUrMSWmy19GCojhj5ZNgGu9S1k9+OQEFKyFjRMRkjB+rGEzNCJ3ZRz4sqceTVs8cBKz6rC"
"BTfGNdHAIoP/BNf1HHhdf+Jt/5qt/4gsNinONu0pjFoizx5KeItEGGuVUzUj1QUMOq+QVXG5"
"ajZs0LRK7rIV62OojNDGI1+HnAlbnZ2mWrWOKmJXN1GyLJecaflqmJaklmbsDp8NWaDKOK6Z"
"tla0F+invUq8+tSORcq1GIQjMdwiQUpZV7L5VcsKiLMcn1oPjyLFuZf1AoFGokLNlqQbYpUk"
"1PyYmXOvhkvRPEGGQIIiCa+YdtpVC+XSIVue0j0KnFWiOQ+rpzKW4UsDyLhRWVTGJVMV1lz1"
"iK//cNnL4XX9qbf9724mTgmyPp3arKl5iPOxgakXKImWyKCwX/bNuhRFUUMKIw60ohe94Ugi"
"OTCsHp7nXgBK1a+pehy8V0OBfzYzZGgYJ32chRSuWGqrZ++VIgw29cHdJzWKMtbrZm4WrEaP"
"ccsUl9v1p7x6vABcf9pt/9tbnXhi0cKJzTqjyksk6xAtT3h1npZhhPUCXczIjxa0hg566CKx"
"pS7N2YqFkqWrYhgPP5m2xSak/Rp6JNwPKYrEPx6TZ+i8D7539UpNy1UYlvOICR1vVLZFVfU1"
"5Xb9yU8HBs+FFzA8PutWsFkEWVGXw5EYtwMMyhk+RjBfSZAUi72XmfoKja+IGwu6fCEo8Yok"
"NbpsSfJefeY4gOLidCnu5QNPCtlZPOGcPSmkiHGXs9Xc+DN58/VRWtpCLHwmh0HDFNnh6J+j"
"LL4IXteP3Pa/sS3t1x2bldec1MsTr/NwK1+XCy5dEC+Ohb08uXv7m4x9OE0a4NU46Fp5r+Tu"
"ezsUTWWv+FE3NImQ4ibkvZAihewLyMqapKBbbLYmo8ZLEjr57RRoa1cCt+uPe714Abj+rNv+"
"17bERyGkgs3SFXgm1Ee1WTUC2qwvxZ61wc0965IgdT7qhn4ihwxjDJwnFMxWCrqi95J0q2gf"
"R6ldcryvCUJnb3PcBXXxVjqnMRTuwEIrElFCpqgiRPOcSsy5F8JCKYHOkHrElbUmQsoLsIUX"
"wAvIJdKkazni1GN97l6cslOkmohxWLeIFOAF0dxxoRzVElQYQ9q+K2S/ZraS0+IVwB6/oVa0"
"B/0kQj8FKZOt7K1jJQxzPze3Ir2fe1YaFqoW8oWlG8sLDcc4sMfey7RTYmFt9YkZ/Ty9CF7X"
"n3vb/+LmcsUWKj7MY5qJqjk7BaUPgLsrWx7GoGLMm6m/KFJAaCeGG3uyQlWS6+DkvdLtOOi0"
"PPb1COUvW7EI2cWsPYlZ/ivdASAeBhgVa55JGWkuo1woMbE4DfxCvV0//QU4wQuqF3D9Bbf9"
"K7dgs+ZOaGLLWgBBt2QEVHO7kgMIvgfl9WLDuTBAKdpRPf6yeoIMq3n2XiZmhn70XiOeUIj5"
"zw15K2cOI5LOKWG1ARRStD5aGisOzkKK4LrOZIwlkBKynHJZTK/dUIrg7fppLwYJXhgvABDv"
"BuFKl/sQ7VCmmZydim+Vxzv0+1Qo2dRzGIHqBbHpeWZN1oFq48GDXB8JO0YqeK8DAEpFbeO1"
"TfyKZd2KMWGi/e5zas+Bftezy5NCihVkzYop26+paZluYbWoYe35+n+W04vjdf3Ft/3Pbuy6"
"mCSuidlgcdc1ixbXRHhlRLT2hXqEgGG8zF2lmgj+JaF/77NMfmtVH5MbC3aKZrq961ctdPtV"
"glaNnyVe4XcVT3TgWLTmgmjpFAJkHFWkDu9EYUgu5vDCmgu9tfhDXpgQvJR6Addfdtu/YmPP"
"Ptss6wi3ZxfxqZY80WJXWefI2osNkeCRg0oSjIY6fiL+q582wKsg9mpPXp6zCZkEzB8WJanv"
"6EM/VaHUPtqvplsJIUVuS/aQQjykCIUyyZX+vhmj2Im5rIxJqIJM9tbiS7GFl8QLAATtiD2P"
"5uspMs2NSo6yoEu48ZgUK5p6VzUKIwZMQnu9umK5jBEWTR1bU/eW5SrlXqpPHVZvDKpjg2gT"
"1c5RUGXyJqRyNrQwyVg/pdto8E9DoATjNaUPwcufpBK5/zF2FkmD1Nv1B70sGy+P1/WX3/Yv"
"31qjRVGibElY+IT41BQLcN2y4iiKWjjxOioWRxLjZooVK6PJXqNuR0zSNa7vJQB9u+T9vbUI"
"xyhY+5XxAtsyDymST6qnkAXa5hGIsep5cUypB436ejnLZdNLqxdw/dzb/sc2NyUEk9fNSBUj"
"yMwVtl+GlBqs1iAIpt7GOkNrIhJhRWslwp5ikkZC8Vg2wd4raEHFGQAAC5JJREFURDAkeBal"
"el8Q5fhu18iEBciaczaFFGq5TMlmDRsi1PLyNkuUEiapVlZNIj715cHAK8ELwPXzbvuXbS3p"
"lj5M3itbLupPTFo1llsYoSXGrh9e9OSfLlG9nYg6sTUn9ekiXma2ovdKjccUT0CL4PhDvjbi"
"stFpKdR4HC3NFFKcPFx0RLLMWAtxrWE1a5UQUi0CannHOFIvt+v3eiVU4FXhBeD6+bf9Dylh"
"VPL6VCJwLeLFY7ZgWsVhBNVK7wXqokWaBK6JpmFqs7zZmNJUdvcsRTxPNyhhRbsKhgiV0IE9"
"miZtfKNHQwoGaxxXS6flhTJeNyCL2dS9E1CLfUfUVrhdv+erQgKvEC8A1y+47b9/k1TykGsi"
"NFkAgm5JjE8rHf3WdW1pKv+RKesF4rK4iCT0z23YwpI0jNxYromxIdmH5bSGi252VMDipdBt"
"VjT1nTC7SFM448NSDAspwslhE2eZpDkPO0lcF09VCG7X7/YKecCrxQvA9dfd9t+75YZhqokT"
"Xr3SnfUzJkNmQZffZPxcRQnzvN6cu0TdMrtGqDlMPK4w3Yo6ql4B2+DpUgFttGJCahxarG1m"
"HxHAWnR1Z9Fa0db0a8yG7A5nEdZXzhZeOV4Arl9423/3tmgYcmvL8KJ7TGEEYxd6gcri5J/+"
"m8PGRxzh2UZghbIoka3ovWbLNVbWV3Wd7m1Jbi3a1xSVMVAeEUqhYdfO+7n9lIrmRXPWsBS3"
"PnqKETUIbtfv/MpJwOvAq0/tiNEogLOxgaswwmag61vvtUALnyJSJ8j48K6JrWjtja25LLrf"
"nwulEmZ10IJiUaQgXtyFhKpZphpjMFg/d6MwdhFSWJRKrivLVV+nBYx8Jg74aeV2/U6vCYPX"
"gtf1N93237H1uDVVybluptEQHSwulCZaYx3zWCZXrFKab41hg3XBFpOEEkg6K4vLxuPAqCtl"
"HRkpaM3K/Y9wqfNM1dKK1M9tkEn/80TxG5ogJ2u/rIxrDfNKers+9xjUp0+vS72uX3QDsH/x"
"NodbaUnCy8MIgWgrkjsZK3VdQ8MIP4Gx6Az/GYRJt/xs7OKC5AzNf8Zx6hrS8j6Y4K6LRpAZ"
"WHNqX7Xch1Siu7ppUOvqunM6MmzR/0NCxWnZoia2vOde6fS68OrT9bff9t+8pQqICSm+5zCC"
"YwhwCqDOPaUPDtNEVXZgFEl4fYS/PDUVA1i0gr9WL3XjBkm8hrqMMTeqUuAwLNky7Req3hEZ"
"NSwXRAStSqjFqOJ2fXitex+vKvu/M11/160d6DfRmXQ7HtAOtAdd8oDjwHGgtbCONEhDO4A2"
"5u3WpiXjdtA6hy6JM4irjeVH2GzTe0C30yBC30s30h4gAjkAoXfk950+G+bPbyuLL1REuiYp"
"VUfCCDgqWh3LDyqI3oSskHq7fsfr3vV4A3gBuP6eWwfroHsjKWD3oM8+QPrDhwHf4MP2us0r"
"IrxOO/Lu9D0tg1rbiy2u0OI+9k0dkAPHxx2XPiTa15lpfoiwEoUSvwL0QPLPIPFIaBAhwg66"
"DxhVmqm6cobsdv32N7Df8WbwAnD9fbd+ZBtSxwMd+kvgHvJCjJ+Y1Et3TFcLUWFDIobY4n3W"
"lNSwJuNyIpCDaahWKStdYpseD13eOGXgLyvTBg1B8Kci1DJMpkbLKCtw5jX0dv3WN7PT8cbw"
"AnD9A7dRLyJVVhmPuTg++J5GcwJa3OtoURWshgoJxiROLVXPI1co060kYE7Jx30hoCr4EF8u"
"Ua4kvGmbIObvCPGHsUSWIV0sWkd1MVvcqrL1P97YHsebxAvA9UsDYUGfzpwZFUdzKog7hqub"
"3QfrI3GXq+x5eZ28V4veK2D3EN+XZNhXk3g8PKyqp8kV66vJlSpZN/i2QpSuSbTaamG/x+V2"
"/ZY3ubvxuluO83T9wzcA++duheJToUvu2pgI7t4GnZ3V7O+1YGQQiGGEjZ4oaXlqNlrQFduP"
"bXWOUJuGExY9w7FAoy8LHaBDVQtdTU7TijsDWXNIEUZPeFpWc5uRW5Hr9uPt+s1veEf36Y2q"
"l03XL7+Z+TWnZZZfVA+OBxwrgTFhyNYqlhgWDFe7aJZzfeQiGC1X60KVNOyI87OjEq1x9FG9"
"8Rs/efeCi4rJ6tsg3X71FiI3IdnCH5UK4r9/K3sZbwsvANc/PQplyiOYsxat+rK4hN0ZV3N3"
"dYSZgNoKX9/N0Xv1mrt8bXKETHmb3zTRHDmzL7t4iR9g3WbhtEqO+8vt+u/e1i7Gmy+OPF3/"
"zA3A/ou3o9EYVM0Y7bLhtaBV6gXS2lS19jWs/wwCYjSf0lTveUSomCE+nTuFNOUuOsSIB6/m"
"Pko6regSs/sxMK6ddnKj91HaClYudSZ2dadhzb0gfuPb2KthemvqZdP1z0e/H6XLwgj2wi4e"
"FEZke95IUU4euuqwRLVJXeiWloyPJ7TQ/P4D0Ovmg3/s8OFj+zGUePrASE+5gM1twwqxgvgv"
"3/aOBd4FvABc/8INgtY0jHjIv3iLVcNL4UnD3v0N7UWva8nfTNWKa2UOMngHQw2ZNVR5C2ab"
"xFuvoCYhw+RbMH3SD+/0y3i4aEVyJIHL7fr1b3uXjultFkeern/5BmD/rK0dsMvdVL6ch7YT"
"rdMahYYN6nwvlK34yC17eW4k0riJBq+evOSsM3tcpvBA7cMSGxpwKWiHXjynF99GI6Sb1kTB"
"BQB3L+owinBNihb6ucXGLYq+tk1nE6Herv/kre7GPL0rePXp+tU3APtHNuFT+OlSg+OermrZ"
"QwfRIak2Jif4MEaHxqkm78VmC2UBllHYHnTErF0HoKEU1DZyhIFv049kVzqh5EImwiye4PtE"
"W4P3c0NQu/2C3K5f+zb33Mn0ThTHNF3/1k3PZdCakmoTuZbG9Su2EL3kxVZYaBhSv1Aj75U8"
"Fj8ctVs8j2gP/qb2Ru3j9Bms81u/UUuNzTMrRluYg9nuTYF6u/6Dt73T1tO7pV42/a3rDcDP"
"3rdx1NIYr+Wt2SguoSFfLFE0IKefRhbkqpA+Fbo/EzBLR1WHoJXRFEuEAl6TXmTFskFvOVmN"
"YoY2tR+BA/ia6+3t7qn70zuKV5/++vUG4OftWy8KlUcLluGB7D6Uwum8oPXMufeabyOgNy+F"
"SGQb3Q/jeLBSaH8Xp1HogPFiG3LY4xg03Wzze9ty0/frWH/tuw1Wn95pvPr0l683AJ9NSmZ+"
"K/T/TGwNreJeoKhhUggsU5cJuxRrVTVnTc/vHTNFL9YFUi/rm4qyBABCJC3Ps13egH/yPoDV"
"p/cArz595fUG4JcZZKk46sUmml5SlZuNzJD3LSp8btgBKHnLoc8MXAEu2u3o7cROQB2c2ch6"
"QEXL6qDNE2EpVs1xK/Cx9wesPr03ePXpz15vAH7FvtkvzhfAGWOjLYNQ7MDixPVRvJGYG4wl"
"8LQul3RNr85EtUvem7XiRiIAkEaqVnn3dou0wT3WN7xvYPXpPcOrT39af+tfRWLW6Jwzi7VQ"
"opKB2LJ44uQcoVNTD/qjaP1KheOfP/RSyDxBYwuip8H7i8IZ24IGfNP7SZVN7yVeNv2R6w3A"
"F+xb75lJOWquhtb5OEWs3FQETjlLPY9Fzw0S6n8UxMInQeeaXfaMrpZjCz19Bb75PQerT+83"
"Xn36Q7onfsO+QagIElue3Sdfn7KJ2GycwQoO7Bg49qlIhqzPHA3PKtpD4OnChKlWCfDfPxBU"
"2fRBwMum36P75reoOfMKqEhhqo88UKLRaY93wEoP+zSsVcvr9Ey4ml3D8Gp2xYMGfPsHiyqb"
"ykc/+tG3/Rle7/Ql+9YvVHHpt4Jq90AtY3mfGfe6ctXbJd0XXApqC8/aq/pftrKVi+gKLayM"
"DyhSPH3w8eLpD+5bBmUGjilc4lVRWgbuUpyqS0E5dJviqxXgkz4ESPH04cIrTX8y0rZQqZOn"
"GLXKVElYoQCf+iHjKU0farzuTF+9b/cJs4UF+JEfbobuTP8PvRhXFuF66lUAAAAASUVORK5C"
"YII=")
#----------------------------------------------------------------------
RGBCubeImage = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAL4AAAC+CAIAAAAEFiLKAAAAA3NCSVQICAjb4U/gAAAWGUlE"
"QVR4nO1df+g0xXn/zD5qWzR/qBQlRZQSSSumIbZUli4y0L6lNAqNaUkiIRpItSVSKi+0Fpsm"
"oCmSWtJYIUmbxgZepMGS2hQ0JU0zhA3S8KbBJBClQQzBUg1Jg00wUfemf8zt7jO/du/ue3c7"
"ezcfRJ7v3N7s7MxnPvM8z8zeK5RSyMhYH8XUDciYKzJ1MjZEpk7GhsjUydgQmTpDkFJO3YR0"
"kakThZRSZ/bEkakThuENgMyeGDJ1Auh4Y5DZE0SmjgvDEmH/h8weDyJnk4OoTkk0oEY3gmr1"
"71M3J0WcM3UDkkN1vcTLaAAABDRAKU8Rilr968QtSwyZOj2qN0kslqRZtIWdUcrrCVSrf56g"
"ZUkiUwcAqrf1SmO40rQfNfaVpXwTgWr1j/trXKo4dupU73CVJqY6HKV8G4FqdWbn7UsYx0ud"
"6taw0gyrDkcp30GgWj24qyamjWOkTvVuiWaENMOqw1HKWwlFrT6y5VYmj+OiTnVa4qWVSLOK"
"6nCU8t2EolZ/vZ2GzgHHQp3qzlWVZl3V4SjlaUJRq784UVtngsOnTvXecZ/m5KrDUco7CUWt"
"/nyT5s4Hh0yd6p6h6GnYPgl1DEr5XgLV6s82rSB1HCZ1qvs2V5qTLFg+SnkPgWr1JyerJkUc"
"GnWqD51UaU6+YPko5X0EqtUd26gsFRwOdaqPbkdptqs6HKX8EIFqdfv2qpwSh0Cd6uObRE/D"
"pNmu6nCU8qOEola/u+2K9415U6d6aL08zeqk2YXqcJTy44SiVrfspvp9YK7UqR7eldLsWnU4"
"SvkQoajVW3d5k11hftSpPr19n2b/qsNRyocJRa3evPtbbRNzok712DajpxRUh6OUnyZQrd64"
"rxueFPOgTvX5/SnN/lWHo5SPEahWv77f226C1KlT1ftWmqlUh6OUnydQra6b4uarIl3qVGen"
"UZptbUScHKWsCVSrcromDCFF6lRP7CN62s9GxMlRyrOEolbXTN0QF2lRp3pqt3maqTYiTo5S"
"PkEoavW6qRvSIxXqVE+nojSpqQ5HKZ8iFLW6cuqGAClQp3p2ep8mNTd5GKV8mlDU6oppmzEl"
"darnp4ye0gzOV0cpnyVQrS6dqgHTUKd6IV2lSV91OEr5PIFqdfH+b71v6lQvpq40c1EdjlK+"
"QKBanb/Pm+6POpWeh9LMS3U4SvkigWp13n5utw/qVOdINGg0MB/SzJE6BqXUhKJWevzSk2G3"
"1KkukHgJzQKYG2nmtWD5KOU5hKJWL+3uFruiTnWhRDNj0sxXdThKeQGhqNULu6h8+9SpLpV4"
"efakmbvqcJTyQkJRq+9ut9ptUqe6TGKBpgHmT5rDUB2OUl5KoFo9u60Kt0Od6kqJlw+KNIek"
"OhylvIxAtXrm5FWdlDrVVYemNIeqOhylvJJAtXryJJVsTp3qmsNUmsNWHY5SXkWgWn1ts69v"
"Qp3q2kOIno5ZdThKeQ2hqNXZdb+4NnWqUh4DaY5BdThKeS2hqNXjq39lbeo0x0Ga41GdjbE2"
"dRbHQZpMnVHsSnXmTppjW7A2wPZV5zBIk1VnFNtUnUMiTVadUWxHdQ6PNFl1RnFS1TlU0mTV"
"GcXmqnPYpMmqM4pNVOcYSJNVZxT7SAnOkTRZdUax25TgfEmTqTOKdDciUlgWuZHhIMWNiBRI"
"k1VnFGltRKRDmqw6o0hlIyI10mTVGcX0GxFpkiarziim3IhImTRZdUYxzUZE+qTJqjOKfW9E"
"zIU0WXVGsb+NiHmRJqvOKNJNCaawLHIjw0GKKcEUSJOpM4q0UoLpkCYvWKNIJSWYGmmOU3XE"
"OhdPnxJMkzTHqTprsSGfTR76lBsZDvLZ5CGbGxkO8tnkIZsbGQ7y2eQhmxsZDvLZ5CGbGxkO"
"8tnkoW8hUyeOdDciUlgWuZHhIMWNiBRIk1VnFGltRKRDmqw6o0hlIyI10mTVGcX0GxFpkiar"
"zijy2eQhmxsZDvLZ5CGbGxkO8tnkIZsbGQ7y2eQhmxsZDtJNCaawLHIjw0GKKcEUSJOpM4q0"
"UoLpkCYvWKNIJSWYGmmy6oxi+pRgmqTJqjOKfDZ56FNuZDjIZ5OHbG5kOMhnk4dsbmQ4yGeT"
"h2xuZDjIZ5OHbG5kOMhnk4e+hUydONLdiEhhWeRGhoMUNyJSIE1WnVGktRGRDmmy6owilY2I"
"1EiTVWcU029EpEmarDqjyGeTh2xuZDjIZ5OHbG5kOMhnk4dsbmQ4yGeTo7ZuNIBGN0KchwwP"
"6aYEpyTNjzQAcZ5AUwDQugEgxKvcvjhupJgSnJI039cAxPkCTYGiAAiAoJ8EoPWPAAhxkd8n"
"x4m0UoJTkuY7GoC4SGBRoChQEBYAFWgIAFAIehVAWn8fgBCvDvXNcSGVlOCUpPm2BiAuEUAB"
"KgBCUWABEKEBUCyNosACgn4aKLR+DoAQl0e76QgwfUpwStI8rQGIywUWLWlQoAAWS6UBCATj"
"8fBPBb0aIK2/DUCI10a66sBxpGeT9Tc0APGauNKYBQtLdwdUoAFAyz+XCvSzQKH1NwEIcXW0"
"yw4UR3c2WX9VAxBXCzQFzi2wGFAaQgEsChDQUFyBfh6A1t8AIMQvDfXdYeGIzibrL2kA4g0C"
"6BzhAkAvMI5r7ChNoLz7eiHoFwDS+isAhCij3XdAOIqzyfqLGoAobaUBel3hBgD4SuOUM6Ov"
"hwT9MgCtvwRACDnQjQeAAz+brD+nAQjJQ+5eKtrhN74LM7pyS2nYgkXEjI52nQL9CkBafwGA"
"EKeGenPOONizyfozGoA4xaIn4whzA+hj79419so7AwCoX7Cccrt+Qb8KQOvPARDihmiHzhbp"
"bkRsTppHNABxg0BTgIqlQiwdYeYRd0rTu8axcltpQPFy90aCfgMotH4UgBA3DvftvJDiRsTm"
"pHlYAxA3spA74NLCir2XrrEnMH1wjojS2C7z4I0E/RZQaP0pAELcNNrJs0BaGxGbk+aMBiBu"
"CobczKeB7RHHYvKR4NyL0l3fmd+IK9BbAGj9SQBC3LxGpyeJVDYiNifNgxqAuLkNue2YuTVg"
"7yeQVR7I/q0WnDtKY/nOiLVE0NsB0voTAIS4bbi3U8b0GxGbk+bDGoC4rVWaaMzsx96h8o2D"
"c+eOQ9F7fyNB7wKg9ccACHH7GgOQDGZ5NlnfrwGI273knjPvnZAbTADglHOjDbnhBedASGD4"
"HYMtCd7RhPG/D5DWDwAQ4nSsz9PEzM4m6w9oAOK0wIIpDWDFxo5OdCF3HzzbAtOvXMY1iQfn"
"rsB0KxciLeEXcM2zWiLoDgBafxCAEHeuMgopYDZnk/X7NQBxlx1yA3bszaZ1rxORmLzP/nWO"
"yGrB+XpROpeuyAVFgQUJ+iOg0Ppe088k7h4YhRQwg7PJ+j0agHifmfrnggoAy1HvfNumQMEW"
"FDDxWP7pG+zrA9G7+dRxjYtWaRxdMd9qPCUzuhUo7wwYH1+cd5fRJ63fB4DEvWMDMhnSTQku"
"AH2nBiDe36ZeePRrprLr5GJECQJftwUAdm2Wb8Rj71WCf6ZkjoCtENILuhsotL4LAIkPrjo8"
"e0SKKcEFoO/QAMRfEhqA2FznYXanGb3rENn6NobruwwojVMtP8oTz/65rRrdkOeRfLhVgj4A"
"kNanAZB4YK2R2jXSSgkuAH27BiDub3u/m5R+FL1cI1rfNhxdjwXPsSi9u8z1ncdi71i1a7XK"
"rlbQ/QC0/gMAJD627pDtCKmkBBeAfpcGIP6G0ADnOuk1rgS2JETLR4Pn4Wq7y1ao1lKaWPA/"
"8PXRao0f/WGAtL4VAIlPDI7SPjB9SnAB6Js1APEgUxrAimm5EwpPafqNJ9t3GQ+eg2F8XGkC"
"1XKlWTH4755ulZifh/RGgR4EoPU7AZD45LrDt0VMfDZZv0UDEA8RFp7SBMLsgSiaTVln6o9m"
"5yyP26t/3OOOJf3s5lnbEaFmO/WEQ/pOgc4AhdY3ASDxKXeQ9oLJzibrGzUA8XDbO+Gpxqbs"
"cmb7W9+OErRGrNwVMDsIXzE475QgXL6L+gPdIuhhAFr/DgASj64+iFvBBGeT9W9qAOJfWp9m"
"4cXMnRGLckeD5+GYPFzPXOsX9AhAWt8AgMRnx4dwS9jr2WR9SgMQn7VD7t6liJzKi87g4Km/"
"uBIcdP2CPgOQ1qcAkPjC2DBuAXs6m6yv0wCEYo6wE446HvFAlBsLnp3yo6xfkAKgtQRA4vGN"
"KLEqdn42WZcagHicsGDRUywKtXxDVr6t4Pk46hf0RaDQugRA4iujY7oZdqg6+g0agPhy+6gj"
"wS3b4g6X+7G3F9y60a83U4fK/fPL3Uqx6/q95MKynkiUPhpSFMCCBH0ZgNa/CIDE19cd6FHs"
"RHX01RqA+LofcgeDT77FPRyTrxbcBhL/bKb69fe7HHb9MSVwg/9IFB0767N2cuFE9Qv6KlBo"
"fTUAEt/E9rBl1dGv0QDEU+2jxmJjf+PGFRg7Jg+4nKtMUK9+f4KOurSuUvLgn9c/3AB27BCh"
"+v0H6ZwY9wGHfWe/fuNEPwVA69cCIPGtdQc9iK2pjr5cAxDfYiE3sJyyFIktY+V+7Bp6U643"
"/HqcwzqjN+p8jlgDovVEyp3kgn/scMUbjTdgjZ4U9DRAWl8OgMRzwfGVUiqlgh852ILq6Es0"
"APFcu0kU3nD25oc/76NvxI3GtLZH6ShBtAFe/YOv5IWUbHgPvPNhI0/q1x+L0mPvHAa2wIIN"
"cN4t/G+AtL4EAInvrUuADqtSpyMjVx19kQYgvtdSIeBRtg9geY7+tPDy/U7ICm/+uS4nv5E9"
"QQMN6OoJnfixGhC8kf2kQzcafNLYg4SV5oRP6napoO8ApPVFAEj8sBvlFfmAtVTHsMeojj5f"
"AxA/bEPuXmBs+rse69i0CPyMTWDeuB5xfyOvAYHg1txoteDZfSMn/qTujQae1G5JwLcdvFH/"
"dvPokzo3CjypoP8DCq3PB0DiJQCAllKssmats2BpSCG11gDES+ZRz0UBgE1TAlD0RncSzynv"
"j4eyr4OWua9AeWsgVL9f7tTfVxgrt+vvDKd+/0ZOPf2VTjmLjHg93doULo/X7zjXndFJjlN/"
"uLwL4w1pfgLQWBlr+joaQgj8uHDLqV3GouXF8n8LWMZyirSXGaN3+rgBtni1iVfA8kZ5eec+"
"F+wymPgOyzMSvNx8zQRNYPU45c64dvVYvFmZQN2i41zgjLT/UodVztUdlngbKQqmIZx8xI/P"
"WYs32MBNhgbEAq+cC2pPkvvt4AmSbsXtoiHjFS2Y0YkT7GDYPwRDZqQLLFoHq/OFrXJumE/b"
"zi2YnhOsdcHc2ve0zAA3rG2uwZra2GtNFxA0hdUGfzEie303t+57pn0oYkbTdoi/g+EfsCen"
"P5eNVI/WANZxcjamzpI9LyulZHW9u4L2C7C94loeHIKOWz+ZnF6IHUiIhXKjDlDA7VjhLLqp"
"x2lbd0GQ+rzNHfV5uSUA6F2W2NvNQ8E/6/muPztWWVmMZSPVP9VmPKWUtuSs5O6sQx1h/dXF"
"XLL67WVHBzw4NmbhN6Qcxy0eIAwPVS/1o+d17KFyhmSNoYrRbngLkwuDd6rQoVcvmbw8Etub"
"JjVOfzqjUKAAiNSZmo9jYHRXocOK+Z9RyOrtQDvqi3bUzaA2zFh+Gitvv25cVMtoO46Xo10u"
"0a6STasZDbtgWd4a3SqJdlHgTe2/RWhCBm+qadKiM0w7fcNuatc5vdHW4zYVaIr+iZxe6jvZ"
"aaHdt6yT1d/1pDkhNlqwQlD1GQCyeudS58NrkOdIxrJwlkcZTL4FYxbb5ey8bMt1dXxYr6kx"
"J9dZ71bxgle6IHhlpM1d/qxrvCn3I1ayKlQf2RpjOmxNdRzI6ve8SUkBAeD6EZhejjB4AjAw"
"7bgSdCIHYioVlwowSeg1z5eK7u6OyJE36WOa4clYWHGZXHUyNiK9S0P91fZJY7Ar6hjI6g/Z"
"WLLFIqzqsJenYVVn8u6MUGCobCoEhiq2tjL9t2jnDxXYHIiUL3wjRjtuOItsfG7Y/anu3RVp"
"DHZLHQNZ3QmEpt3wUC2CUuEpAefZNocq1FRrCIPUJ8uh4VNiwKGJSq8zc+w2x7zJBam7d0sa"
"g31Qx0BW7wkPai/vITEPTjtfzDnPwmMWGqqojAVp1349MGYhAbBWSc5Cu4XBRdZdhvymwp4w"
"7fL0p/sgjcHW3ORRqPpuALK6Z+m+FZHdxK7EGMTzql46NeBuEwB34ybmJlvudtz5LZysP69n"
"LS8Ytks76JibtN7ABWipSaT+eH+M6bA/1XEgq/viYh4JbpvItFsElSCmEEHXNbg4rp8sCLuu"
"3ioZyxoElsvQKsmWS3X7BKQxmIw6BrJ6AIi5QRRW9YHgwhmqfszaC+BlTfyIyU/8uP5QaCUK"
"Lz0hVg0sPdyhia2SraFum4w0BhNTx0BWfwv4YzYoFQNDFZYKrBfchiPwsbyiI2M+9WNZO8eh"
"GcgaNKRumZg0BklQx0BWf9+vTQOqzsfMCdFXGrPBDJAzVK6MwVaIYNbAy/cM54LXSRaotyZB"
"GoP9ucmjUPUtAGT1D0uvMPybfo7DGH9hZXlB527baWsnPR1zcp0XeP0LfC94wIjlgp2HWni5"
"4DcnxJgOCamOA1k9Eglu7bRhLK9jrUfxvGLAoYG1ETbk0MQ0I5QK8r1+30vzHZo3pkgag3Sp"
"YyCrx4BYXieSAo7l6AZSwLFkHUIJntX3SZwEj5UBiueCzdr0a+mSxiB16hjI6t8ibpAjFcx7"
"iA3hgD/kyNhwLnhke8T3h3wZi+SCr0udNAbzoI6BrGpb1REaM08hYltaASVwFGI0F8wuWCWv"
"M5YsUNfOgzQGc6KOgazOhtN3vusTyOuPDSoGEjxrJQvieyaOc2Mye6+fE2kMEoqwVoSql/+q"
"r6y+BjIxSysM/hk/ft6bKHBUuelOASPyUunqv0Lq734gEMQ1/Ul4ddX8GNNhftTpoOrXAZDV"
"fwEARl+R5PF2qDxwLtNc32pV7KcFnXcOnQu6ZIEx2pPw6soZk8ZgxtQxUPWVAGT1zHJaO3md"
"LusTUwgKyVWMf7Ef8w7+cmVExtRlsyeNweypY6DqKwDI6n+W5LDevY0rAbV+jNEeXyGC9HJ+"
"vGL4ZSi2SqqLD4Q0BgdCHQNVXwpAVv/brkHtoMJeehx6BX9Gw3FoOnenM4LlLqtaL/jCgyKN"
"wUFRx0DVFxpDVi+2fnQb8vhv2TkCE3wBBbBXQ0fVorsf6rwDZEyHA6ROB1X/FABZveIO8JIu"
"nr9spGLo1S3mR/s8Y56N0odMGoNDpo6Bqs8BICuyHRq4u6qdYT5yFyzHj46+5aleOXzSGBw+"
"dQxU/QoAWV0Q8qMx8pan6+iE/SH1g2MhjcGxUMdA1T8AIKuLAdipoI4oYJke28uOv5Crnj8u"
"0hgcF3UMVP1dY8jqZ0Aw/3KilY9mwVH0x5pA6tljZEyHY6ROB1U/C0BWV4TzOl4KuHdonjlq"
"0hgcNXUMVP0MAFn9nKsrQYfmyUyaJTJ1llD1kwBk9Xrm2YC7Qeo/M2ksZOpYUPUTAGR1LYBu"
"wVJnM2kCyNQJQNX/MXUTZoBi6gZkzBWZOhkbIlMnY0Nk6mRsiP8H0trOAD1A8ycAAAAASUVO"
"RK5CYII=")
def rad2deg(x):
"""
Transforms radians into degrees.
:param `x`: a float representing an angle in radians.
"""
return 180.0*x/pi
def deg2rad(x):
"""
Transforms degrees into radians.
:param `x`: a float representing an angle in degrees.
"""
return x*pi/180.0
def toscale(x):
"""
    Scales a value from the 0-255 colour range to the colour wheel radius.
    :param `x`: a float value, in the 0-255 range, to be scaled.
"""
return x*RADIUS/255.0
def scaletomax(x):
"""
    Scales a value from the colour wheel radius back to the 0-255 colour range.
    :param `x`: a float value (a distance from the wheel center) to be scaled.
"""
return x*255.0/RADIUS
def rgb2html(colour):
"""
    Transforms an RGB triplet into an HTML hex string.
    :param `colour`: an object exposing ``r``, ``g`` and ``b`` integer attributes (such as L{Colour}).
"""
hexColour = "#%02x%02x%02x"%(colour.r, colour.g, colour.b)
return hexColour.upper()
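# Illustrative example: for an object with r=255, g=128, b=0, rgb2html returns "#FF8000".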
def Slope(pt1, pt2):
"""
Calculates the slope of the line connecting 2 points.
:param `pt1`: an instance of `wx.Point`;
:param `pt2`: another instance of `wx.Point`.
"""
y = float(pt2.y - pt1.y)
x = float(pt2.x - pt1.x)
if x:
return y/x
else:
return None
def Intersection(line1, line2):
"""
Calculates the intersection point between 2 lines.
:param `line1`: an instance of L{LineDescription};
:param `line2`: another instance of L{LineDescription}.
"""
if line1.slope == line2.slope:
# Parallel lines, no intersection
return wx.Point(0, 0)
elif line1.slope is None:
        # First line is vertical: its equation is x = line1.x
        # Substitute that x into the second line's equation to get y
x = line1.x
y = line2.slope*x + line2.c
elif line2.slope is None:
        # Second line is vertical: its equation is x = line2.x
        # Substitute that x into the first line's equation to get y
x = line2.x
y = line1.slope*line2.x + line1.c
else:
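        # Both lines are oblique: solving y = m1*x + c1 and y = m2*x + c2
        # simultaneously gives y = (c1*m2 - c2*m1)/(m2 - m1); x then follows
        # from the first equation.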
y = ((line1.c*line2.slope) - (line2.c*line1.slope))/(line2.slope - line1.slope)
x = (y - line1.c)/line1.slope
return wx.Point(int(x), int(y))
def FindC(line):
""" Internal function. """
if line.slope is None:
c = line.y
else:
c = line.y - line.slope*line.x
return c
def PointOnLine(pt1, pt2, length, maxLen):
""" Internal function. """
a = float(length)
if pt2.x != pt1.x:
m = float((pt2.y - pt1.y))/(pt2.x - pt1.x)
m2 = m*m
a2 = a*a
c = pt1.y - m*pt1.x
c2 = c*c
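        # Look for x such that (x - pt1.x)**2 + (y - pt1.y)**2 == a**2 with y = m*x + c.
        # Since pt1 lies on the line, this reduces to the quadratic
        # x**2 - 2*pt1.x*x + (pt1.x**2 - a**2/(m2 + 1)) == 0, solved below.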
A = 1.0
x = pt1.x
B = 2.0 * pt1.x
x *= x
C = x - a2/(m2 + 1)
x = (B + sqrt(B*B - (4.0*A*C)))/(2.0*A)
y = m*x + c
pt = wx.Point(int(x), int(y))
if Distance(pt, pt1) > maxLen or Distance(pt, pt2) > maxLen:
x = (B - sqrt(B*B - (4.0*A*C)))/(2.0*A)
y = m*x + c
pt = wx.Point(int(x), int(y))
else:
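        # The two points share the same x: the line is vertical, so move straight
        # up or down from pt1 by `length`.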
a2 = a*a
y = sqrt(a2)
x = 0.0
pt = wx.Point(int(x), int(y))
pt.x += pt1.x
pt.y += pt1.y
if Distance(pt, pt1) > maxLen or Distance(pt, pt2) > maxLen:
y = -1.0*y
pt = wx.Point(int(x), int(y))
pt.x += pt1.x
pt.y += pt1.y
return pt
def Distance(pt1, pt2):
"""
Returns the distance between 2 points.
:param `pt1`: an instance of `wx.Point`;
:param `pt2`: another instance of `wx.Point`.
"""
distance = sqrt((pt1.x - pt2.x)**2.0 + (pt1.y - pt2.y)**2.0)
return int(distance)
def AngleFromPoint(pt, center):
"""
Returns the angle between the x-axis and the line connecting the center and
the point `pt`.
:param `pt`: an instance of `wx.Point`;
    :param `center`: an instance of `wx.Point` representing the center.
"""
y = -1*(pt.y - center.y)
x = pt.x - center.x
if x == 0 and y == 0:
return 0.0
else:
return atan2(y, x)
def PtFromAngle(angle, sat, center):
"""
Given the angle with respect to the x-axis, returns the point based on
the saturation value.
:param `angle`: a float representing an angle;
:param `sat`: a float representing the colour saturation value;
    :param `center`: an instance of `wx.Point` representing the center.
"""
angle = deg2rad(angle)
sat = toscale(sat)
x = sat*cos(angle)
y = sat*sin(angle)
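    # The y component is flipped below because screen coordinates grow downwards.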
pt = wx.Point(int(x), -int(y))
pt.x += center.x
pt.y += center.y
return pt
def RestoreOldDC(dc, oldPen, oldBrush, oldMode):
"""
Restores the old settings for a `wx.DC`.
:param `dc`: an instance of `wx.DC`;
:param `oldPen`: an instance of `wx.Pen`;
:param `oldBrush`: an instance of `wx.Brush`;
:param `oldMode`: the `wx.DC` drawing mode bit.
"""
dc.SetPen(oldPen)
dc.SetBrush(oldBrush)
dc.SetLogicalFunction(oldMode)
def DrawCheckerBoard(dc, rect, checkColour, box=5):
"""
Draws a checkerboard on a `wx.DC`.
:param `dc`: an instance of `wx.DC`;
:param `rect`: the client rectangle on which to draw the checkerboard;
    :param `checkColour`: the colour used for the dark squares of the checkerboard;
    :param `box`: the size, in pixels, of each checkerboard square.
:note: Used for the Alpha channel control and the colour panels.
"""
y = rect.y
checkPen = wx.Pen(checkColour)
checkBrush = wx.Brush(checkColour)
dc.SetPen(checkPen)
dc.SetBrush(checkBrush)
dc.SetClippingRect(rect)
while y < rect.height:
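        # Offset every other row by one box so the squares form a checker pattern.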
x = box*((y/box)%2) + 2
while x < rect.width:
dc.DrawRectangle(x, y, box, box)
x += box*2
y += box
class Colour(wx.Colour):
"""
This is a subclass of `wx.Colour`, which adds Hue, Saturation and Brightness
    capability to the base class. It also contains methods to convert RGB triplets
    into HSB triplets and vice-versa.
"""
def __init__(self, colour):
"""
Default class constructor.
:param `colour`: a standard `wx.Colour`.
"""
wx.Colour.__init__(self)
self.r = colour.Red()
self.g = colour.Green()
self.b = colour.Blue()
self._alpha = colour.Alpha()
self.ToHSV()
def ToRGB(self):
""" Converts a HSV triplet into a RGB triplet. """
maxVal = self.v
delta = (maxVal*self.s)/255.0
minVal = maxVal - delta
hue = float(self.h)
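        # Walk the hue circle in 120-degree sectors: red-dominant (h > 300 or
        # h <= 60), green-dominant (60 < h < 180) and blue-dominant otherwise.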
if self.h > 300 or self.h <= 60:
self.r = maxVal
if self.h > 300:
self.g = int(minVal)
hue = (hue - 360.0)/60.0
self.b = int(-(hue*delta - minVal))
else:
self.b = int(minVal)
hue = hue/60.0
self.g = int(hue*delta + minVal)
elif self.h > 60 and self.h < 180:
self.g = int(maxVal)
if self.h < 120:
self.b = int(minVal)
hue = (hue/60.0 - 2.0)*delta
self.r = int(minVal - hue)
else:
self.r = int(minVal)
hue = (hue/60.0 - 2.0)*delta
self.b = int(minVal + hue)
else:
self.b = int(maxVal)
if self.h < 240:
self.r = int(minVal)
hue = (hue/60.0 - 4.0)*delta
self.g = int(minVal - hue)
else:
self.g = int(minVal)
hue = (hue/60.0 - 4.0)*delta
self.r = int(minVal + hue)
def ToHSV(self):
""" Converts a RGB triplet into a HSV triplet. """
minVal = float(min(self.r, min(self.g, self.b)))
maxVal = float(max(self.r, max(self.g, self.b)))
delta = maxVal - minVal
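        # Value is the largest RGB channel; saturation measures the spread (delta)
        # between the largest and smallest channels relative to that value.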
self.v = int(maxVal)
if abs(delta) < 1e-6:
self.h = self.s = 0
else:
temp = delta/maxVal
self.s = int(temp*255.0)
if self.r == int(maxVal):
temp = float(self.g-self.b)/delta
elif self.g == int(maxVal):
temp = 2.0 + (float(self.b-self.r)/delta)
else:
temp = 4.0 + (float(self.r-self.g)/delta)
temp *= 60
if temp < 0:
temp += 360
elif temp >= 360.0:
temp = 0
self.h = int(temp)
def GetPyColour(self):
""" Returns the wxPython `wx.Colour` associated with this instance. """
return wx.Colour(self.r, self.g, self.b, self._alpha)
class LineDescription(object):
""" Simple class to store description and constants for a line in 2D space. """
def __init__(self, x=0, y=0, slope=None, c=None):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `x`: the x coordinate of the first point;
:param `y`: the y coordinate of the first point;
:param `slope`: the line's slope;
:param `c`: a floating point constant.
"""
self.x = x
self.y = y
self.slope = slope
self.c = c
class BasePyControl(wx.PyControl):
"""
Base class used to hold common code for the HSB colour wheel and the RGB
colour cube.
"""
def __init__(self, parent, bitmap=None):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent;
:param `bitmap`: the background bitmap for this custom control.
"""
wx.PyControl.__init__(self, parent, style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._bitmap = bitmap
mask = wx.Mask(self._bitmap, wx.Colour(192, 192, 192))
self._bitmap.SetMask(mask)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
self.Bind(wx.EVT_MOTION, self.OnMotion)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{BasePyControl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
dc.DrawBitmap(self._bitmap, 0, 0, True)
if self._mainDialog._initOver:
self.DrawMarkers(dc)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{BasePyControl}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
:note: This method must be overridden in derived classes.
"""
pass
def DrawLines(self, dc):
"""
Draws the lines connecting the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
:note: This method must be overridden in derived classes.
"""
pass
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyControl`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyControl`.
"""
return False
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{BasePyControl}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return wx.Size(self._bitmap.GetWidth(), self._bitmap.GetHeight())
class RGBCube(BasePyControl):
"""
Implements the drawing, mouse handling and sizing routines for the RGB
cube colour.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BasePyControl.__init__(self, parent, bitmap=RGBCubeImage.GetBitmap())
self._index = -1
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.WHITE_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
rects = []
blueLen = self._mainDialog._blueLen
greenLen = self._mainDialog._greenLen
redLen = self._mainDialog._redLen
colour = self._mainDialog._colour
pt = [wx.Point() for i in xrange(3)]
pt[0] = PointOnLine(Vertex, Top, (colour.r*redLen)/255, redLen)
pt[1] = PointOnLine(Vertex, Left, (colour.g*greenLen)/255, greenLen)
pt[2] = PointOnLine(Vertex, Right, (colour.b*blueLen)/255, blueLen)
for i in xrange(3):
rect = wx.Rect(pt[i].x - RECT_WIDTH, pt[i].y - RECT_WIDTH, 2*RECT_WIDTH, 2*RECT_WIDTH)
rects.append(rect)
dc.DrawRectangleRect(rect)
self.DrawLines(dc)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
self._rects = rects
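# Note on the marker idiom used here and in the other custom controls: the
# markers are drawn with the wx.XOR logical function, so drawing the same
# marker twice restores the underlying bitmap. That is why the mouse handlers
# call DrawMarkers() once to erase the old marker and once more, after the
# colour has been updated, to paint the new one.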
def DrawLines(self, dc):
"""
Draws the lines connecting the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
cuboid = self._mainDialog._cuboid
dc.DrawLinePoint(cuboid[1], cuboid[2])
dc.DrawLinePoint(cuboid[2], cuboid[3])
dc.DrawLinePoint(cuboid[3], cuboid[4])
dc.DrawLinePoint(cuboid[4], cuboid[5])
dc.DrawLinePoint(cuboid[5], cuboid[2])
dc.DrawLinePoint(cuboid[5], cuboid[6])
dc.DrawLinePoint(cuboid[6], cuboid[7])
dc.DrawLinePoint(cuboid[7], cuboid[4])
dc.DrawLinePoint(cuboid[1], cuboid[6])
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
self._mouseIn = False
if self._rects[RED].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = RED
elif self._rects[GREEN].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = GREEN
elif self._rects[BLUE].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = BLUE
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
self._mouseIn = False
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if not (self.GetCapture() and self._mouseIn):
event.Skip()
return
bChange = False
mainDialog = self._mainDialog
colour = mainDialog._colour
redLen, greenLen, blueLen = mainDialog._redLen, mainDialog._greenLen, mainDialog._blueLen
dc = wx.ClientDC(self)
self.DrawMarkers(dc)
if self._index == RED:
if point.y > Vertex.y:
point.y = Vertex.y
point.x = Vertex.x
val = Distance(point, Vertex)
if val > redLen:
val = redLen
val = (float(val)/redLen)*255
colour.r = int(val)
pt = PointOnLine(Vertex, Top, (colour.r*redLen)/255, redLen)
self._rects[RED] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
elif self._index == GREEN:
if point.x > Vertex.x:
point.x = Vertex.x
point.y = self._rects[GREEN].GetTop() + RECT_WIDTH
val = Distance(point, Vertex)
if val > greenLen:
val = greenLen
val = (float(val)/greenLen)*255
colour.g = int(val)
pt = PointOnLine(Vertex, Left, (colour.g*greenLen)/255, greenLen)
self._rects[GREEN] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
elif self._index == BLUE:
if point.x < Vertex.x:
point.x = Vertex.x
point.y = self._rects[BLUE].GetTop() + RECT_WIDTH
val = Distance(point, Vertex)
if val > blueLen:
val = blueLen
val = (float(val)/blueLen)*255
colour.b = int(val)
pt = PointOnLine(Vertex, Right, (colour.b*blueLen)/255, blueLen)
self._rects[BLUE] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
if bChange:
mainDialog.CalcCuboid()
self.DrawMarkers(dc)
colour.ToHSV()
mainDialog.SetSpinVals()
mainDialog.CalcRects()
mainDialog.DrawHSB()
mainDialog.DrawBright()
mainDialog.DrawAlpha()
class HSVWheel(BasePyControl):
"""
Implements the drawing, mouse handling and sizing routines for the HSV
colour wheel.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BasePyControl.__init__(self, parent, bitmap=HSVWheelImage.GetBitmap())
self._mouseIn = False
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.WHITE_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(self._mainDialog._currentRect)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
self._mouseIn = False
if self.InCircle(point):
self._mouseIn = True
if self._mouseIn:
self.CaptureMouse()
self.TrackPoint(point)
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
self._mouseIn = False
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if self.GetCapture() and self._mouseIn:
self.TrackPoint(point)
def InCircle(self, pt):
"""
Returns whether a point is inside the HSV wheel or not.
:param `pt`: an instance of `wx.Point`.
"""
return Distance(pt, self._mainDialog._centre) <= RADIUS
def TrackPoint(self, pt):
"""
Track a mouse event inside the HSV colour wheel.
:param `pt`: an instance of `wx.Point`.
"""
if not self._mouseIn:
return
dc = wx.ClientDC(self)
self.DrawMarkers(dc)
mainDialog = self._mainDialog
colour = mainDialog._colour
colour.h = int(rad2deg(AngleFromPoint(pt, mainDialog._centre)))
if colour.h < 0:
colour.h += 360
colour.s = int(scaletomax(Distance(pt, mainDialog._centre)))
if colour.s > 255:
colour.s = 255
mainDialog.CalcRects()
self.DrawMarkers(dc)
colour.ToRGB()
mainDialog.SetSpinVals()
mainDialog.CalcCuboid()
mainDialog.DrawRGB()
mainDialog.DrawBright()
mainDialog.DrawAlpha()
class BaseLineCtrl(wx.PyControl):
"""
Base class used to hold common code for the Alpha channel control and the
brightness palette control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
wx.PyControl.__init__(self, parent, size=(20, 200), style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
self.Bind(wx.EVT_MOTION, self.OnMotion)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{BaseLineCtrl}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
theRect = self.GetClientRect()
if not theRect.Contains(point):
event.Skip()
return
self.CaptureMouse()
self.TrackPoint(point)
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if self.GetCapture():
self.TrackPoint(point)
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{BaseLineCtrl}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return wx.Size(24, 208)
def BuildRect(self):
""" Internal method. """
brightRect = wx.Rect(*self.GetClientRect())
brightRect.x += 2
brightRect.y += 6
brightRect.width -= 4
brightRect.height -= 8
return brightRect
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyControl`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyControl`.
"""
return False
class BrightCtrl(BaseLineCtrl):
"""
Implements the drawing, mouse handling and sizing routines for the brightness
palette control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BaseLineCtrl.__init__(self, parent)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{BrightCtrl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
colour = self._mainDialog._colour.GetPyColour()
brightRect = self.BuildRect()
target_red = colour.Red()
target_green = colour.Green()
target_blue = colour.Blue()
h, s, v = colorsys.rgb_to_hsv(target_red / 255.0, target_green / 255.0,
target_blue / 255.0)
v = 1.0
vstep = 1.0/(brightRect.height-1)
for y_pos in range(brightRect.y, brightRect.height+brightRect.y):
r, g, b = [c * 255.0 for c in colorsys.hsv_to_rgb(h, s, v)]
colour = wx.Colour(int(r), int(g), int(b))
dc.SetPen(wx.Pen(colour, 1, wx.SOLID))
dc.DrawRectangle(brightRect.x, y_pos, brightRect.width, 1)
v = v - vstep
dc.SetPen(wx.BLACK_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangleRect(brightRect)
self.DrawMarkers(dc)
def TrackPoint(self, pt):
"""
Tracks a mouse action inside the palette control.
:param `pt`: an instance of `wx.Point`.
"""
brightRect = self.BuildRect()
d = brightRect.GetBottom() - pt.y
d *= 255
d /= brightRect.height
if d < 0:
d = 0
if d > 255:
d = 255
mainDialog = self._mainDialog
colour = mainDialog._colour
mainDialog.DrawMarkers()
colour.v = int(d)
colour.ToRGB()
mainDialog.SetSpinVals()
mainDialog.CalcRects()
mainDialog.CalcCuboid()
mainDialog.DrawMarkers()
mainDialog.DrawAlpha()
def DrawMarkers(self, dc=None):
"""
Draws square markers used with mouse gestures.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
colour = self._mainDialog._colour
brightRect = self.BuildRect()
y = int(colour.v/255.0*brightRect.height)
y = brightRect.GetBottom() - y
brightMark = wx.Rect(brightRect.x-2, y-4, brightRect.width+4, 8)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.Pen(wx.WHITE, 2))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(brightMark)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
class AlphaCtrl(BaseLineCtrl):
"""
Implements the drawing, mouse handling and sizing routines for the alpha
channel control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BaseLineCtrl.__init__(self, parent)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{AlphaCtrl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
pdc = wx.PaintDC(self)
dc = wx.GCDC(pdc)
mem_dc = wx.MemoryDC()
fullRect = self.GetClientRect()
bmp = wx.EmptyBitmap(fullRect.width, fullRect.height)
mem_dc.SelectObject(bmp)
rect = self.BuildRect()
backBrush = wx.Brush(self.GetParent().GetBackgroundColour())
mem_dc.SetBackground(backBrush)
mem_dc.Clear()
mem_dc.SetBrush(wx.WHITE_BRUSH)
mem_dc.DrawRectangleRect(rect)
DrawCheckerBoard(mem_dc, rect, checkColour)
self.DrawAlphaShading(mem_dc, rect)
mem_dc.DestroyClippingRegion()
self.DrawMarkers(mem_dc)
mem_dc.SetBrush(wx.TRANSPARENT_BRUSH)
mem_dc.SetPen(wx.BLACK_PEN)
mem_dc.DrawRectangleRect(rect)
mem_dc.SelectObject(wx.NullBitmap)
pdc.DrawBitmap(bmp, 0, 0)
def DrawAlphaShading(self, dc, rect):
"""
Draws the alpha shading on top of the checkerboard.
:param `dc`: an instance of `wx.DC`;
:param `rect`: the L{AlphaCtrl} client rectangle.
"""
gcdc = wx.GCDC(dc)
colour = self._mainDialog._colour.GetPyColour()
alpha = 255.0
vstep = 255.0*2/(rect.height-1)
r, g, b = colour.Red(), colour.Green(), colour.Blue()
colour_gcdc = wx.Colour(r, g, b, alpha)
gcdc.SetBrush(wx.TRANSPARENT_BRUSH)
for y_pos in range(rect.y, rect.height+rect.y, 2):
colour_gcdc = wx.Colour(r, g, b, int(alpha))
gcdc.SetPen(wx.Pen(colour_gcdc, 1, wx.SOLID))
gcdc.DrawRectangle(rect.x, y_pos, rect.width, 2)
alpha = alpha - vstep
def TrackPoint(self, pt):
"""
Tracks a mouse action inside the Alpha channel control.
:param `pt`: an instance of `wx.Point`.
"""
alphaRect = self.BuildRect()
d = alphaRect.GetBottom() - pt.y
d *= 255
d /= alphaRect.height
if d < 0:
d = 0
if d > 255:
d = 255
self._mainDialog._colour._alpha = int(d)
self.Refresh()
self._mainDialog.SetSpinVals()
def DrawMarkers(self, dc=None):
"""
Draws square markers used with mouse gestures.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
colour = self._mainDialog._colour
alphaRect = self.BuildRect()
y = int(colour._alpha/255.0*alphaRect.height)
y = alphaRect.GetBottom() - y
alphaMark = wx.Rect(alphaRect.x-2, y-4, alphaRect.width+4, 8)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.Pen(wx.WHITE, 2))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(alphaMark)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
class ColourPanel(wx.PyPanel):
"""
Simple custom class used to display "old" and "new" colour panels, with alpha
blending capabilities.
"""
def __init__(self, parent, style=wx.SIMPLE_BORDER):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window;
:param `style`: the L{ColourPanel} window style.
"""
wx.PyPanel.__init__(self, parent, style=style)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_SIZE, self.OnSize)
self._colour = Colour(wx.WHITE)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{ColourPanel}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
pdc = wx.PaintDC(self)
dc = wx.GCDC(pdc)
mem_dc = wx.MemoryDC()
rect = self.GetClientRect()
bmp = wx.EmptyBitmap(rect.width, rect.height)
mem_dc.SelectObject(bmp)
backBrush = wx.Brush(self.GetParent().GetBackgroundColour())
mem_dc.SetBackground(backBrush)
mem_dc.Clear()
mem_dc.SetBrush(wx.WHITE_BRUSH)
mem_dc.DrawRectangleRect(rect)
DrawCheckerBoard(mem_dc, rect, checkColour, box=10)
gcdc = wx.GCDC(mem_dc)
colour_gcdc = wx.Colour(self._colour.r, self._colour.g, self._colour.b, self._colour._alpha)
gcdc.SetBrush(wx.Brush(colour_gcdc))
gcdc.SetPen(wx.Pen(colour_gcdc))
gcdc.DrawRectangleRect(rect)
mem_dc.SelectObject(wx.NullBitmap)
dc.DrawBitmap(bmp, 0, 0)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{ColourPanel}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{ColourPanel}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def RefreshColour(self, colour):
"""
Refresh the panel after a colour/alpha change.
:param `colour`: the new background colour of L{ColourPanel}.
"""
self._colour = colour
self.Refresh()
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyPanel`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyPanel`.
"""
return False
class CustomPanel(wx.PyControl):
"""
This panel displays a series of custom colours (chosen by the user) just like
the standard `wx.ColourDialog`.
"""
def __init__(self, parent, colourData):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window;
:param `colourData`: an instance of `wx.ColourData`.
"""
wx.PyControl.__init__(self, parent, style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._colourData = colourData
self._customColours = [None]*16
self._mainDialog = wx.GetTopLevelParent(self)
self.InitializeColours()
self._smallRectangleSize = wx.Size(20, 16)
self._gridSpacing = 4
self._customColourRect = wx.Rect(2, 2, (8*self._smallRectangleSize.x) + (7*self._gridSpacing),
(2*self._smallRectangleSize.y) + (1*self._gridSpacing))
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
def InitializeColours(self):
""" Initializes the 16 custom colours in L{CustomPanel}. """
curr = self._colourData.GetColour()
self._colourSelection = -1
for i in xrange(16):
c = self._colourData.GetCustomColour(i)
if c.Ok():
self._customColours[i] = self._colourData.GetCustomColour(i)
else:
self._customColours[i] = wx.Colour(255, 255, 255)
if c == curr:
self._colourSelection = i
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return self._customColourRect.width+4, self._customColourRect.height+4
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{CustomPanel}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
self.PaintCustomColours(dc)
self.PaintHighlight(dc, True)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{CustomPanel}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{CustomPanel}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{CustomPanel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
x, y = event.GetX(), event.GetY()
selX = (x - self._customColourRect.x)/(self._smallRectangleSize.x + self._gridSpacing)
selY = (y - self._customColourRect.y)/(self._smallRectangleSize.y + self._gridSpacing)
ptr = selX + selY*8
dc = wx.ClientDC(self)
self.PaintHighlight(dc, False)
self._colourSelection = ptr
self._mainDialog._colour = Colour(self._customColours[self._colourSelection])
self.PaintCustomColour(dc, selX, selY)
self.PaintHighlight(dc, True)
self._mainDialog.DrawAll()
def PaintCustomColours(self, dc):
"""
Draws all the 16 subpanels with their custom colours.
:param `dc`: an instance of `wx.DC`.
"""
for i in xrange(2):
for j in xrange(8):
ptr = i*8 + j
x = (j*(self._smallRectangleSize.x+self._gridSpacing)) + self._customColourRect.x
y = (i*(self._smallRectangleSize.y+self._gridSpacing)) + self._customColourRect.y
dc.SetPen(wx.BLACK_PEN)
brush = wx.Brush(self._customColours[ptr])
dc.SetBrush(brush)
dc.DrawRectangle(x, y, self._smallRectangleSize.x, self._smallRectangleSize.y)
def PaintHighlight(self, dc, draw=True):
"""
Highlights the current custom colour selection (if any).
:param `dc`: an instance of `wx.DC`;
:param `draw`: whether to draw a thin black border around the selected custom
colour or not.
"""
if self._colourSelection < 0:
return
# Number of pixels bigger than the standard rectangle size
# for drawing a highlight
deltaX = deltaY = 2
# User-defined colours
y = self._colourSelection/8
x = self._colourSelection - (y*8)
x = (x*(self._smallRectangleSize.x + self._gridSpacing) + self._customColourRect.x) - deltaX
y = (y*(self._smallRectangleSize.y + self._gridSpacing) + self._customColourRect.y) - deltaY
if draw:
dc.SetPen(wx.BLACK_PEN)
else:
dc.SetPen(wx.LIGHT_GREY_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(x, y, (self._smallRectangleSize.x + (2*deltaX)), (self._smallRectangleSize.y + (2*deltaY)))
def PaintCustomColour(self, dc, selX, selY):
"""
Paints a newly added custom colour subpanel.
:param `dc`: an instance of `wx.DC`;
:param `selX`: the x coordinate of the custom colour subpanel;
:param `selY`: the y coordinate of the custom colour subpanel.
"""
dc.SetPen(wx.BLACK_PEN)
brush = wx.Brush(self._customColours[self._colourSelection])
dc.SetBrush(brush)
ptr = selX*8 + selY
x = (selX*(self._smallRectangleSize.x+self._gridSpacing)) + self._customColourRect.x
y = (selY*(self._smallRectangleSize.y+self._gridSpacing)) + self._customColourRect.y
dc.DrawRectangle(x, y, self._smallRectangleSize.x, self._smallRectangleSize.y)
dc.SetBrush(wx.NullBrush)
def AddCustom(self, colour):
"""
Adds a user-chosen colour to the list of custom colours.
:param `colour`: an instance of `wx.Colour`.
"""
self._colourSelection += 1
self._colourSelection = self._colourSelection%16
dc = wx.ClientDC(self)
self._customColours[self._colourSelection] = colour.GetPyColour()
self._colourData.SetCustomColour(self._colourSelection, self._customColours[self._colourSelection])
self.PaintCustomColours(dc)
class CubeColourDialog(wx.Dialog):
"""
This is the CubeColourDialog main class implementation.
"""
def __init__(self, parent, colourData=None, agwStyle=CCD_SHOW_ALPHA):
"""
Default class constructor.
:param `parent`: the dialog parent window;
:param `colourData`: a standard `wx.ColourData` (as used in `wx.ColourDialog`);
:param `agwStyle`: can be either ``None`` or ``CCD_SHOW_ALPHA``, depending if you want
to hide the alpha channel control or not.
"""
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title=_("CubeColourDialog: Choose Colour"),
pos=wx.DefaultPosition, size=(900, 900), style=wx.DEFAULT_DIALOG_STYLE)
if colourData:
self._colourData = colourData
else:
self._colourData = wx.ColourData()
self._colourData.SetColour(wx.Colour(128, 128, 128))
self._colour = Colour(self._colourData.GetColour())
self._oldColour = Colour(self._colourData.GetColour())
self._inMouse = False
self._initOver = False
self._inDrawAll = False
self._agwStyle = agwStyle
self.mainPanel = wx.Panel(self, -1)
self.hsvSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB")
self.rgbValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB Values")
self.hsvValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB Values")
self.rgbSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB")
self.alphaSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
self.alphaValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
self.rgbBitmap = RGBCube(self.mainPanel)
self.hsvBitmap = HSVWheel(self.mainPanel)
self.brightCtrl = BrightCtrl(self.mainPanel)
self.alphaCtrl = AlphaCtrl(self.mainPanel)
self.showAlpha = wx.CheckBox(self.mainPanel, -1, "Show Alpha Control")
self.customColours = CustomPanel(self.mainPanel, self._colourData)
self.addCustom = wx.Button(self.mainPanel, -1, "Add to custom colours")
self.okButton = wx.Button(self.mainPanel, -1, "Ok")
self.cancelButton = wx.Button(self.mainPanel, -1, "Cancel")
self.oldColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.newColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.redSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.greenSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.blueSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.hueSpin = wx.SpinCtrl(self.mainPanel, -1, "0", min=0, max=359,
style=wx.SP_ARROW_KEYS)
self.saturationSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.brightnessSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.alphaSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.accessCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.htmlCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.webSafe = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.htmlName = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.SetProperties()
self.DoLayout()
self.spinCtrls = [self.redSpin, self.greenSpin, self.blueSpin,
self.hueSpin, self.saturationSpin, self.brightnessSpin]
for spin in self.spinCtrls:
spin.Bind(wx.EVT_SPINCTRL, self.OnSpinCtrl)
self.Bind(wx.EVT_SPINCTRL, self.OnAlphaSpin, self.alphaSpin)
self.Bind(wx.EVT_BUTTON, self.OnOk, self.okButton)
self.Bind(wx.EVT_BUTTON, self.OnCancel, self.cancelButton)
self.Bind(wx.EVT_BUTTON, self.OnAddCustom, self.addCustom)
self.Bind(wx.EVT_CHECKBOX, self.OnShowAlpha)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Bind(wx.EVT_CHAR_HOOK, self.OnKeyUp)
self.Centre(wx.BOTH)
wx.CallAfter(self.InitDialog)
def SetProperties(self):
""" Sets some initial properties for L{CubeColourDialog} (sizes, values). """
self.okButton.SetDefault()
self.oldColourPanel.SetMinSize((-1, 50))
self.newColourPanel.SetMinSize((-1, 50))
self.redSpin.SetMinSize((60, -1))
self.greenSpin.SetMinSize((60, -1))
self.blueSpin.SetMinSize((60, -1))
self.hueSpin.SetMinSize((60, -1))
self.saturationSpin.SetMinSize((60, -1))
self.brightnessSpin.SetMinSize((60, -1))
self.alphaSpin.SetMinSize((60, -1))
self.showAlpha.SetValue(1)
self.accessCode.SetInitialSize((80, -1))
self.webSafe.SetInitialSize((80, -1))
self.htmlCode.SetInitialSize((80, -1))
def DoLayout(self):
""" Layouts all the controls in the L{CubeColourDialog}. """
dialogSizer = wx.BoxSizer(wx.VERTICAL)
mainSizer = wx.GridBagSizer(10, 5)
hsvValueSizer = wx.StaticBoxSizer(self.hsvValueSizer_staticbox, wx.VERTICAL)
hsvGridSizer = wx.GridSizer(2, 3, 2, 10)
rgbValueSizer = wx.StaticBoxSizer(self.rgbValueSizer_staticbox, wx.HORIZONTAL)
rgbGridSizer = wx.GridSizer(2, 3, 2, 10)
alphaValueSizer = wx.StaticBoxSizer(self.alphaValueSizer_staticbox, wx.VERTICAL)
alphaGridSizer = wx.BoxSizer(wx.VERTICAL)
customSizer = wx.BoxSizer(wx.VERTICAL)
buttonSizer = wx.BoxSizer(wx.VERTICAL)
accessSizer = wx.BoxSizer(wx.VERTICAL)
panelSizer = wx.BoxSizer(wx.VERTICAL)
htmlSizer1 = wx.BoxSizer(wx.HORIZONTAL)
htmlSizer2 = wx.BoxSizer(wx.VERTICAL)
htmlSizer_a = wx.BoxSizer(wx.VERTICAL)
htmlSizer_b = wx.BoxSizer(wx.VERTICAL)
hsvSizer = wx.StaticBoxSizer(self.hsvSizer_staticbox, wx.HORIZONTAL)
rgbSizer = wx.StaticBoxSizer(self.rgbSizer_staticbox, wx.VERTICAL)
alphaSizer = wx.StaticBoxSizer(self.alphaSizer_staticbox, wx.VERTICAL)
mainSizer.Add(self.showAlpha, (0, 0), (1, 1), wx.LEFT|wx.TOP, 10)
htmlLabel1 = wx.StaticText(self.mainPanel, -1, "HTML Code")
htmlLabel2 = wx.StaticText(self.mainPanel, -1, "Web Safe")
htmlSizer_a.Add(htmlLabel1, 0, wx.TOP, 3)
htmlSizer_b.Add(htmlLabel2, 0, wx.TOP, 3)
htmlSizer_a.Add(self.htmlCode, 0, wx.TOP, 3)
htmlSizer_b.Add(self.webSafe, 0, wx.TOP, 3)
htmlSizer1.Add(htmlSizer_a, 0)
htmlSizer1.Add(htmlSizer_b, 0, wx.LEFT, 10)
mainSizer.Add(htmlSizer1, (1, 0), (1, 1), wx.LEFT|wx.RIGHT, 10)
htmlLabel3 = wx.StaticText(self.mainPanel, -1, "HTML Name")
htmlSizer2.Add(htmlLabel3, 0, wx.TOP|wx.BOTTOM, 3)
htmlSizer2.Add(self.htmlName, 0)
mainSizer.Add(htmlSizer2, (1, 1), (1, 1), wx.LEFT|wx.RIGHT, 10)
customLabel = wx.StaticText(self.mainPanel, -1, "Custom Colours")
customSizer.Add(customLabel, 0, wx.BOTTOM, 3)
customSizer.Add(self.customColours, 0)
customSizer.Add(self.addCustom, 0, wx.TOP|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL, 5)
mainSizer.Add(customSizer, (0, 2), (2, 2), wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 5)
rgbSizer.Add(self.rgbBitmap, 0, wx.ALL, 15)
mainSizer.Add(rgbSizer, (2, 0), (1, 1), wx.ALL|wx.EXPAND, 10)
hsvSizer.Add(self.hsvBitmap, 0, wx.ALL, 15)
hsvSizer.Add(self.brightCtrl, 0, wx.RIGHT|wx.TOP|wx.BOTTOM, 15)
mainSizer.Add(hsvSizer, (2, 1), (1, 1), wx.ALL|wx.EXPAND, 10)
alphaSizer.Add(self.alphaCtrl, 0, wx.TOP|wx.ALIGN_CENTER, 15)
mainSizer.Add(alphaSizer, (2, 2), (1, 1), wx.ALL|wx.EXPAND, 10)
oldLabel = wx.StaticText(self.mainPanel, -1, "Old Colour")
panelSizer.Add(oldLabel, 0, wx.BOTTOM, 3)
panelSizer.Add(self.oldColourPanel, 0, wx.BOTTOM|wx.EXPAND, 20)
newLabel = wx.StaticText(self.mainPanel, -1, "New Colour")
accessLabel = wx.StaticText(self.mainPanel, -1, "MS Access Code")
accessSizer.Add(accessLabel, 0, wx.BOTTOM, 3)
accessSizer.Add(self.accessCode, 0)
panelSizer.Add(newLabel, 0, wx.BOTTOM, 3)
panelSizer.Add(self.newColourPanel, 0, wx.EXPAND)
panelSizer.Add((0, 0), 1, wx.EXPAND)
panelSizer.Add(accessSizer, 0, wx.TOP, 5)
mainSizer.Add(panelSizer, (2, 3), (1, 1), wx.ALL|wx.EXPAND, 10)
redLabel = wx.StaticText(self.mainPanel, -1, "Red")
rgbGridSizer.Add(redLabel, 0)
greenLabel = wx.StaticText(self.mainPanel, -1, "Green")
rgbGridSizer.Add(greenLabel, 0)
blueLabel = wx.StaticText(self.mainPanel, -1, "Blue")
rgbGridSizer.Add(blueLabel, 0)
rgbGridSizer.Add(self.redSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.greenSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.blueSpin, 0, wx.EXPAND)
rgbValueSizer.Add(rgbGridSizer, 1, 0, 0)
mainSizer.Add(rgbValueSizer, (3, 0), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
hueLabel = wx.StaticText(self.mainPanel, -1, "Hue")
hsvGridSizer.Add(hueLabel, 0)
saturationLabel = wx.StaticText(self.mainPanel, -1, "Saturation")
hsvGridSizer.Add(saturationLabel, 0)
brightnessLabel = wx.StaticText(self.mainPanel, -1, "Brightness")
hsvGridSizer.Add(brightnessLabel, 0)
hsvGridSizer.Add(self.hueSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.saturationSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.brightnessSpin, 0, wx.EXPAND)
hsvValueSizer.Add(hsvGridSizer, 1, wx.EXPAND)
mainSizer.Add(hsvValueSizer, (3, 1), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
alphaLabel = wx.StaticText(self.mainPanel, -1, "Alpha")
alphaGridSizer.Add(alphaLabel, 0)
alphaGridSizer.Add(self.alphaSpin, 0, wx.EXPAND|wx.TOP, 10)
alphaValueSizer.Add(alphaGridSizer, 1, wx.EXPAND)
mainSizer.Add(alphaValueSizer, (3, 2), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
buttonSizer.Add(self.okButton, 0, wx.BOTTOM, 3)
buttonSizer.Add(self.cancelButton, 0)
mainSizer.Add(buttonSizer, (3, 3), (1, 1), wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 5)
self.mainPanel.SetAutoLayout(True)
self.mainPanel.SetSizer(mainSizer)
mainSizer.Fit(self.mainPanel)
mainSizer.SetSizeHints(self.mainPanel)
if self.GetAGWWindowStyleFlag() & CCD_SHOW_ALPHA == 0:
mainSizer.Hide(self.showAlpha)
mainSizer.Hide(alphaSizer)
mainSizer.Hide(alphaValueSizer)
dialogSizer.Add(self.mainPanel, 1, wx.EXPAND)
self.SetAutoLayout(True)
self.SetSizer(dialogSizer)
dialogSizer.Fit(self)
dialogSizer.SetSizeHints(self)
self.Layout()
self.mainSizer = mainSizer
self.dialogSizer = dialogSizer
self.alphaSizers = [alphaSizer, alphaValueSizer]
def InitDialog(self):
""" Initialize the L{CubeColourDialog}. """
hsvRect = self.hsvBitmap.GetClientRect()
self._centre = wx.Point(hsvRect.x + hsvRect.width/2, hsvRect.y + hsvRect.height/2)
self._redLen = Distance(Vertex, Top)
self._greenLen = Distance(Vertex, Left)
self._blueLen = Distance(Vertex, Right)
self.CalcSlopes()
self.CalcCuboid()
self.CalcRects()
self.SetSpinVals()
self._initOver = True
wx.CallAfter(self.Refresh)
def CalcSlopes(self):
""" Calculates the line slopes in the RGB colour cube. """
self._lines = {RED: LineDescription(), GREEN: LineDescription(), BLUE: LineDescription()}
self._lines[RED].slope = Slope(Top, Vertex)
self._lines[GREEN].slope = Slope(Left, Vertex)
self._lines[BLUE].slope = Slope(Right, Vertex)
for i in xrange(3):
self._lines[i].x = Vertex.x
self._lines[i].y = Vertex.y
self._lines[i].c = FindC(self._lines[i])
def CalcCuboid(self):
""" Calculates the RGB colour cube vertices. """
rLen = (self._colour.r*self._redLen)/255.0
gLen = (self._colour.g*self._greenLen)/255.0
bLen = (self._colour.b*self._blueLen)/255.0
lines = [LineDescription() for i in xrange(12)]
self._cuboid = [None]*8
self._cuboid[0] = Vertex
self._cuboid[1] = PointOnLine(Vertex, Top, int(rLen), self._redLen)
self._cuboid[3] = PointOnLine(Vertex, Left, int(gLen), self._greenLen)
self._cuboid[7] = PointOnLine(Vertex, Right, int(bLen), self._blueLen)
lines[0] = self._lines[RED]
lines[1] = self._lines[GREEN]
lines[2] = self._lines[BLUE]
lines[3].slope = self._lines[GREEN].slope
lines[3].x = self._cuboid[1].x
lines[3].y = self._cuboid[1].y
lines[3].c = FindC(lines[3])
lines[4].slope = self._lines[RED].slope
lines[4].x = self._cuboid[3].x
lines[4].y = self._cuboid[3].y
lines[4].c = FindC(lines[4])
lines[5].slope = self._lines[BLUE].slope
lines[5].x = self._cuboid[3].x
lines[5].y = self._cuboid[3].y
lines[5].c = FindC(lines[5])
lines[6].slope = self._lines[GREEN].slope
lines[6].x = self._cuboid[7].x
lines[6].y = self._cuboid[7].y
lines[6].c = FindC(lines[6])
lines[10].slope = self._lines[BLUE].slope
lines[10].x = self._cuboid[1].x
lines[10].y = self._cuboid[1].y
lines[10].c = FindC(lines[10])
lines[11].slope = self._lines[RED].slope
lines[11].x = self._cuboid[7].x
lines[11].y = self._cuboid[7].y
lines[11].c = FindC(lines[11])
self._cuboid[2] = Intersection(lines[3], lines[4])
self._cuboid[4] = Intersection(lines[5], lines[6])
self._cuboid[6] = Intersection(lines[10], lines[11])
lines[7].slope = self._lines[RED].slope
lines[7].x = self._cuboid[4].x
lines[7].y = self._cuboid[4].y
lines[7].c = FindC(lines[7])
lines[8].slope = self._lines[BLUE].slope
lines[8].x = self._cuboid[2].x
lines[8].y = self._cuboid[2].y
lines[8].c = FindC(lines[8])
self._cuboid[5] = Intersection(lines[7], lines[8])
def CalcRects(self):
""" Calculates the brightness control user-selected rect. """
pt = PtFromAngle(self._colour.h, self._colour.s, self._centre)
self._currentRect = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
def DrawMarkers(self, dc=None):
"""
Draws the markers for all the controls.
:param `dc`: an instance of `wx.DC`. If `dc` is ``None``, a `wx.ClientDC` is
created on the fly.
"""
if dc is None:
dc = wx.ClientDC(self)
self.hsvBitmap.DrawMarkers()
self.rgbBitmap.DrawMarkers()
self.brightCtrl.DrawMarkers()
def DrawRGB(self):
""" Refreshes the RGB colour cube. """
self.rgbBitmap.Refresh()
def DrawHSB(self):
""" Refreshes the HSB colour wheel. """
self.hsvBitmap.Refresh()
def DrawBright(self):
""" Refreshes the brightness control. """
self.brightCtrl.Refresh()
def DrawAlpha(self):
""" Refreshes the alpha channel control. """
self.alphaCtrl.Refresh()
def SetSpinVals(self):
""" Sets the values for all the spin controls. """
self.redSpin.SetValue(self._colour.r)
self.greenSpin.SetValue(self._colour.g)
self.blueSpin.SetValue(self._colour.b)
self.hueSpin.SetValue(self._colour.h)
self.saturationSpin.SetValue(self._colour.s)
self.brightnessSpin.SetValue(self._colour.v)
self.alphaSpin.SetValue(self._colour._alpha)
self.SetPanelColours()
self.SetCodes()
def SetPanelColours(self):
""" Assigns colours to the colour panels. """
self.oldColourPanel.RefreshColour(self._oldColour)
self.newColourPanel.RefreshColour(self._colour)
def SetCodes(self):
""" Sets the HTML/MS Access codes (if any) in the text controls. """
colour = rgb2html(self._colour)
self.htmlCode.SetValue(colour)
self.htmlCode.Refresh()
if colour in HTMLCodes:
colourName, access, webSafe = HTMLCodes[colour]
self.webSafe.SetValue(webSafe)
self.accessCode.SetValue(access)
self.htmlName.SetValue(colourName)
else:
self.webSafe.SetValue("")
self.accessCode.SetValue("")
self.htmlName.SetValue("")
def OnCloseWindow(self, event):
"""
Handles the ``wx.EVT_CLOSE`` event for L{CubeColourDialog}.
:param `event`: a `wx.CloseEvent` event to be processed.
"""
self.EndModal(wx.ID_CANCEL)
def OnKeyUp(self, event):
"""
Handles the ``wx.EVT_CHAR_HOOK`` event for L{CubeColourDialog}.
:param `event`: a `wx.KeyEvent` event to be processed.
"""
if event.GetKeyCode() == wx.WXK_ESCAPE:
self.EndModal(wx.ID_CANCEL)
event.Skip()
def ShowModal(self):
"""
Shows L{CubeColourDialog} as a modal dialog. Program flow does
not return until the dialog has been dismissed with `EndModal`.
:note: Overridden from `wx.Dialog`.
"""
return wx.Dialog.ShowModal(self)
def SetAGWWindowStyleFlag(self, agwStyle):
"""
Sets the L{CubeColourDialog} window style flags.
:param `agwStyle`: can only be ``CCD_SHOW_ALPHA`` or ``None``.
"""
self._agwStyle = agwStyle
show = agwStyle & CCD_SHOW_ALPHA
self.mainSizer.Show(self.alphaSizers[0], show)
self.mainSizer.Show(self.alphaSizers[1], show)
self.mainSizer.Fit(self.mainPanel)
self.mainSizer.SetSizeHints(self.mainPanel)
self.mainSizer.Layout()
self.dialogSizer.Fit(self)
self.dialogSizer.SetSizeHints(self)
self.Layout()
self.Refresh()
self.Update()
def GetAGWWindowStyleFlag(self):
"""
Returns the L{CubeColourDialog} window style flags.
:see: L{SetAGWWindowStyleFlag} for a list of possible flags.
"""
return self._agwStyle
def OnOk(self, event):
"""
Handles the Ok ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.EndModal(wx.ID_OK)
def OnCancel(self, event):
"""
Handles the Cancel ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.OnCloseWindow(event)
def OnAddCustom(self, event):
"""
Handles the Add Custom ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.customColours.AddCustom(self._colour)
def OnShowAlpha(self, event):
"""
Shows/hides the alpha channel control in L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
agwStyle = self.GetAGWWindowStyleFlag()
show = event.IsChecked()
if show:
agwStyle |= CCD_SHOW_ALPHA
else:
agwStyle &= ~CCD_SHOW_ALPHA
self.SetAGWWindowStyleFlag(agwStyle)
def OnSpinCtrl(self, event):
"""
Handles the ``wx.EVT_SPINCTRL`` event for RGB and HSB colours.
:param `event`: a `wx.SpinEvent` event to be processed.
"""
obj = event.GetEventObject()
position = self.spinCtrls.index(obj)
colourVal = event.GetInt()
attribute, maxVal = colourAttributes[position], colourMaxValues[position]
self.AssignColourValue(attribute, colourVal, maxVal, position)
def OnAlphaSpin(self, event):
"""
Handles the ``wx.EVT_SPINCTRL`` event for the alpha channel.
:param `event`: a `wx.SpinEvent` event to be processed.
"""
colourVal = event.GetInt()
originalVal = self._colour._alpha
if colourVal != originalVal and self._initOver:
if colourVal < 0:
colourVal = 0
if colourVal > 255:
colourVal = 255
self._colour._alpha = colourVal
self.DrawAlpha()
def AssignColourValue(self, attribute, colourVal, maxVal, position):
""" Common code to handle spin control changes. """
originalVal = getattr(self._colour, attribute)
if colourVal != originalVal and self._initOver:
if colourVal < 0:
colourVal = 0
if colourVal > maxVal:
colourVal = maxVal
setattr(self._colour, attribute, colourVal)
if position < 3:
self._colour.ToHSV()
else:
self._colour.ToRGB()
self.DrawAll()
def DrawAll(self):
""" Draws all the custom controls after a colour change. """
if self._initOver and not self._inDrawAll:
self._inDrawAll = True
dc1 = wx.ClientDC(self.hsvBitmap)
self.hsvBitmap.DrawMarkers(dc1)
dc2 = wx.ClientDC(self.rgbBitmap)
self.rgbBitmap.DrawMarkers(dc2)
self.rgbBitmap.DrawLines(dc2)
dc3 = wx.ClientDC(self.brightCtrl)
self.brightCtrl.DrawMarkers(dc3)
dc4 = wx.ClientDC(self.alphaCtrl)
self.alphaCtrl.DrawMarkers(dc4)
self.CalcCuboid()
self.CalcRects()
self.DrawRGB()
self.DrawHSB()
self.DrawBright()
self.DrawAlpha()
self.SetSpinVals()
self._inDrawAll = False
def GetColourData(self):
""" Returns a wxPython compatible `wx.ColourData`. """
self._colourData.SetColour(self._colour.GetPyColour())
return self._colourData
def GetRGBAColour(self):
""" Returns a 4-elements tuple of red, green, blue, alpha components. """
return (self._colour.r, self._colour.g, self._colour.b, self._colour._alpha)
def GetHSVAColour(self):
""" Returns a 4-elements tuple of hue, saturation, brightness, alpha components. """
return (self._colour.h, self._colour.s, self._colour.v, self._colour._alpha)
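# A minimal usage sketch for CubeColourDialog (illustrative only; "frame" is
# assumed to be an existing parent window inside a running wx.App):
#
#   data = wx.ColourData()
#   data.SetColour(wx.Colour(255, 0, 0))
#   dlg = CubeColourDialog(frame, data)
#   if dlg.ShowModal() == wx.ID_OK:
#       data = dlg.GetColourData()
#       red, green, blue, alpha = dlg.GetRGBAColour()
#   dlg.Destroy()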
|
ezequielpereira/Time-Line
|
libs64/wx/lib/agw/cubecolourdialog.py
|
Python
|
gpl-3.0
| 139,714
|
# Copyright(C) 2014 by Abe developers.
# test_btc200.py: test Abe loading through Bitcoin Block 200.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
from db import testdb
import Abe.util
import Abe.Chain
@pytest.fixture(scope="module")
def btc200(testdb):
btc_chain = Abe.Chain.create('Bitcoin')
blocks = []
for hex in _blocks():
ds = Abe.util.str_to_ds(hex.decode('hex'))
hash = btc_chain.ds_block_header_hash(ds)
b = btc_chain.ds_parse_block(ds)
b['hash'] = hash
blocks.append(b)
store = testdb.store
btc_chain = store.get_chain_by_name(btc_chain.name)
for b in blocks:
store.import_block(b, chain = btc_chain)
return store
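# The fixture above decodes the raw block hex returned by _blocks() at the
# bottom of this file, imports Bitcoin blocks 0-200 into a fresh test store,
# and hands that store to the tests below.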
def test_block_number(btc200):
assert btc200.get_block_number(1) == 200
@pytest.fixture(scope="module")
def coinbase_200(btc200):
return btc200.export_tx(tx_hash = '2b1f06c2401d3b49a33c3f5ad5864c0bc70044c4068f9174546f3cfc1887d5ba')
def test_coinbase_hash(coinbase_200):
assert coinbase_200['hash'] == '2b1f06c2401d3b49a33c3f5ad5864c0bc70044c4068f9174546f3cfc1887d5ba'
def test_coinbase_in(coinbase_200):
assert len(coinbase_200['in']) == 1
assert coinbase_200['vin_sz'] == 1
def test_coinbase_lock_time(coinbase_200):
assert coinbase_200['lock_time'] == 0
def test_coinbase_prev_out(coinbase_200):
assert coinbase_200['in'][0]['prev_out'] == {
"hash": "0000000000000000000000000000000000000000000000000000000000000000",
"n": 4294967295
}
def test_coinbase_raw_scriptSig(coinbase_200):
assert coinbase_200['in'][0]['raw_scriptSig'] == "04ffff001d0138"
def test_coinbase_out(coinbase_200):
assert len(coinbase_200['out']) == 1
assert coinbase_200['vout_sz'] == 1
def test_coinbase_raw_scriptPubKey(coinbase_200):
assert coinbase_200['out'][0]['raw_scriptPubKey'] == \
"41045e071dedd1ed03721c6e9bba28fc276795421a378637fb41090192bb9f208630dcbac5862a3baeb9df3ca6e4e256b7fd2404824c20198ca1b004ee2197866433ac"
def test_coinbase_value(coinbase_200):
assert coinbase_200['out'][0]['value'] == "50.00000000"
def test_coinbase_size(coinbase_200):
assert coinbase_200['size'] == 134
def test_coinbase_ver(coinbase_200):
assert coinbase_200['ver'] == 1
@pytest.fixture(scope="module")
def b182t1(btc200):
return btc200.export_tx(
tx_hash = '591e91f809d716912ca1d4a9295e70c3e78bab077683f79350f101da64588073',
format = 'browser')
def test_tx_hash(b182t1):
assert b182t1['hash'] == '591e91f809d716912ca1d4a9295e70c3e78bab077683f79350f101da64588073'
def test_tx_version(b182t1):
assert b182t1['version'] == 1
def test_tx_lockTime(b182t1):
assert b182t1['lockTime'] == 0
def test_tx_size(b182t1):
assert b182t1['size'] == 275
def test_tx_cc(b182t1):
assert len(b182t1['chain_candidates']) == 1
def test_tx_chain_name(b182t1):
assert b182t1['chain_candidates'][0]['chain'].name == 'Bitcoin'
def test_tx_in_longest(b182t1):
assert b182t1['chain_candidates'][0]['in_longest']
def test_tx_block_nTime(b182t1):
assert b182t1['chain_candidates'][0]['block_nTime'] == 1231740736
def test_tx_block_height(b182t1):
assert b182t1['chain_candidates'][0]['block_height'] == 182
def test_tx_block_hash(b182t1):
assert b182t1['chain_candidates'][0]['block_hash'] == \
'0000000054487811fc4ff7a95be738aa5ad9320c394c482b27c0da28b227ad5d'
def test_tx_tx_pos(b182t1):
assert b182t1['chain_candidates'][0]['tx_pos'] == 1
def test_tx_in(b182t1):
assert len(b182t1['in']) == 1
def test_tx_in_pos(b182t1):
assert b182t1['in'][0]['pos'] == 0
def test_tx_in_binscript(b182t1):
assert b182t1['in'][0]['binscript'] == '47304402201f27e51caeb9a0988a1e50799ff0af94a3902403c3ad4068b063e7b4d1b0a76702206713f69bd344058b0dee55a9798759092d0916dbbc3e592fee43060005ddc17401'.decode('hex')
def test_tx_in_value(b182t1):
assert b182t1['in'][0]['value'] == 3000000000
def test_tx_in_prev_out(b182t1):
assert b182t1['in'][0]['o_hash'] == 'a16f3ce4dd5deb92d98ef5cf8afeaf0775ebca408f708b2146c4fb42b41e14be'
assert b182t1['in'][0]['o_pos'] == 1
def test_tx_in_script_type(b182t1):
assert b182t1['in'][0]['script_type'] == Abe.Chain.SCRIPT_TYPE_PUBKEY
def test_tx_in_binaddr(b182t1):
assert b182t1['in'][0]['binaddr'] == '11b366edfc0a8b66feebae5c2e25a7b6a5d1cf31'.decode('hex')
def test_tx_out(b182t1):
assert len(b182t1['out']) == 2
def test_tx_out_pos(b182t1):
assert b182t1['out'][0]['pos'] == 0
assert b182t1['out'][1]['pos'] == 1
def test_tx_out_binscript(b182t1):
assert b182t1['out'][0]['binscript'] == '410401518fa1d1e1e3e162852d68d9be1c0abad5e3d6297ec95f1f91b909dc1afe616d6876f92918451ca387c4387609ae1a895007096195a824baf9c38ea98c09c3ac'.decode('hex')
assert b182t1['out'][1]['binscript'] == '410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac'.decode('hex')
def test_tx_out_value(b182t1):
assert b182t1['out'][0]['value'] == 100000000
assert b182t1['out'][1]['value'] == 2900000000
def test_tx_out_redeemed(b182t1):
assert b182t1['out'][0]['o_hash'] is None
assert b182t1['out'][0]['o_pos'] is None
assert b182t1['out'][1]['o_hash'] == '12b5633bad1f9c167d523ad1aa1947b2732a865bf5414eab2f9e5ae5d5c191ba'
assert b182t1['out'][1]['o_pos'] == 0
def test_tx_out_binaddr(b182t1):
assert b182t1['out'][0]['binaddr'] == 'db3b465a2b678e0bdc3e4944bb41abb5a795ae04'.decode('hex')
assert b182t1['out'][1]['binaddr'] == '11b366edfc0a8b66feebae5c2e25a7b6a5d1cf31'.decode('hex')
def test_tx_value_in(b182t1):
assert b182t1['value_in'] == 3000000000
def test_tx_value_out(b182t1):
assert b182t1['value_out'] == 3000000000
def _blocks():
"""Bitcoin Blocks 0-200 as an array of hex strings."""
return [
'0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000',
'010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e362990101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000',
'010000004860eb18bf1b1620e37e9490fc8a427514416fd75159ab86688e9a8300000000d5fdcc541e25de1c7a5addedf24858b8bb665c9f36ef744ee42c316022c90f9bb0bc6649ffff001d08d2bd610101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010bffffffff0100f2052a010000004341047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77ac00000000',
'01000000bddd99ccfda39da1b108ce1a5d70038d0a967bacb68b6b63065f626a0000000044f672226090d85db9a9f2fbfe5f0f9609b387af7be5b7fbb7a1767c831c9e995dbe6649ffff001d05e0ed6d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010effffffff0100f2052a0100000043410494b9d3e76c5b1629ecf97fff95d7a4bbdac87cc26099ada28066c6ff1eb9191223cd897194a08d0c2726c5747f1db49e8cf90e75dc3e3550ae9b30086f3cd5aaac00000000',
'010000004944469562ae1c2c74d9a535e00b6f3e40ffbad4f2fda3895501b582000000007a06ea98cd40ba2e3288262b28638cec5337c1456aaf5eedc8e9e5a20f062bdf8cc16649ffff001d2bfee0a90101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011affffffff0100f2052a01000000434104184f32b212815c6e522e66686324030ff7e5bf08efb21f8b00614fb7690e19131dd31304c54f37baa40db231c918106bb9fd43373e37ae31a0befc6ecaefb867ac00000000',
'0100000085144a84488ea88d221c8bd6c059da090e88f8a2c99690ee55dbba4e00000000e11c48fecdd9e72510ca84f023370c9a38bf91ac5cae88019bee94d24528526344c36649ffff001d1d03e4770101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0120ffffffff0100f2052a0100000043410456579536d150fbce94ee62b47db2ca43af0a730a0467ba55c79e2a7ec9ce4ad297e35cdbb8e42a4643a60eef7c9abee2f5822f86b1da242d9c2301c431facfd8ac00000000',
'01000000fc33f596f822a0a1951ffdbf2a897b095636ad871707bf5d3162729b00000000379dfb96a5ea8c81700ea4ac6b97ae9a9312b2d4301a29580e924ee6761a2520adc46649ffff001d189c4c970101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0123ffffffff0100f2052a0100000043410408ce279174b34c077c7b2043e3f3d45a588b85ef4ca466740f848ead7fb498f0a795c982552fdfa41616a7c0333a269d62108588e260fd5a48ac8e4dbf49e2bcac00000000',
'010000008d778fdc15a2d3fb76b7122a3b5582bea4f21f5a0c693537e7a03130000000003f674005103b42f984169c7d008370967e91920a6a5d64fd51282f75bc73a68af1c66649ffff001d39a59c860101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012bffffffff0100f2052a01000000434104a59e64c774923d003fae7491b2a7f75d6b7aa3f35606a8ff1cf06cd3317d16a41aa16928b1df1f631f31f28c7da35d4edad3603adb2338c4d4dd268f31530555ac00000000',
'010000004494c8cf4154bdcc0720cd4a59d9c9b285e4b146d45f061d2b6c967100000000e3855ed886605b6d4a99d5fa2ef2e9b0b164e63df3c4136bebf2d0dac0f1f7a667c86649ffff001d1c4b56660101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012cffffffff0100f2052a01000000434104cc8d85f5e7933cb18f13b97d165e1189c1fb3e9c98b0dd5446b2a1989883ff9e740a8a75da99cc59a21016caf7a7afd3e4e9e7952983e18d1ff70529d62e0ba1ac00000000',
'01000000c60ddef1b7618ca2348a46e868afc26e3efc68226c78aa47f8488c4000000000c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd37047fca6649ffff001d28404f530101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0134ffffffff0100f2052a0100000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000',
'010000000508085c47cc849eb80ea905cc7800a3be674ffc57263cf210c59d8d00000000112ba175a1e04b14ba9e7ea5f76ab640affeef5ec98173ac9799a852fa39add320cd6649ffff001d1e2de5650101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0136ffffffff0100f2052a01000000434104fcc2888ca91cf0103d8c5797c256bf976e81f280205d002d85b9b622ed1a6f820866c7b5fe12285cfa78c035355d752fc94a398b67597dc4fbb5b386816425ddac00000000',
'01000000e915d9a478e3adf3186c07c61a22228b10fd87df343c92782ecc052c000000006e06373c80de397406dc3d19c90d71d230058d28293614ea58d6a57f8f5d32f8b8ce6649ffff001d173807f80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013bffffffff0100f2052a010000004341046cc86ddcd0860b7cef16cbaad7fe31fda1bf073c25cb833fa9e409e7f51e296f39b653a9c8040a2f967319ff37cf14b0991b86173462a2d5907cb6c5648b5b76ac00000000',
'010000007330d7adf261c69891e6ab08367d957e74d4044bc5d9cd06d656be9700000000b8c8754fabb0ffeb04ca263a1368c39c059ca0d4af3151b876f27e197ebb963bc8d06649ffff001d3f596a0c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010cffffffff0100f2052a0100000043410478ebe2c28660cd2fa1ba17cc04e58d6312679005a7cad1fd56a7b7f4630bd700bcdb84a888a43fe1a2738ea1f3d2301d02faef357e8a5c35a706e4ae0352a6adac00000000',
'010000005e2b8043bd9f8db558c284e00ea24f78879736f4acd110258e48c2270000000071b22998921efddf90c75ac3151cacee8f8084d3e9cb64332427ec04c7d562994cd16649ffff001d37d1ae860101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013cffffffff0100f2052a01000000434104c5a68f5fa2192b215016c5dfb384399a39474165eea22603cd39780e653baad9106e36947a1ba3ad5d3789c5cead18a38a538a7d834a8a2b9f0ea946fb4e6f68ac00000000',
'0100000089304d4ba5542a22fb616d1ca019e94222ee45c1ad95a83120de515c00000000560164b8bad7675061aa0f43ced718884bdd8528cae07f24c58bb69592d8afe185d36649ffff001d29cbad240101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013effffffff0100f2052a010000004341043e8ac6b8ea64e85928b6469f17db0096de0bcae7d09a4497413d9bba49c00ffdf9cb0ce07c404784928b3976f0beea42fe2691a8f0430bcb2b0daaf5aa02b30eac00000000',
'01000000378a6f6593e2f0251132d96616e837eb6999bca963f6675a0c7af180000000000d080260d107d269ccba9247cfc64c952f1d13514b49e9f1230b3a197a8b7450fa276849ffff001d38d8fb980101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010affffffff0100f2052a01000000434104e0041b4b4d9b6feb7221803a35d997efada6e2b5d24f5fc7205f2ea6b62a1adc9983a7a7dab7e93ea791bed5928e7a32286fa4facadd16313b75b467aea77499ac00000000',
'010000007384231257343f2fa3c55ee69ea9e676a709a06dcfd2f73e8c2c32b300000000442ee91b2b999fb15d61f6a88ecf2988e9c8ed48f002476128e670d3dac19fe706286849ffff001d049e12d60101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010bffffffff0100f2052a01000000434104977367164ca24f1f2de2e2cfb9e5c3f22d510d3f33683de200283100af0c8667dba7e4e389fa9953c6cb83d6ea72990e139f529b58cfbbac27607a28207b2a37ac00000000',
'01000000f5c46c41c30df6aaff3ae9f74da83e4b1cffdec89c009b39bb254a17000000005d6291c35a88fd9a3aef5843124400936fbf2c9166314addcaf5678e55b7e0a30f2c6849ffff001d076084930101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010dffffffff0100f2052a010000004341045ca3b93e90fe9785734e07c8e564fd72a0d68a200bf907ee01dabab784ad5817f59a41f4f7e04edc3e9b80cc370c281b0f406eb58187664bdf93decc5bb63264ac00000000',
'0100000009f8fd6ba6f0b6d5c207e8fcbcf50f46876a5deffbac4701d7d0f13f0000000023ca63b851cadfd7099ae68eb22147d09394adb72a78e86b69c42deb6df225f92e2e6849ffff001d323741f20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0112ffffffff0100f2052a01000000434104cf37a46b304e4dad17e081361502d0eff20af2b1360c7b18392a29f9f08ae5a95aa24f859533dabbc8585598bf8c5c71c0e8d89d3655889aee8c49fd948f59feac00000000',
'01000000161126f0d39ec082e51bbd29a1dfb40b416b445ac8e493f88ce993860000000030e2a3e32abf1663a854efbef1b233c67c8cdcef5656fe3b4f28e52112469e9bae306849ffff001d16d1b42d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0116ffffffff0100f2052a01000000434104f5efde0c2d30ab28e3dbe804c1a4aaf13066f9b198a4159c76f8f79b3b20caf99f7c979ed6c71481061277a6fc8666977c249da99960c97c8d8714fda9f0e883ac00000000',
'010000006f187fddd5e28aa1b4065daa5d9eae0c487094fb20cf97ca02b81c84000000005b7b25b51797f83192f9fd2c3871bfb27570a7d6b56d3a50760613d1a2fc1aeeab346849ffff001d36d950710101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0120ffffffff0100f2052a0100000043410408ab2f56361f83064e4ce51acc291fb57c2cbcdb1d6562f6278c43a1406b548fd6cefc11bcc29eb620d5861cb9ed69dc39f2422f54b06a8af4f78c8276cfdc6bac00000000',
'01000000d7c834e8ea05e2c2fddf4d82faf4c3e921027fa190f1b8372a7aa96700000000b41092b870cc096070ff3212c207c0881e3a2abafc1b92507941b4ef705917e0d9366849ffff001d2bd021d60101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0125ffffffff0100f2052a010000004341042b08ebb6ada0057e47a125d0a2e99dd2b86c201d41aaf7aa295c625c2c2f1997a8bc6cb9dbda889669989b8d12804c400168e54b093ad05d1170c516abca7d28ac00000000',
'010000004f29f31e6dac13710ae72d54278b5c97ff6c1646e95b27d14263016f000000004349d6a4e94f05a736ac830754e76dfdf7f140c331f316d1a278517e1daf2e9e6b3a6849ffff001d28140f620101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0130ffffffff0100f2052a01000000434104aef905809d1004da2b0187dd5198e0f5b11ab291230cdcd9606bbc99acd15bc91f951de6307f5adae03f2e18d523ec8778e7d8e9b7c24ba282c8eaa9bcded840ac00000000',
'010000003b5e5b888c8c3da0f1d6c3969e63a7a9c1215a3360c8107a428db598000000008c4cc1b42c9dab1973890ecdfdee032079ed39892ad53a6546844d237634cfe1fb3a6849ffff001d255ab4550101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0131ffffffff0100f2052a010000004341041a24b5639c12b2b0a612011eb780a682020b6312782fef0bc29a75eee7cf66abd081121a0b7b5c3076e055648379c25ed52eff8d2b11871e5a7e0c8604f4053fac00000000',
'0100000082219cebbdc9bcb715efee535c13a44447e99dfaff6d552e9839d30c000000003e75f63c634ed5fb3d8e21de5fe143cfa63c8018fce0fa26cbc628378b9bc343953d6849ffff001d27ba00b10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013fffffffff0100f2052a010000004341049523af765da5408d0a4f9d33af2e103c57b8b31877969173e7a7c313bf09a9803dcceec9b29d0fab4737173e3cb6dcc11ab7f233d8b1d715748ca4d715770ac3ac00000000',
'010000005f411e0d7783fc274b4fea8597209d31d4a511e887a489cebb1f05fc00000000be2123ad48038313b8b726a51cb080bb5a8b81c4166401493b017d2d33520f9b063f6849ffff001d2337f1310101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0141ffffffff0100f2052a01000000434104bff063a080c07aa9d8e7038c9d1bd7e5076fc28dd3e905b76517ad958e9df65e83abefcdbdcd7310231aaaf16a53e9bc24598826a3291e5dab338675618e7f12ac00000000',
'010000002620766fa24558ad47e3a9623cd17ff4623668768dbea19ed5a1358e00000000dc1490b5ba227b1adbb2513f74e0252e8fe68b6c7de74c1a22adb63b14e8c16712466849ffff001d344eb75c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0105ffffffff0100f2052a010000004341040baf81984793a57324f886d6a9a040513258dc310fd95e66955f9890d51a4efcc9d5bf69da30be69e16e426770b9927c18d0a06c610c6d7ebfbc6a59023a4ad0ac00000000',
'010000009810f0fa1817a4d2d371a069addaafab2ca99887abcc5bd2528e434100000000654f005a6e4b4b57b42343fb0e47f32079b4ebfe643c2ea4ea20e46c3af00c238d466849ffff001d364c8cb30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0110ffffffff0100f2052a01000000434104a43f4de451f220ee2606ed79883797000e57a17ad8d418eb2de21339a045fd1e399953a0b9474b2e1fa510696c1eb4812dcd0ea4fda1932344bbd914f52e4e2fac00000000',
'0100000081203520416c370fde3d6d46e82ed4332b5035bfba848ff97207357100000000bdaed84e0cbab735880d4763a1eb2df1ecd59dc261f3446db37bed5b6ccb99f331bf6849ffff001d2e5bd48e0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0109ffffffff0100f2052a01000000434104d3a4fd2722f5754102b96cd8b8ec7ea870c10ed5363290b8ac92eaf6b1202a569ed95ed4957c644df7fc0794774b03bf9f63baf7a55cde9e5076774f67987b34ac00000000',
'010000004409709aff1b155be4f7a9ccef6121345050be74b4bad1d330940dbb00000000ec77d34cb2f84f3447c37ec1b4476e044e88478378998bd55d031f58f4e261c35fbf6849ffff001d32cb39a00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010dffffffff0100f2052a010000004341048e2c3ad3142a375ee969eccb3e14470971d898bc67077f28293245b97afbc509d180829f4cd419c208c8302728dd51a6d88392fb30a09d3ec27dd23fb2346d03ac00000000',
'01000000cb9ba5a45252b335fe47a099c8935d01ff8eef2e598c2051631b7ac50000000031534f7571b5ea98c1318eed04937d6ff16582ba72c53552581c40828b6ce2f5cac16849ffff001d080315e80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0121ffffffff0100f2052a010000004341042cf59fafd089a348c5834283573608e89a305c60a034604c7d22dde50998f1b9bb74681986ca1884a6b1df8ce7f1b79a2277057de855a634626e7a5851c1e716ac00000000',
'01000000db643f0756bb4f6b25ce4a475b533d9ef75cd536e72df664fb9c91bc00000000cb527bd29495c02c9d6515de91ef264df333447e48ef730f3b66ffa8db3eb38630c46849ffff001d155dbb2a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0129ffffffff0100f2052a01000000434104c26035a5e1e1db8b18043d1b31cef10a8fe6f58757e2b3e5147154df627ae020f39720adc9a623857db379217063896ef28ea6b8a0a43e7f1bec4a36dc68185cac00000000',
'01000000c4d369b723c2cf9be33cf00deb1dbfea0c8ccd12c415f29434ff009700000000c9c0fd0ae7b7973c42fc9e3dddc967b6e309570b720ff15414c08365f005992be3c56849ffff001d08e1c00d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0136ffffffff0100f2052a01000000434104b949980bb46aee11510519b4af0dfcc3cc7464b3ede15f184b7c8126a98bf6d6e698eaf16b938814174a002ba24daa03e59a7c0927248517b581c09ec70f216eac00000000',
'01000000e3f6664d5af37062b934f983ed1033e2011b42c9b04735276c7ccbe5000000001012aaab3e3bffd34055aaa157bf78792d5c18f085635eda7046d89c08a0eabde3c86849ffff001d228c22400101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0138ffffffff0100f2052a01000000434104804d71f6a91c908a973cae7ef4363f7689520116b995d6936328de00be56f92baee0dabf3a240e0ed2dce7f374f12cbba7649808528236cb04c558f028dd61edac00000000',
'01000000627985c0fc1a71e052a5af9420c9b99845432ae099f27a3dea7370a80000000074549b3151d6dd4ce77419d01710921b3211ed3280bf2e3af2c1f1a820063b2272ca6849ffff001d2243c0240101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0147ffffffff0100f2052a01000000434104180bfa57bff462c7641fa0b91efe29344a77086b073cd9c5f769cb2393acc151a4e7377eaabacc39f5b2bd2cd4bcb5ed1855939619e491c79c0bb5793d4edbf3ac00000000',
'010000008f31b4c405cfc212fa4e62840dc8d0c529ed53328bb1426c3bb23fa700000000e0af3bba9e962ce288d9e232d28a1ba9c85bd1e298890738a65b93ed97192b85a1cd6849ffff001d14cadde70101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d014dffffffff0100f2052a01000000434104bb31566291ceb63ed600b0c29d6825d7924e73c265abac43fa088ad3e1a477c6fd15373fc40229cc895732d4175f6db615e4a2b6e9411e92974b6923d7985518ac00000000',
'010000009b2d32c7828a80644b92b773357b557462a1470d4216e8b465a472b5000000005a4d7d92cd839cdb7dc448902438e4a4885721487de33900b34558bd6f255dd01dd06849ffff001d2ec3842f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0150ffffffff0100f2052a01000000434104ce29c26fb59eadf6b9b38a3e7f52646877bcdcd7b5f290a47a7a61668bda2c82c8f13f66e8665cfe7594d7da51f431df4df2f60df08ecfd53d2f2d076b4bc24eac00000000',
'01000000de44324d0f70a14985385f4399844b17925ca24e90b425f543d624f8000000007d282068b770b35b587a9fb4356491d5854bba3b60d7c1a129d37ed6b54e346dead36849ffff001d013eca850101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0157ffffffff0100f2052a01000000434104655798e5ce83da2f24955e16396846528d919c223c9c2945b211b722d55a85ccb8930066eaf34c501ac37c4d41c8e579ccc00f54d5a5f1b3c27de525ece5a5d5ac00000000',
'01000000866f0cc679170b6a99e8b93e58dc276cf64f0379112d128e126dd9dd00000000689a44cb1c69d8aade6a37d48322b3e97099c25e4bcb228a9dd2739febda90e6c0d66849ffff001d0003e8ea0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0107ffffffff0100f2052a010000004341046312c744cb65614769150309590257c89cf5d2ee4f048e5a944fa50c768d205ee6083314a55d8b3f9caff539758c13a8f73be4aac6a6e8c5a917dc670e042f11ac00000000',
'01000000ddd64fea2fd6e3b10b1456f2ad2a870ff5ff8ed524304d928eee197c000000006bcae7125656cc0d6b3dc563ab3e98d5496dcbd89785095138b143a48bc18414d7d66849ffff001d280002600101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0109ffffffff0100f2052a010000004341046154d4ed678ddf5fbb9e09ef7d1cf3a8053f5a09fc89303d150d4e715bc691b240c93729cd412d707c48f7bafe41db684c2f9a39aa5995786827e9ed4d185e36ac00000000',
'0100000012ad62326d4d1d7d32d2f169a1a816984f6298fdb5ccc3f606d5655600000000201e1ad44f0ae957771d2e60fa252594e7fcc75a51db4cdfb5fbaeb38612390490d96849ffff001d062167710101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010effffffff0100f2052a0100000043410427df882a156b4c29a4d05eec8d0c4b53f976f7e0ee2639dc8cbb6327451b8cf0957abb3418696243f392e73b586ea4b074e5bdc856a234c9e174615e3bbcea24ac00000000',
'01000000aa698b967619b95c9181ebd256700651aaa1255fe503f59b391ff0b2000000005a8da000e1a2258630dd6f0286ddc24b7b0ef897f3447138c9a3ccb8b36cfa9e47dc6849ffff001d07e8fbd10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0112ffffffff0100f2052a01000000434104a449ee292e46745b6e8e350a04777ee938f151af438a6b80c2fb86a65e71e6a677897ef1f8e997dcddc024d92639620e09021710ae3076836c90f3fa2fcdedd3ac00000000',
'010000008b52bbd72c2f49569059f559c1b1794de5192e4f7d6d2b03c7482bad0000000083e4f8a9d502ed0c419075c1abb5d56f878a2e9079e5612bfb76a2dc37d9c42741dd6849ffff001d2b909dd60101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0115ffffffff0100f2052a01000000434104462b00d67b254e4b10de6fe0b277b6a34f2438758fc5fc2c18c5b53e9352322b94d2be6ca5a2557ebf55905f7dc6cca699162b0faf919754b5ca369d31d0bb31ac00000000',
'01000000f528fac1bcb685d0cd6c792320af0300a5ce15d687c7149548904e31000000004e8985a786d864f21e9cbb7cbdf4bc9265fe681b7a0893ac55a8e919ce035c2f85de6849ffff001d385ccb7c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0116ffffffff0100f2052a0100000043410492ade9a7a1fde25016c02d223e2f1c501d2af11b492e0a3f0cd617a01798c5f5eabd9d5957a7b2d66d1b42f688a6fd5d2bc60ad0d7a00f6006fc4838fb4c248aac00000000',
'0100000050e593d3b22034cfc9884df842e85d398b5c3cfd77b1aa2a86f221ac000000005fafe0e1824bb9995f12eeb4183eaa1fde889f4590191cd63a92a61a1eee9a43f9e16849ffff001d30339e190101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011effffffff0100f2052a01000000434104c9eb99d7bbbd9acfab695c8aa8b931beb43292f2fecdc19d7e687b524d2e2c8a9d167f9be930634adae005424b441a9de8e8c15d8b2462661eb78418a8aba662ac00000000',
'01000000f8000cd0261cdcd7215149ff2f0090c93b0857f0f720d0e8cdee782900000000d9a6665d16cf43ec412e38aef57098c9b5ff613bfefc1ceaa1781e5f087897f6bce46849ffff001d21be2da50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0121ffffffff0100f2052a01000000434104e049e5c6fb83743c8ef0a640cc82ea14191026643e3e2a1c84edf168bb9690f3c201d04a30c8609aa9406c1eb565e1b9fa218317da5177060de158f25ccc879dac00000000',
'01000000bb36b800114609bfdd0019c02a411702d019a837402f1d466e00899100000000fa2fb24edda69806924fe1ef06bd073264d8b32f55eeaacab45a156563d0d4dd91e76849ffff001d0195ec600101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0124ffffffff0100f2052a01000000434104da82f7361830281e40cf9a8fbc5fea33f35d52ef06b7a72e4b12665374f843839f4a7ad2021e07377c58eb14cc1d01d2017a38281910b6935182cff99930dd8bac00000000',
'010000008ec0e98eaa3378c803880364eb6d696974772bf8d9a9e3a229f4d50200000000f6ef70bb4846dffdefb6daa75c87d7021f01d7ed0590fb9d040993609c9c7bd1d8eb6849ffff001d20e842b00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0130ffffffff0100f2052a01000000434104a4a43c49a491bcad58a1578ddeed00ee13ac58c33a4d50fdb0b06d2ca97b6791097e127f9a4fe2d8f952b2420ef82c669c84d3e65d4fce257d707ec214562c29ac00000000',
'01000000817ac590d6cd50e70cf710266e33382088e111e774a86af831455c1a000000008a15f1ddaef05f8acb0db86b2f4534f68d417f05de65a64073c3d0b7e0eded32d4ec6849ffff001d1b6910e00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0137ffffffff0100f2052a0100000043410455f09e54cc544e9f61d0394db1bec90c30ea89c88c554196ebb68e9c8e02c5ea7f3291521dddb44abee835ed376152f04039634ae7af2b5ef2587776348d0b34ac00000000',
'01000000896e8271cf721a5db7b1dbae43b40eac2a7b0247870b06f47802968800000000595badffff2bb1453255880ba0f33d7be62a2f55b6f266bc26869d2715974c196aef6849ffff001d2c5bb2b30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013affffffff0100f2052a010000004341048644947f13359243d46be6436fb04bc20f92c1ed34b9548b31838bdb5ef721da70c360f2a8886e47addf7b3f87142b5245d95c9bbd6e36f43055998174db8417ac00000000',
'01000000008de6ae7a37b4f26a763f4d65c5bc7feb1ad9e3ce0fff4190c067f0000000000913281db730c5cff987146330508c88cc3e642d1b9f5154854764fd547e0a54eaf26849ffff001d2e4a4c3d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013fffffffff0100f2052a010000004341041ada81ea00c11098d2f52c20d5aa9f5ba13f9b583fda66f2a478dd7d95a7ab615159d98b63df2e6f3ecb3ef9eda138e4587e7afd31e7f434cbb6837e17feb0c5ac00000000',
'0100000033aa0fa26441ead7005df4b0ad2e61405e80cb805e3c657f194df3260000000021184d335529aae22259315be42915b0360deeae97ec428a654014a3d2899ca00ff66849ffff001d0948811f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0144ffffffff0100f2052a010000004341043cba05459e89a46f9f3ef8e01d07ccd4ce9fc2bb35a6508419c98883230374ce1c5e177efb612f30842cd699d1aeaeda61e720592d0518db0f9c2b9de9a2cea1ac00000000',
'01000000632dfba41dda58eec7b6db8f75b25a69a38829915c82e6d1001e511c000000004f08f5265053c96c4eb51eac4ad3f5c668323f4b630af32a66915eeee678f9b36bf96849ffff001d399f07f10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0156ffffffff0100f2052a010000004341045c1f2f8b3f3e51ff51a7230dcc4c0983986a8aecb1751242dd62a0cadb653b90c2636ed13fb479f80f805ba462c08ddc2863367932b9615046adbecc1443af60ac00000000',
'01000000b5969273528cd8cee5b13a095762d731d9c5e30a21b4713ef255c6d600000000f54667bee8511d31bb173bcc6f15b0bf3dc42788a813439bfea9065f90586f3ca6fc6849ffff001d2c9505220101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0161ffffffff0100f2052a01000000434104cad6127556bbeb5753c0e9a0c13787a7f7d4773c1544651072c8a17c5d68d47974b6ae5118344b268ecb52d164917cd9db6cecdcb4e6be44aaf302abd81985caac00000000',
'0100000005ba6ff20c063f7f23b49c53d7004941241eb5347616f406333fdefc00000000b57076c0e5f498a6f06ef26c72e224cd7e25784ed6cd569e570988d5e59bdcd36afd6849ffff001d2edcf3b70101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0162ffffffff0100f2052a010000004341041ef7071c442fef11b695edb2c555bdc14ab797683e709695775e356f8a016969507842c8071353c33ad5d169959712be1cd2ee5db9e2b84769b5b9541b6486feac00000000',
'010000005b74dda1cc03078d30fe49722218667eb31524f22c59687ac30fe04e00000000ede29e76449491b0e2b766dc213c0e15bd7ab6eae48a7cb399c22a48621c5219cd016949ffff001d1b8557c30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0173ffffffff0100f2052a01000000434104ea258025b3a5fa9352ab84951c1141affb5d7e0364ab18ed65d7f970b9af1cd7f9550cb69a016480d825e4e112ba9f16c717ec3a421c02155de4425a2066d208ac00000000',
'0100000083527a686e27387544d284257d9238c5fe3d50fc9e6ceb5b8d8b4346000000000201df27519bd574817d5449758f744e42d648415d1370b17ac6448b6ccc9cfe20036949ffff001d05727a3e0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0174ffffffff0100f2052a0100000043410494a494a15a35b0fb7d69d03e1a0824d73933d57ca21236fdb4aa9412f19c4a5bf772b50a0544e743af767582523840eeee684aac0e21d5da4b6a41320564ac24ac00000000',
'01000000c0d1e5e651f40fd9b0a4fe024b79f15fa65f1d85bbf265582ccf93f0000000002837870b786929d9e30d651dcda7c3006a04b79d292261031a4235328b0f0fbc5c066949ffff001d1c00dd1d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d017affffffff0100f2052a010000004341042f1c4a33266a8d0560b16a1ca35982ef0414ef30b268786de15c8c9836b91a612f5a043f026e3787bbb40952e4acd6a21d34285d939bfa6a23ba94b72de78247ac00000000',
'01000000917354007e87c5ea0a1bea34d5275718a40d082bdd28717d7075f34f00000000e43721163a2bdbc80493a9e0b65d20b1ce63ec4c5ffadc39ea01e13d4e053596d4096949ffff001d1e2f18120101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028200ffffffff0100f2052a0100000043410438a186480a0dba427b44e72dbcfa73c7c7597ad0facd2fce69aa6a227d8533bb2219f1a6d7b0e057f52f7075d1858f2a9f725cdde03dc22d9ea6db10f6274091ac00000000',
'01000000f12ee37c151ee80a22be4f6ff155646addc588cf604e3cf354dfb4750000000095ca77f0c5dfd190be1eab32399d93555666cdadb8f44eb0636a608414b10d3c400b6949ffff001d160ab4500101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028300ffffffff0100f2052a010000004341047fcd0b4211b1c3cde62dcea8da192533f3fd1446734c221f5ff54904d28803eb77c16aeb9b1a0562e79d38b9ba281f9fc0a4c24d7d8d5af9d4c11b670dc61fc1ac00000000',
'010000004aa5ae0b1842e2daa39a019e1a6cfad2306aae707b035f3ee571710f000000002d00540fb7aa5cf6fefc567912eeef891a19ac2f9fc055eafd229b1a73e1a182470f6949ffff001d029563220101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028c00ffffffff0100f2052a010000004341049530a4ab6a53952c94488cada1f77631ed1d29d3b4c8597d8b68623f290f412752c087f2ec0fbda29dbb8496984ff6e1f8b0134da5b8011d34d9b3bfdf702e5bac00000000',
'01000000df2c4d42797dd61991b8df3033716f364b33f87a7cbd3494b8587ac400000000e1fe31bd4e94cd3a004849125ac5951703d34b33f3a90ca1ddc67ae4f8ed6eae2d116949ffff001d374667530101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d029200ffffffff0100f2052a010000004341042a988f492cef43f38089fc1a6f11ab07f3cd52ca3c0fb6850bc2d31b83dbc97c7575203691a5afe51fc36d287bff42506979e40ab61a758adafc51f746b2d6aeac00000000',
'01000000c49052b367c9cfc10792aac007acdf986aa1e60fdbb87193cbd6732900000000eea3f31766c62e47ca1e9ccd303e37404887a570375079fa030b3e036ce71c7038146949ffff001d0552ee6b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d029400ffffffff0100f2052a010000004341043fcbb41d06811e2df06c70dbf78e63c6a4404c28a0c990a837ee54f6c09fe259182f4d18233399262a640ac8c2f8b58b354bcf57ea7bb2dfb6261ceb8ca908fcac00000000',
'010000002aa08c1efce70618d7370e0383a0b5801cafc5ecdc8108e34d93fe42000000004f0c28db6791823456c979edc21f8e9615a037c410299a745f2e7af03cf33107c8166949ffff001d22e2cd270101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d029f00ffffffff0100f2052a01000000434104480b0d9f662fa54744f9da98a4671b89290dd30a1c4aabd9fd276417f6671770dc0c3bddca76080bfd4fc37d85cf84260f06aaa8a15b141f2f98da4215dd9b59ac00000000',
'010000008e6285267ce431a52e3ef3c46eefc4a144f51195f3bf8489c891ffeb00000000a4d66fc5b10430fcfd14558e63d19b649a61ee95b71b1bcce948b1d53583dbebab176949ffff001d4f7aef040101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0106ffffffff0100f2052a0100000043410419ba01c19d1c68af6bc780289ec1a5d4c181e81089f275325c6e1abc1c2f44c67d99ba9be5d3b9c0b903a8655b853c62717bb99924a1d9bf501d3f9f12b56dc5ac00000000',
'0100000066184d75b89754b5363036a66b0aa70142ae537e9c2a64c5175f97310000000049935f8c517625d3560f23a3cdf82fef68779c99f4a92931c91d8c11517c5cf137196949ffff001d2dc932c10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a100ffffffff0100f2052a01000000434104066687fe17381d3ea7490b084a2e9cab4ab37369089accc8b005cbea944789d1e34fcc418ed70c98936ae2c97cd8478bdc37264260421999b150573d1106d492ac00000000',
'010000005002c9b34042ac70ac8e36b1840672d69cb0ba6ada5effb6477de4aa00000000743a0389e4d8c9f60ad41025b797fd25e228123c4b54b5df20ed02ca97781df03c1b6949ffff001d21537e7a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a700ffffffff0100f2052a01000000434104931ec0965431158d18f122abc012edfa87e83df07028d2d3bd71504de74cac5798608b8283278f885639030029b1e5215f4a087506be0455fc9ebb5fa2aadd0eac00000000',
'010000000d765e68e3487bd6d3372dd9eeca050857cf6c9bdb171fcdbe34d363000000001567e4c48479995636794ce5ec794eb145c1194478f45bb0a45cc11d8cc27fb1581f6949ffff001d28d2dbc10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02ad00ffffffff0100f2052a010000004341049977f4ef44b1c7c3854e52c3706c3a9614d7981ba3a7d4c7a168399c2ae324242c9b1e22e5ab8d0e8f732d56f353555e6c8ccb6116b1f46ff5fcc0298f9fadfdac00000000',
'010000002bf72d8a5d6ea0889a5b52e19f53268423d644d3d61364174b859ccd00000000be23d982899e45eb4f5095cbc1c43ddc9495e93fd1e4f0bb3a20fd461412c5bd7a216949ffff001d14fc8df00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b400ffffffff0100f2052a01000000434104440de3fdefd849e2d4725cef4dccf25a7bd8b8dcc181edaf189391790a5cae414a08e75a76df0fc4ed2fb79dfba9fdc3026ceded3582a34a20fffcfa4fb12153ac00000000',
'010000004c812cdb1077ddb53fa3da180758d29b49262cc37eeaf9ef74a8afbf000000000743ebb1940fb72a15cebc9dbe481ea7625c70790a56bedfb7d74e0ba8227880e3226949ffff001d182b34b30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b500ffffffff0100f2052a0100000043410443e7444ce6d177ed64331242f38feb451362b84f4ad5a0337b40022ade0fe1e5dbc923c6a5965fbf25a58561c5a28edec98d06e26c5c3c39000fe41a1325d004ac00000000',
'0100000039e975250e63187ecb299082518f8da887198ea2b0834a1089cdacdd00000000b87adb107589f869ca344a457dec051371352b2f38be825d914139b568305faa7e256949ffff001d3a42e6fa0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02bc00ffffffff0100f2052a010000004341048bc631c0da8fa813d18cc2cbe89559054e135bdd4a590f3a25fc66ff642e2de7f18f5f398c6f00a12df74f5a82053bfcd4ccb55bd4f231d2ffee618e7267a1c2ac00000000',
'01000000cce04fcc1138bafcf657f97e31c30705b991827071233deb2eae63ba00000000cb9f33326bbf60634a0634c3bce1c4a7e43ac4bd3fe54a654ae35be3f6ac83fdab286949ffff001d2654f2460101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02c600ffffffff0100f2052a0100000043410439fc9a6971cd37f858dca09e85506cddf42ceba231d3a53960bc8b6ca73d98c4785ff33a7c24f5de6a251381c87fdc104867df420495b7d93d39861cf5238c36ac00000000',
'010000005714bd772bcbdb97a08d32cc82469cadbf7feb69bb4131a993bc7c7f00000000e19a9f3635b503e037212f13f6dd2b40a6b2d81379b9b341df3e33c14c22a3de8a2b6949ffff001d089368dd0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02c700ffffffff0100f2052a01000000434104d2b452aa16024f4eb306bfc304e7f1ff5a01c2f4953859ea6608a7ad5a7c3e5b238bc6310d3816f7b8851e06a3c218911a16c3eedf7d1db982b06dba8c399a34ac00000000',
'010000007a127b3a7af982beab22647b6456c8cbe6dc43a290c65d87b2abc08200000000b4ff4753f29de2ec4aefcccbb72b113f820894587fb3b7e0218ca6cb648cb441d02f6949ffff001d39a360d50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0101ffffffff0100f2052a010000004341041ff327e119b244f3c6140c13f94fdde6432fa02aebaac4a263859615080d0096a43d447affc1572e3391d762de5aa8aa650cda877b3bf42ecd157cfe49ad838aac00000000',
'010000005df242b278026fcf51ac4ba5cf5b590e58c2d1d76b2c09b25c52c98e00000000d6be02040ee5f8e52f2e925e6f70c73196064f99f20090bc73ea71516c5472d455336949ffff001d295b06ea0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0103ffffffff0100f2052a010000004341044ec8b0fcf5304f08f97348130e5c12c89f2f1678b1599d7f78b033597944945362ca67001ad40afaac594d09cb25ef323ea1bea28ae01c392397a7c673206072ac00000000',
'0100000056d42459d4e316593155b4fad15dd700b93e9d2eb9999490d49e98ec0000000048b6a7bcf2a59e336da83ee70ddd230fc7e2db16c3c2654494c5502dac012538ce356949ffff001d23c2373b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a01000000434104a5f1b7ea48a8d5913bfc569b1b9e091d7fbbbb5cdb97af0442f40be158fa5af68417e4bde42dfa04326e38613d583aafb5edb675b19e32465e8454fdd4c64202ac00000000',
'010000004ee5095194d71ca1b345ee9f27dbb6815ce4d5df9dc2c3c91ba364be0000000026366720a786e6615b3203909f8df77fc2e96d1afe593bd3d9623d19c481c947aa386949ffff001d1e6cbbe90101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010bffffffff0100f2052a01000000434104737918981d5f4caa7efd6c11cccd696b4b588e823053ce202dbe1b2531f09150f885e32ff0af259d2bfa3f0ba573807addcc096375961f1490a50a860444204cac00000000',
'010000005878d514861163b782b54b2d4c6f6bbdaf22e41c2401e9f84522515a000000000e7dcba835e4c20485b614f252183b53921a8901049ea6ef22f09a42195601b5203b6949ffff001d22c632130101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010effffffff0100f2052a01000000434104c8e38a2d330ef2c1317df46c906ed13c462bf5f360f1e26b1427ea38bf8f4075c91742ae5a0658b5beffb512dcc00f661b4813e85f584708204db09e4f6bd9b6ac00000000',
'01000000ef06fa30dd7275529ae9d2677998c4d507a07517d28b23e6e08ed2e7000000004ca77b8b243eee32a9b06a8bea33abd5cf517bf68eed73e7fa951f4f30d2a17ec6446949ffff001de4acd41c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010fffffffff0100f2052a01000000434104fd024595ebaa7a83bd672cd7d59e6eb42f0f04e7dc77a4f0634e45c2420d8a949cba06cde901f23d52cda564d494d172ba80281fa2d9c2a2e825e9c462be0da6ac00000000',
'010000002f8b9d4d8ea162a1d2e5fe288b110bf80a92b963b2d30f40956c88a2000000002518bddd47990bc127da5579b114cc3976568c7d0fc8f5b7a4b90478076799fba76b6949ffff001d397d4eb20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0103ffffffff0100f2052a010000004341045180728bae477149c556f55987b95b926a0da0f542adf2e8f4f811cd929b8e5c73b92ac9f454a2bfc7b1463be7f553b9e7ba26f7229d513241ff3b5cdf6b72bbac00000000',
'010000002100cacac549da7d2a879cfbefc18cac6fbb9931d7da48c3e818e38600000000c654ae2f49a83f60d62dfafca02a221c9cb45ad96a5cb1539b22077bfa87d25e7d6d6949ffff001d32d018130101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a01000000434104e869b280ddc2eb25995d46264e9b94d99214f0c3c3eec3b285c54bba7658edce94a3dca09c2206ec7a81aa1aef82bbf43c548f19731c37744c2bd97742e84173ac00000000',
'010000008fd40a92b8965c798cf25dcdd8395de4ef75f206337de4985a3262be0000000099add42809e35d9c89641de1e9497db2e70bbb283e9b9492599f879533654c5cf86e6949ffff001d30177cef0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0105ffffffff0100f2052a01000000434104f0d742c83392ae7f885fca7739a1681df9039d9930f4eede9fef7b7c85fb191fad43e584345e8b872c84260b39c2e192512ead631083be327a6825fa0ba561e5ac00000000',
'0100000086cff19f969df7040f27de690e7355436001cb3e361e7589526a077d00000000be4544845044c67df65f37d0ba8bb323fe457c141abe38eecdc2e530144bfb8103736949ffff001d31852f960101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010bffffffff0100f2052a01000000434104916315e2bfc2affe420b84b949e53134d996af10a6f6e6a4ffb0158d00ba7433ead8bdd024517e2ec650965ffdf8f6eaf2a5046c0889b3532206f65be6e76a95ac00000000',
'0100000053514b63574bf6c65d576578f6cb2ad0f6256de1454211ddfa2222160000000073cad1e2d193f0d27471b13eb4b1f356aa63de8dc78a58a9128a2115c6eb1e5647776949ffff001d140de59c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0110ffffffff0100f2052a0100000043410426f1f79f707063f225a9d48097d0fe6923afb3f8c210b536c93b8465e8a5d937a90806c0e2861b92a91d050f81c61e837e638aecd53584205bfd521bc9a14aa6ac00000000',
'010000002b120517ca99a3d8361c2a9eef3126fff7c18e3ec365dc2201c315ca000000001d2e4b642f3d14c24f57f237d38acb8e4939855a8ca6ce7dab48e2cd85843d9ad97a6949ffff001d1622692a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0115ffffffff0100f2052a01000000434104e761595b66d7c108dcec83722a23a24ce0b2cf3f8f5db4fb483b62a36a96e5ad93698cd2006cbca41b4294af51fb61c13202eb9a871c86f8ce3479eccda1fa45ac00000000',
'010000004f8dceb614b17b5ac9c9368906ffb338aeb750a1dfe1adaa67eef59500000000b45cf14a7caeeb5fcb286d314ac2fa85f58df3d5156fa95c832f373930de9bc3b37e6949ffff001d36e9e4e20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0119ffffffff0100f2052a0100000043410433b8523d01fa02c9ff12b73bb4e6dcb4dac725e0853d119cf13d791e202b4e30dcb6da12d3627d638ff9ff225b6d7667ee7d2a36e5e140634772b6da6041479cac00000000',
'01000000d143e6eb3910c5e54f55655e885727810105c04754ae1edeb349992100000000fc2bd82cfc026bb9594f5e92e7aae7f0c5750e6e7d8dd73812bc1fff792d2712aa806949ffff001d1d5b20d80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011affffffff0100f2052a01000000434104d5897fd3690b923198285547881b89ea914fda24d5ea71db9742244a4b047b2d9a57d24c49067438491eef34585f2c6178bfb45c677afe7034e7b4ae5b603132ac00000000',
'0100000053fb045b4d3ca149faca8e7ea53cdb3168bc58b875e47196b3a6b3f100000000406468307c915485a9c9eabe31cc853e68311176e07e71475c3e26888fb7b7ed30846949ffff001d2b740f740101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0120ffffffff0100f2052a01000000434104212f1e2bc4abbef7682da4e136de7f8cfb6e49cf21eccfed72046ec0ae5f034a282db6ba0391205055aee425ba21e033c94441227eb86cd9f13a93c330afd563ac00000000',
'010000003ce6c27ae14022e4b6ea0a5c3633d156e3e3a47509c1adf085371ba300000000f01258747019514aa5c475cddd59a309347280ead98d19d8df8f9f99eb56757938866949ffff001d18bcb4f80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0121ffffffff0100f2052a01000000434104db2bfc4aee47f0b45ae88bc76431940aa2547ef7ba34e4bf176f7cd37a3713e56a21ebece2a105df52bd6b4d213aaa8c7bb7c6556bff3e2a9d43e4658d5e3af2ac00000000',
'010000004bd0b78e90c6b0f361f395535ac170980de0c8214380daefce31fd1100000000282c9db8313817b4835efab229872eae2b8b5011c2e90ed14e57192984da062359896949ffff001d15c6aed80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0122ffffffff0100f2052a0100000043410476a73b9d3afd848cbe8c99a0b1cca07ad5117d8ade5a314ce7739f0a189a0be77406d13cb6c92fe6d9b69c2fa338136938af9e930db3146fee39c5464a6707e3ac00000000',
'01000000c232af712ac8656ec1305f0eed1a024dfe6a4011897b753c58ecd97600000000c6752761b3f3db282dff2e4c43d1e44830dd42daf448f0398c2511925ccc949fae8a6949ffff001d14ee579d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0123ffffffff0100f2052a01000000434104580bf58d578798b7ac68e19e933ffe544045b43b05ec4b2a40b5da5371664bd88bd008ce766a37607ea34b29b18e2a6a0bd1c47d025eeddf6afd88a193cacdacac00000000',
'010000002fbb2cf37990cba3a83ac9b3b465247d6d56c30898bb680920aa65f300000000671df2bb3376bb03ff686d80d2ffc4794cd7f720b49c9e6e09f494743dc984c1558d6949ffff001d0171077a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012affffffff0100f2052a01000000434104a48c81c934bd903ab347c7fb03d624c75305cf1e78e66c09f334e1617b52484ea130bceaae53ffd13804b5d188d4f590a469d1e878ad151e1f4fa6e07aeac505ac00000000',
'01000000b1c1b830fa67f2f425c668042dc7f050e114137be60d942a2bc9556e000000002d5f16b75aedef22d1331a6ef93329b3a1a3eb453564c93862fb6386b996b881a18e6949ffff001d05b824d30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012fffffffff0100f2052a01000000434104ad6f8b8c71859ab32eb863c88f33a9c6d621639cb1b6094ee61c7a8bdfa2fd7070c620d8f0f99e98b090ce932a7ea8d7595eadd0d80ce966c52bece59b96c88fac00000000',
'01000000d2e151e4c85e327bd88a0d9fee0f5b37b0fc0d78c268d3460a0cd409000000005a92e14ed75457c0a01680433301e85dd78b7988e5dd9004c46d6d1712e1cb717d906949ffff001d075216650101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0130ffffffff0100f2052a01000000434104cd2f49d4c04cb91fed784dc604f8021ed3ff52a56ffefbff5e6f843079cf34a37d47a5c0c86a98e3a52f2ac8eaef2baa5975620b6bf097ca9d4db5abdec58715ac00000000',
'01000000f0e44c20dc3e5d26a89301741c82703a423dc4e1803cc44cb3bcba6900000000905498df05eed30ba5bb145df31e8b185e947fcfd3b4f4b15a5623af5aa726329d926949ffff001d297eac720101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0132ffffffff0100f2052a01000000434104c50039dc8f8f4878f3d56d88befef5d7a1a1baebac823518dbbd06d36accfa7f28486ea8f6af26a18b3ab415273e1000ea213c53a82f78767347f1a72e69fd66ac00000000',
'01000000f02d0d0cfee875d2b128277f39a82378dfe0cc00d9aba9151fdba81f00000000c07d8b31b161830db5f6198d8933bba12c985618b18fcd6291acb4c2d8d82c8f0c956949ffff001d25d3778f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0137ffffffff0100f2052a01000000434104c00009ba97102d1bd4703d13d0ebcb0743a54e9b14f5ee4190dd96ad7c4beffa61ed95ab3f60187ce8c619843a088df19781fd24f92aa90a2464f02689de4e2eac00000000',
'01000000cae10bd8c753c43529191bc15f2956f96c3c2e9498b3ee8dd506a42100000000d8855c4002ac58a052b1ad12af7179fecf988893093528f2a457beb5fb6b715fe1986949ffff001d275a66780101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013affffffff0100f2052a0100000043410466997c6a5c2d1a61b07dccc317f31f1846c4054b9b8d3623a325358cd120ac6b60cbd6b1fe70102d293732401b7ad20d125de6371905fde9f2c7fbcb4c049125ac00000000',
'01000000063f92bbe049395e3bb6d865a6de0a5b26f4b6b01e90f4bfce381bc20000000090162a9c64459060f320a51253378106c6472a23a9dcd90588f0cc09d00d4dcc549c6949ffff001d212e67680101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d013effffffff0100f2052a0100000043410418b81a849bb96e40b9317cb8daf633cc8994a42b2e7a5d59fe61a4c106fadc1bcf20894b144498742007fbde4736affc2eab2fc26697aa285468e3e62138b1b2ac00000000',
'01000000cc91c80800b3a75d6542b82bc6d8d7024551f9bfb041ee1b0bb8ca0e00000000668b636991dd0638ddb442ee2b10e3184d87e2d059a43076e10512af8814d3d07da06949ffff001d32db67ca0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0143ffffffff0100f2052a01000000434104be296437b546e1c6e70d2f98552c93adf000d2a6cb2bafd17aed5a0c7c52eb04293901c380a8c64417a127963984a6be2e501fb75d2d111eb0e6deffc09f9163ac00000000',
'010000009918d5221408b7a4325c754792ccad2b13e22e9f30dfcc0d965eeda80000000069a57920735cf77470917d6ddb01a83fe26fd9bcd71a360443c08d4d2d59a43372a46949ffff001d31070a950101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0145ffffffff0100f2052a01000000434104c6b866deb67d6f20d7030817e1afa3c57ab94ffa1b3b39d365fa047aa518868697376dec5a78d497965af57cd27af63362c2df85e7823a0144e06a37851e072eac00000000',
'0100000095194b8567fe2e8bbda931afd01a7acd399b9325cb54683e64129bcd00000000660802c98f18fd34fd16d61c63cf447568370124ac5f3be626c2e1c3c9f0052d19a76949ffff001d33f3c25d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d014dffffffff0100f2052a01000000434104e70a02f5af48a1989bf630d92523c9d14c45c75f7d1b998e962bff6ff9995fc5bdb44f1793b37495d80324acba7c8f537caaf8432b8d47987313060cc82d8a93ac00000000',
'010000009a22db7fd25e719abf9e8ccf869fbbc1e22fa71822a37efae054c17b00000000f7a5d0816883ec2f4d237082b47b4d3a6a26549d65ac50d8527b67ab4cb7e6cfadaa6949ffff001d15fa87f60101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d014effffffff0100f2052a01000000434104181d31d7160779e75231f7647f91e53d633839eb9ce3fe096ec522719cc1b9da0237cb9941a059579bee26692a90344417069391a6aa1e4680caa4580a7ab9f3ac00000000',
'0100000084999d1fa0ae9b7eb8b75fa8ad765c6d467a6117015860dce4d89bb600000000ceefaf23adb1009753545c230a374c48851676ccb7d6f004b66dd302ceb5443b4eae6949ffff001d192e9d710101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0154ffffffff0100f2052a01000000434104003e8f977a0f6e276bf8160b0a8eabaa7a45ca4f664f201fec4f8c95fba55cd9698ea8fafec47cf3f008e75ae02769ce550d25fe024a0fc08cdbae2a08651b1eac00000000',
'01000000192f62105285f84e7876b764dde15cc96e3689ccd39ff1131f18041600000000f38b91a939e7f81483f88ffcf3da8607fd928a093746a03b5eb4964ae0a4d2886bb16949ffff001d1541834f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0155ffffffff0100f2052a010000004341044c77009fa2582cb0a28311edf08eb7e3e136e87ed1e6e9cd3a4f755094ca72a77580d9dd4804318201fbf74e34a11ff82eeceb7d4aa9da5b237ae27d08ec8741ac00000000',
'01000000753fbb8b0a766119fe8e9347b55cf6f977bc961d7dff46b87c050921000000004bb7d646fe8e6678ab8829cc899a89f256b6cf19dbddd494a773b057c374002489b36949ffff001d1766221f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d016fffffffff0100f2052a01000000434104cca405a78f6b83ca6e59c441df093c337a70d70015f428595be8b4670d5ba62093e33195b1cf9c220005fc90320c3309cbbc34c2e6da5558bd87b1f5f7c4400bac00000000',
'010000005bbeaaef7d3123d7367e9d68978f0cf8225a2815b3024e0125ef11fb00000000c87ac7e967e3b09e53e4bb31d4d9306465bd8500061c1819b15d451b46bdc95bb7b56949ffff001d2ac2b5100101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0179ffffffff0100f2052a01000000434104953e5321dc6538f67ac273fe70cbfdec386bd389b0bf0c81a257f9351c7c0d9b6f8cb3e4b14fe322158935b04474e5afbded15f3ed5edd055d15368d708704feac00000000',
'01000000aeb1c63f4aab6eb66f12f3c64949f43a4bbd1d13ffe777c3015c4d850000000080ee9dbb0f58c4e12269383c9735abb1c6f03065f40d5238ec6c3e5fec3a88189db96949ffff001d00002aa00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d017affffffff0100f2052a010000004341045695295847de164b8973b15c7e981fc7e6f2ebf224f9b92a6e53b889eb88adb0bcab956b54418c4a5074d3e766f4e887f1593de05179abc0b33c0437504e04b3ac00000000',
'0100000014770aa562d6a32431289058ac1bcfafec815bee4bd2e7eb15197c870000000082232ac15c8d8642df8827fe5e3a297a758447f00c1ee9e51b2e578b22c5e5976dbc6949ffff001d2c5b65bf0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028700ffffffff0100f2052a010000004341049a72c65160bbf2726ac4b4beacec13abf5d65be89f8ddb2347870ab1c091d1e1ae3314ff7384f9da33c31664ff5aafb958a762f37f8726428cab6dc2a5af0aadac00000000',
'01000000ebf2a13396772607b579e5313855d85deb6c2ff5eb4b896d17b0167e0000000002946a80f855fa6e59264de3b84da0ce975ab6d0806a90288bb2cb7f4e782b2016c06949ffff001d049add3f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028a00ffffffff0100f2052a01000000434104d42de70341ea7933849a8ad7f1fab5bdc38fdd56f5cb4cedde3d3823233dbe601421e06d32ed05f08a249faa04aa2e131284c75c7e606d2f692bec7bd5a8026eac00000000',
'01000000cf247ab093cae5a6698f9f3fa5e9bd885ef6589f2e5e5cdd9dd6af420000000030b2b4faab68a1669e4eda67442919f25561f8df26237de4760425433f7f00a33ec26949ffff001d359e2d4e0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028f00ffffffff0100f2052a010000004341040a2e3fd6c40ec70cb3d47dd3cf2208d487eadb3631ce499a474bfec2e2f449b8cadbd02bea341f9d996f5af053168362c9f188641ad2959c3b2a0e48ffc40a8fac00000000',
'01000000aeabc567c5d100b902623137777ee19e9d5b758170acbde0c4cc5d3f00000000c780aae409a4f0992ffad17806e5f339c7b641cbc8562c04fd7319fc835edeab03c66949ffff001d01b4c8810101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a400ffffffff0100f2052a01000000434104f5e98fbde709bee5465104448a9947f3d8604bc91e1af5e5baca9962ffeb3f1ddd9a3480c718e32334992511ca63971cb679c1027c88f4ea2d2bbed4cf42cc86ac00000000',
'010000005be5ce66b835f9b908c0f1680a009734919fda6f3a81a15861360ea300000000974ea41630fe3addd397cec0f06ee4aae825f9c2d6d789b082d5f2c646e2a3e7a9c96949ffff001d0405f8f20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a700ffffffff0100f2052a01000000434104a75502d3670b14b7e5640591191c05868da841699d52f9752a70dc891183a2b9a00f7a5893389f3b973a8ac5e30c20041ccbd99a3889b7ae7a705203d0991498ac00000000',
'01000000356c1a09522af6e71ed56b162cdddd491942547becde82b8d86d6a4d00000000ad1e219b43cca3ca0cdc2f17cab1a4b34a681eb17175916aa2dcee017011383479cb6949ffff001d08e0347b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02aa00ffffffff0100f2052a01000000434104bb12c362f9e454b2766affdc2bdcad548cb7e62202372365a38463798b47fcfc4f20321adb48c16a010dfb43fa5c80bc49d62a23d375d4c5a91cf29529266fe4ac00000000',
'0100000037c36afea0a92552871867226ae9c9107ab0fb2f66de0f64c98027ce00000000e1e5c48a2c1e53c4db2764dc5bab527232d2d31f476240b39e567e256c269021bfce6949ffff001d047b13010101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02ab00ffffffff0100f2052a01000000434104d8fc18bbf716dfdfe6775d83b1b91bdd6ae3fb07093788e96d36c65cff5812f2705e99228bc31e3338011359d96f7419a084d317431ce8a8103dcdc752716d43ac00000000',
'010000006d7c6757b8cec3eaee4805a548bc2f4f083807d70606e48d83769101000000007c23a96d8cf18a918a19e68c5fee3454b5d09ec88b1f63b6c906685495027bca43d16949ffff001d2cb49bb10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02ae00ffffffff0100f2052a01000000434104cd10ce592a9c4918948a5b4e92e9702e6c36eed1a627594f67066792b3a227cedba7be9df6e8117bee0499e243e4df90e13efdd8e14e1778e2247c0e16661057ac00000000',
'01000000bd1ec9370830746c6d00b96f75c786438df4124833b6c4240cd47d5d00000000a4e0c85f9b755b1611de231958c1d7ddf51186095a5ac3d9dee1b1d78594d53e9bd26949ffff001d23df49a80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02af00ffffffff0100f2052a010000004341047a36cf0b12caed035efce9c21f8067afb25c76f008d7f88071618691b3e1bc7be198d74a81f0e5bf37444024beb345d57d1be245cc1516eae397aeac49c02221ac00000000',
'01000000c8ae056e45843ecabc6328e036bb92a3fa8acddb53c273fd39a5492c00000000a9be635b8f87de2c48a738f3e8201f3353c2044d05f44f9aabc4a59c6905865ce4d46949ffff001d2798e9100101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b100ffffffff0100f2052a010000004341048bdd0ed16efa3601791aa2eed35fb3706a5ed49696f2dccf768ac98673f28560c595dc064c752002c2ca4928348edc20d62499c5f8f5b1ec0225ceb4259368a9ac00000000',
'01000000713c6c20e18ace81b09f7de4367c8e81a89711ebd6e96ee05e80f27b00000000fb4361f015fd0ba2b6d7baf685f0cf6eacf1397f84b2744ff063e63ce76ebfbb3bd76949ffff001d2ddd0ec70101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b800ffffffff0100f2052a01000000434104f8f1b66e48a29fd15aa26741128bcf731646161b1603861526216c12ec3e89d2b3d34c3d747afccdfd4704a0972a5184151d9ce0776aab237e46f464c145fa04ac00000000',
'01000000c8c43b8c6a02ca773a2817bda6caf2c608c190e903518d7ef132bf9900000000c28142c6baf94b86be7c50af14abe462faa489979826773ff4a932a299cf51448cd86949ffff001d1b00da300101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02bf00ffffffff0100f2052a01000000434104e6ee7fdd0e774bd612f713f46383da9a6a9d101f5a2c0f3fd7b370e0bbf016265466e06e0f9339882b6ef217e3f0cc319cbbddc604fbd70da4fb17f1ece3de85ac00000000',
'01000000f34115cb9177628f46ef37f45deb3e04761dab5d0b88acc3324958540000000068087c53b8717afb90c642f009b8fb9b490e8215e242737f36adaa0690c09737bddc6949ffff001d06a64e690101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02c400ffffffff0100f2052a01000000434104a1e6391bf1c13349210f9cc74788d4f07ee5051d92a039436ca42bb607017ac0e773ab7c2f91541f57e8323e84c2a61f10b8d1fa7762417ba099a745224a8e06ac00000000',
'010000006a4690e6ba50e286b8c63c826399a6ac73be3f479f17406cdf90468700000000f5e6a6e8945968936187c5bcfbcd35db4c259198b38779e494938d18fc5ed641e7e06949ffff001d36a19bef0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02cc00ffffffff0100f2052a0100000043410414f296079b181ab76cd817f8583761d9ba5b00ca46f16eadfab8e0bb3a2b0420831cfea00b5cfcd97a12fd14b469d9385140d187d2bd8add9a1044685db9552bac00000000',
'010000002ac91bab86c12ddf776b1408c8d94700c05502961e0dd5e637cee16800000000ff7ec13a3709b78b7a29035e7c10fa4363afae9d786e2a7ca497db08cc07d93879e36949ffff001d203e2d0b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d100ffffffff0100f2052a01000000434104a1cf0c83c93ca840f97db5ceb2302c494afc81c75cb29ea1b42c8f7d6b94d8dbbc3f20521999f8d90ad0c83231fe6e9bfd4e6e1c88396ebeeb5260c467651e4fac00000000',
'010000009b5230635b6e8eb413d75661c3b07285145a6f5c38f151015286608a00000000efdb9fea5b31617d311cd69f6f54fc938c6d0da6f659d1fab97f84c9d9b637d6c9e56949ffff001d1f92e6cf0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02da00ffffffff0100f2052a010000004341049345479778f20f34e515b68e02e3ed614ffc5f08b226ef1bbb7f6b9550dc0e753ba54b8842dfb290fdd5089928673118a546536f5275a18f5c243e6a9766f642ac00000000',
'010000007a844a2d5e3211fbc9562904b9951b2503cc877e0541545686d1988d0000000009ae88a66180549143c5ab25014dba8b985672f1806827a4f4b5f9778cef44b97fe96949ffff001d14a906f40101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02df00ffffffff0100f2052a01000000434104b715afd59b31be928e073e375a6196d654a78d9aa709789665dd4aecf1b85ebc850ffb90a1c04f18565afe0be4a042ff6629c398f674a5c632b017d793dc8e04ac00000000',
'01000000a0aaf794ab3acf94f717ee74fcd635cc01badadb296aa11dfde4bba3000000008e27364c36456cfac6156408768d45af49a465f35aae702bc48ce63f97401b6146eb6949ffff001d05adf26f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02e900ffffffff0100f2052a010000004341041fd091cdf05a34ffa28103adf83f399437d5bcf79fa07f55fe98f65a43a82e0ce5adbe6a3abb597d39573693453d8c1fac40ffb2ff2c9d9405940964d7cd6134ac00000000',
'01000000214f1824d6b2eb5f201c6780488187fe72c608bd66f078b51c2baece000000006ac9cb0d762ee58fba505cf59b6d36f2e82ed40cfe19f1016cff52eb7ae34f0d17ef6949ffff001d005660380101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02ed00ffffffff0100f2052a01000000434104327083161527baf5626445d5e795665b260adf7ecc38b000ffba904a9377de16e77e8d3988ef86ba7d2e3faedc3813eebb9821b045d0f3ef0e6d2eacc2867e2cac00000000',
'01000000169995432bd5a5855c37b0437a5e1584955fef197af1147b67fd4a2f00000000c8196e93d912864d817e2a81e07f7cb1a026e3fa5ca80b4c1e1e55bfed4a61289bf16949ffff001d20dcfff50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02f400ffffffff0100f2052a01000000434104ddfeb7173ed20ae658efa0618fc2f0297f7e0345566bd936e6199807b3fe0560dd1a27d8f7e4b7b8a944653df5c55bc50ab563b5346ac21deffd1a1701564164ac00000000',
'01000000fa2347d35699e4fff52d432daf752b9ddcc0c0f0e3a96e19d3805e7f00000000e1ac834e2c6e79465b2fd284f7ca1feda75d155a88ff8bd2b9c1113c4120480aadf96949ffff001d5c6cab170101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0106ffffffff0100f2052a01000000434104b2b6f8c1d7e46577bd0fa1357cd5fb900a05f07838e6e0c61e04944c2878e96cc4eefcfaeb2bf9fc15e9a460d3a9fc1e3468d16a254ce0e2d38768be729f604dac00000000',
'010000005b6672f14e399b1ec67140ad0367e63506347d266fc865332a757a4600000000bcfed99e7c1771baa0582d46dba9c84268a31f9023c259a0b9027171e1a69a5f2bfa6949ffff001d2638702a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0109ffffffff0100f2052a01000000434104076258c08254b7a4d8765afe658ca545827e9bfbc0c31c90377cf3459f2b4d83ef008867a1d28cee7f7ee40d6a47fcb2f271934762d23b44576125e87ce6bc03ac00000000',
'01000000f018084fc61ea557815ad3e8a2fff8058c865e8060c86dea337ba0dd00000000bea5824628bd47b2edeb32cb6a46225a2b74c498a9fd4c5077bb259ffa381f9a58fe6949ffff001d1622a06b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010cffffffff0100f2052a0100000043410406e1974e271c855216c07ebf6594c18e06e73d99bfd7a54b391b24a8a29536fc7ce68b349bf098c623a6b219ca83f0422bc20653c3e201fa5a7e820abed4f30fac00000000',
'01000000ebf0aa9d07b693713c4562044354111303dcf300be82240a6bb284180000000036ba73991a7ac5d0a454e2e019e237d3b1e4838de39b93207ee2b0cc5bbf00921d026a49ffff001d059e1ff50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0115ffffffff0100f2052a010000004341046a69b845c9932aec279e8be18bb5a634f281e403e5a4d2acae4bafd91a50d07324fa27e8d514dddc94d7a4c2cd93514a928390cc260ff6b834190077d09879ecac00000000',
'01000000a1597c83840b54c99a7206effcc252335f2e5daa369cde117e9139a600000000f91fc47ead0859d5f3f8a33f73fd64abd16a21a3b96ef48d52e893741499969a75046a49ffff001d98df280e0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010affffffff0100f2052a0100000043410412b101ed7705dadd2e40ba19c393fa8bf148665f12b03c22e26633452933078acc0aee85f81705ac43aab5f286024917444b117a420435a0839a65e67360639eac00000000',
'0100000067ce0cc7d592a1aefe86fa83ac6101bbcc0d736bd7a0ac91529422c000000000e4a9a2663a445292d046b21dfb56c59abb24b9af231b4913bcf6cdd45eb81b0f90056a49ffff001d19e7838c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0120ffffffff0100f2052a010000004341047305f4965c82a34e51e02de3f85dd5bdd70a8436fb32eb3faf44254efad9229153df556ec2c9c6afe9743a39057323c728102ea7ef6c51fbf29d6d8d8156fd0dac00000000',
'0100000017ba37557e4381bec9bffb1b8c0e6e9e1b045f5d008267559fa3a37f000000000d50983d860884a1414e78d728e30106ac7787018c1ca53ef29555551486640823086a49ffff001d0267d5590101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0121ffffffff0100f2052a010000004341047d2efa0dd72e9626e0b1daa5815c6830d4373e204c38c94a8264d7189b0d5e4221ef298e8f0e39ced8200f1b663ea91002d8dbc08e5ac1bf1a388798e287a679ac00000000',
'01000000f8e93ec183ec2f6428f1c82370595974a35ca60df6bb22f8f97b7bf00000000028ab6cc3287a5ec4f7a4d787a99e5cae6ec7d72e7307551097e949e9aab13105e9096a49ffff001d188fb1d60101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0123ffffffff0100f2052a01000000434104c6b07a546993c948cbddf21bfa3585a9ca65fbcee25ef44c4f298442b2363b738318975158d390c9f56870ddbc50118d391b35d01db25b7931f305d0deda73f3ac00000000',
'010000003a3c477a4943dc98140c07b5970b78278f36f3d16530ea664e1a538b000000004a01544d614d0ceb34ad67b7bcbbb28000ba532067282fee211b36b9b6c8a9f2e80a6a49ffff001d2472798c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0124ffffffff0100f2052a010000004341041deab4b7de0843e4550430b01dfc20540ac1f44dec5d93238afefafd19e963dfa984f35a4750223b03f60c1539b8fa752048a6a465e00915ebd99fe5906fe966ac00000000',
'010000003af99f9879b01c4f520a3df073d7ee31f468d279d22542fd08e6ab7d00000000f1c9801d5e85c29842c8e5aeaf83a0786ef952c19d0d0bee33f78e629e843b50d40c6a49ffff001d196e6c430101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0127ffffffff0100f2052a01000000434104d0bfd22f75e7b082538b756300ace24ecbb752956f3cde8851a8e1e14259cf5c91c98e437f099b0f3302a2997e456c819debb007a235ee7d0a2d07b4f0c7e7efac00000000',
'0100000079ba841ff20eaa20745063d8b58deaeafc50ff30559ae7de395788b800000000cb537ad225f53bdcc69762c164492dc692e190ee3098cf933f82597350982b88f30d6a49ffff001d38909c8c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0129ffffffff0100f2052a01000000434104fb2748e60e5c8e202d15e3576194c81c19e49e2a2ef7976ef2bf1443ea66fc3e36f1299bc57b5b242424240d75e00a951880c93b871d6a262222dae42a1a374fac00000000',
'010000004a823aa83595a947a91df4dffa27d24a4211c1c1b352614d5128cc6400000000d9a7f8d838220becec1802eec1430b4769d6b4699eb95fecb7d1d5c86e6613d80d106a49ffff001d1c45247d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012cffffffff0100f2052a01000000434104313655b2aed748f03cf6e718fa87ce259f35033b5f0933bf990019f5b7a5e1725c5388686186b2bda9512d90d920fdfd998d690622f2e9e43005942f80e205ccac00000000',
'01000000fad05be324f6c411800e9195bde30549522668db30be952e523e9c49000000001d01af6c4716b28de4e5e3385a5846bac1fa90a565579065d904b354da4f8765de116a49ffff001d065c70e20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012dffffffff0100f2052a01000000434104a624f45c0b4b75bc784748ce8affe84de70556cfdb9cb69f861363699047701b9336ecc9f34eab351b28b2691bf69e5afc2e40b05cc6fa4a8ff377cff0d9dfb2ac00000000',
'010000001d1f73cbfd47c38aefe270faf05873ddaeae469eb57976b067cfb8d800000000d5273768443e9e48d86acc6a1c5fc3925e69d64f11aa17297c1c16e6e339960e5b176a49ffff001d2ac199990101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d014dffffffff0100f2052a010000004341040890f2990714393514c898eaa61353163aa4c2ac5be0cb78c8ffe0b3486d580c5c3ec584f303c04429ecb537144ca28def1a61e0213fbb3c15de9572f8cacdc0ac00000000',
'0100000095033bbd6e41afe1eb283ef23cacd5d72c5551a60c081f2f341698b000000000448490d4ce520ae01c822d2f2b489a3b2805416c21b558cf99cd1dfa1e29a8b0141a6a49ffff001d2bf5326c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d014effffffff0100f2052a01000000434104f435a4df01fa9a93b3051eaf12357927e0b314816edc5b76ef1f96aaf04b833bbb4ba44072dac924ef37795dcf23a2e0e525550e4ea2bbe24d6ac3b045afc100ac00000000',
'01000000e50eaf73b308c0b468bf3a8ab80d9fc6a350e6d998ec0e2869ac3da800000000ba98b85bb12baeffda12c2d2263a701e572219f3c93972e17c07b2aa71cea4731e1d6a49ffff001d208ef6990101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0153ffffffff0100f2052a010000004341047bff1aa919559a9dc8f040ba59e45760f157e46bba653171e34aa45cd1e60872ab910651666fd7c5784a8eb496796e1732573ac5c33e40b5c01e4d428c4c8765ac00000000',
'01000000a0001921bc03feda5874a7954f914cbc7a8a6449e1fa40439b4c3ed9000000004206fdefe7da3b2c5cb0c8dbb12d834ef3ac8d6e2c68050eb716babe95f4169d48216a49ffff001d2a17c9a50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d015cffffffff0100f2052a01000000434104ca826620e9c39691fa76854004ad10f848d0f750701bdfe5d1d367d6eb85246647d325185307bf085d68d44bf697e9e430867e1b910f5fb7ed8c38e852094c6fac00000000',
'0100000061188712afd4785d18ef15db57fb52dd150b56c8b547fc6bbf23ec49000000003658f907b55bf0e46ac560d0cd6ebb1d141c311c00193ad69a98b4f6b9b6b87058256a49ffff001d260897180101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0172ffffffff0100f2052a0100000043410440a92015df9284b2ce38644cd761abd99566af0d998c3f67b760b1c1534f17cc44602abe879eb01fe19f01c42e8d49047bf112bf122b35fab1f16255f93fe87cac00000000',
'0100000002a8bd45fab7e40d8207ef95762e8578589a1961a9f9991aefb4477f0000000039359e15c0251a9162151f681d2b23c71e734595db271846aeed8736c2ef443f84276a49ffff001d17d2841d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028700ffffffff0100f2052a010000004341048b5f7c6e17394fa995cd22c387de26362f26aa2bd524581d2a76d5aaf7d540b825fbab64db7a537b05ba68c90471d9ed49cf97bef6c9d4b4d7c48b37cbab3193ac00000000',
'0100000012052719601a039f27921ef35a24c82cae5f5024f326a56c8ee8762e00000000a683374124eb823197b4caab534ac172d7da016d06e35ab5465fa883f7b69c42fb286a49ffff001d08d5a80e0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028800ffffffff0100f2052a010000004341049ffcef9e42d9c8ba9394ff3a105810e8eccfb014bdb34d425a2df526be8de06c554311f92ac36904ddadc93cdef0d666aee8a9fc7b51c471836bd6d1efeb62deac00000000',
'010000001c084a379912af47ef38e75d8eec1f6f698b0cead98fe1baebe17f6e000000006f6168c5809c18ab102a28087ed22a5aac41e5c531b39b8a0975ebaf1fc044ae102d6a49ffff001d1434411f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028a00ffffffff0100f2052a0100000043410457e68d6407f0e0a5c3d25a76954f8319b5b7b1bac58fefde3404f3deeb3f0a9d5201e6e891ce592a19bf90e6cad362df09929f6e2bace8820d279a569c3eb2b1ac00000000',
'010000007e9651bb2d6a7298248c64cea76c14c02c1603c1f2961e9f4e487a1a000000008ac11bbf709fd20c4a6adc39dc9827402a4df48149090c38bfa393f20deca655a82e6a49ffff001d373d76ae0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d028c00ffffffff0100f2052a01000000434104c024196cfc6ad36800b5522ffba6dd1c4d58c96cae378bdcddb52c081d742eb219d0cabdd7ef00f0010e18aba03483767ecbcf0bbe428dc92285d7f072c68379ac00000000',
'0100000035a83bdde524407a7bcdfac3232d2bf6710f5559d532bd2c7534b8e700000000474fbb76278470f31077953b66d0ce967e1b3e2e3a4041553b82cd3fe1a2cb5aee316a49ffff001d0354b0b20101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d029100ffffffff0100f2052a01000000434104f3ac806f3c4afdcac540251e2cd7b61f0d60b1195bf6f7a7ce7bae5d966bcfdb17d0ba62190a162aafe7538df373b92c7037e5a8ac820e928498d341240706e4ac00000000',
'01000000933d9038fe5264f9453951d40e55c91504e1920801c85dbb5c27c81100000000b9253cf4f366a018182bab5a30a54c700db0736b540e3ad16fc1a109a81929b530346a49ffff001d19fd5aa00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02a600ffffffff0100f2052a0100000043410488f83cdf5c83e9fc59c2d92092ea32945b880f19c9da12ead521cb7874ada0b8fcb5688470ceabe8289d53c8964e77a5b738be69da305d6208c6725202de68a4ac00000000',
'010000004d1f55defafd65567a149e8cd32e1a2019b993e17f52cfb43357a79c00000000fe0c90dd69f7661425eebf7c913f0dd932691f3b1e3741105dd3600b69b9a9a0d5366a49ffff001d018cf4760101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02ae00ffffffff0100f2052a01000000434104dcecccde1d125055b600ae12e2da68b11a10c9655d60e9315c013b3e04e5d32e681613765c37fee024d6bf49bd62a34a31ac021aab9ec4e67cc9fb061de6548cac00000000',
'010000006af39676eb24f1eaece7abea73ce12d06667c7c3f463de9513c54ef000000000ca7e0cb6eb209f2b422bc3c764e25c656065027dfd60224ee448d12c57fa74b785396a49ffff001d309eaf130101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b700ffffffff0100f2052a01000000434104f0418233581f77491230adffb4f086fcf029aeae6beef25d7200325e6a2904a89004b312c6215ded5eab744b9a24c16cf19423c9df8d8de02e692800ac90d234ac00000000',
'01000000c66244e034c9bfa52424ad91905b999cb6ff5d49dbe82021d59069ab00000000ca3fae9a5bdd728abb0b3e1953a589d945448dd452331c0199e3dc2b1c5935cb893c6a49ffff001d1c0a17e10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02b900ffffffff0100f2052a01000000434104e38dae71f853e6d2bd5f2b98f9e95105100f6354e65c5946da5640b6cd8ab3f56140d92934d69697b2f1ba12e8c24d817d69b348ddfac83d856918ba8c0aa441ac00000000',
'010000002dab0bfb58146516975016ad954f4d359d683e07fb648a10edcc805300000000ce3465d7e5317988c75a41ea862dc489918005c3d90f7e4590ab3ac04dc1836e28406a49ffff001d08c317350101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02bd00ffffffff0100f2052a010000004341042173053be010806a7ff0cbadb4d30ee3cd7f2035a8d53c002ab0b59c218dac6ac7c8bcd0dfa096eceb093e570c03e8762ce113488897e3b3ec703075a6bcf563ac00000000',
'01000000a966418ed4f17c3c3c561f2bdfb169edceeae84cf1ac526d89918bd30000000052fa7ddc59d3574bbf404011820e1e48cfbee2aa6e8f2f5b472bbfbfab874fe9d2416a49ffff001d17e6dbf80101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02c400ffffffff0100f2052a01000000434104d562908511d1d59bc645a3ba0e385616f00094b4d9b22913e170ac37e0efc7d258734bb476edc17be2c8bb7276dfb11cff10b89bc9db81cf48075a37d379d30bac00000000',
'01000000844617f4b214028227f2a12e1f51713fa9c0b5221bb2dee5bad355ae00000000dc3ebd22794574257ffbfd27b91a86dd7012b2ed308f406523074da63465cccbf4436a49ffff001d34b4a57b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02cb00ffffffff0100f2052a01000000434104c685037c3f28bd6de62ab7976111e3d379349190a34a852a6fdca291d57da731e0e3e83bde490a90db9a63555618b2364d1a21892d907860371c59a909765147ac00000000',
'0100000060c630658c5f01cb69f491ea3fe62c1be151e88dfcbe10fd4de29dec00000000d46520bf3888d22e5fe5e3b42a90b7aac299b95fb494b7918fb4bc117c79c83122466a49ffff001d20d076550101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d200ffffffff0100f2052a010000004341042fbce47a6f1681a83e4664a5be1c82884fa2b7659f3b9e7e39e075bfdcf024c47a6e7d51a87e004ab560d2f5fb3bc4575e4193788e82a3644f37db66197f55bbac00000000',
'010000001cc93417ce5624c12cb0276687806f821ca362ed9a8c29cf85b009fa000000006de50306437d27771ba99ef09fda4941a6c6c6a9c86e0743b4daea0756c082a055496a49ffff001d0147d4610101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d400ffffffff0100f2052a01000000434104b2aaadd458aa70af98a3cf3b749aadafaab0a128ea2bce0f3d421984cbffdcd42efc3cb4e0dbe0805514c1142e721ae01b693b0f6eb5039fd47550f1ee904fb3ac00000000',
'010000004e317ab331a5202400278e0c50ca3b6686e57e73bc86ab931245c0320000000016f949e36753aeb0e437fdbbaaefe5a40aa4960e215a1e3adebc0bf787bd52455a4a6a49ffff001d15f687340101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d500ffffffff0100f2052a0100000043410432c80d5777d7cb2d8d6adb9762002bb0b70b0f21bc73f7a18186552f4b03358e1822b99e3ecd6b1057081caa8cfef68f4fde1022d891e2248d369f83b58ff86cac00000000',
'01000000bb78a59387b1637dbb642533e4a74b38a5195b0a6af8541baa9609c4000000001763035d50efa2bcefe978118171858f930994578cbe46d39a19ef0deb14b02eaa4d6a49ffff001d37d6ac390101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d600ffffffff0100f2052a01000000434104c9d76c328bf06102ba18beb64709eeadad8d279d295552d8146547eb0b616dfdb25d7d3f3c78dac09e60552eb72f1fe8df17478e958b11bb4fa0dce080d5a733ac00000000',
'0100000026c058c08c4557df6e34c7de02656e0cbd6d764723cfc66808c1192800000000798a54b6a726d075c488cc745ccbbcdf77a4855b8b53356bae38791f8574169a85506a49ffff001d2f6a5fb10101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02d900ffffffff0100f2052a01000000434104d9cce85192f301169ed0666f3ea1ad5f5aa96212aa5262c1d062cc723a4374bae82425de3362abdae621877df33bb638cbe74bda4cbbbed788cce7703da6e1dcac00000000',
'0100000003da8ee59288435e53b260c26bba634075fb06b5835ce62e1feb615f00000000f6e68f7cce7f5d8f9c4765be05fb97c748e55a891bb4f7832d275c0c275f010190516a49ffff001d03d47d150101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d02db00ffffffff0100f2052a010000004341049a365a87ef453617543d628e0c6562e2355475fa1b9af9323cea4880f93beb3d535c6325653c52533543f2453a058e8ea3bc2d58b3b1980e3039ea070e721c1eac00000000',
'01000000ef366900fd58d8d80995065f9e1f229ecb097bfd5e4e86ee9331e9af00000000110cb7ea8c9c3033ae0d411bebe5f901a494b2af8e5ef486292a21f836950b03657a6a49ffff001d0036d3050101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010cffffffff0100f2052a01000000434104e567cd00f8747660310232e8cd2e0461c158f48ef55f2cfc436b22e45f1715cd4cc6cc0bfab2fc431c14c4d8c6e5b41d2941c9d130cb4894ef99c042392daaeeac00000000',
'010000000a140d07068a12d8dc0b00995dce36389485fb18899e90022a80fd6600000000fa24daa03db95f0e3a181c98e348336cfe94b50c4583e1e795df141bb1643605137c6a49ffff001d140612280101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0108ffffffff0100f2052a010000004341042bd821d9a4e5338eb1298173d75302f3403e778051419dacea3017702ee0d8b9386077b9ac64e36d9e55313c2c2485a5eb2832f9e84a80ca3a3630d717d8ce31ac00000000',
'01000000d77eae89abd94eb4b6ea46b05a711fbd02f49633234ebd28b2943e0f00000000be9e027c64485d4b00ab136704aacd49a966347d35828f8f97226b3907281ac1957f6a49ffff001d28121ef30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0101ffffffff0100f2052a0100000043410477a6b8f2c102e2c701c202347441f4d2520da07fb76669b58be3783218986d9ea61d272d7b2996662408661fc6dfe603b4a3a45ac007dc19c7f167bbb1f0e7b7ac00000000',
'010000008f617fec2ced454f8149c42f4c1f849f7d6792c842ea0737de77696f000000007a7a94c3844949abcedff5ff841f6c2c14a322787ccf59444fcb0fcb23e7ac84e97f6a49ffff001d0394987d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0101ffffffff0100f2052a010000004341045c11045de0c71f97d94a08aed515d5102884f7cf6660766e43278db031e7bc256d96af54c002de4d17028c67ca4681ebd8b48be57573238ea03d40dd24f4dd55ac00000000',
'01000000596a61fd813cf573a8f0333f0012ca0d328f8261731e430a7f3b8aba000000004c703a5af4849dd5bb01d47a4b9263568fd3e3e3515ab52a663c0b8ec9238a2667816a49ffff001d2dcb64620101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a010000004341049d2fb2e919b8a8f1662dd239fbe1ab2fa6cd08885a1aa717eb94484d3583fe0d4a80b7f43ee04188c6a248ec2ed49b38078910d1309ce14459c099778d29fd4aac00000000',
'01000000de6125d98bc1373a22b1e4014e25246202cf847092e22b753750731900000000d8d170cb03bd901495b9c0a9cd689f3ab78f11a1151af4fb3f698099ad26826a3d836a49ffff001d2acc2b620101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0103ffffffff0100f2052a010000004341040c5e37183d6161304c00a1bdf933822f8f0e7a219e70b931fc28cf3bc41e24c86133041f29f1d90dcdfceeaaacb7a203f5fc2bda7149e58c88de66adbe87c69fac00000000',
'01000000696aa63f0f22d9189c8536bb83b18737ae8336c25a67937f79957e5600000000982db9870a5e30d8f0b2a4ebccc5852b5a1e2413e9274c4947bfec6bdaa9b9d75bb76a49ffff001d2b719fdd0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0101ffffffff0100f2052a010000004341045da87c7b825c75ca17ade8bb5cdbcd27af4ce97373aa9848c0c84693ca857cf379e14c2ce61ea2aaee9450d0939e21bd26894aa6dcc808656fa9974dc296589eac00000000',
'0100000055bd840a78798ad0da853f68974f3d183e2bd1db6a842c1feecf222a00000000ff104ccb05421ab93e63f8c3ce5c2c2e9dbb37de2764b3a3175c8166562cac7d51b96a49ffff001d283e9e700201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0102ffffffff0100f2052a01000000434104d46c4968bde02899d2aa0963367c7a6ce34eec332b32e42e5f3407e052d64ac625da6f0718e7b302140434bd725706957c092db53805b821a85b23a7ac61725bac000000000100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1baded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000',
'01000000eea2d48d2fced4346842835c659e493d323f06d4034469a8905714d100000000f293c86973e758ccd11975fa464d4c3e8500979c95425c7be6f0a65314d2f2d5c9ba6a49ffff001d07a8f2260101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010effffffff0100f2052a01000000434104566824c312073315df60e5aa6490b6cdd80cd90f6a8f02e022ca3c2d52968c253006c9c602e03aed7be52d6ac55f5b557c72529bcc3899ace7eb4227153eb44bac00000000',
'01000000e0b4bf8d80026bbec5370a7bb06af54257a9679cef387fab8c53ecc900000000d578b0399b91624a8da53552035fecdd8f4ba2b9c69dfbda68d651fcb9f99c388dbc6a49ffff001d35464c5d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0106ffffffff0100f2052a010000004341044c718603ac207940cfce606b414b42b7cb10abbc714fe44f42f1c10a9990fb0f7202838cfb4fb8512f884ee3e2f47d55992d916880a2c6b46e254d86cd5952b3ac00000000',
'0100000054686892dd112de389acc225accc0118765f9c51c2ec9306f6abefe3000000005209a3e77e3679703f6b7f039fb9e054d7862e6eaad617e8e3f3d81d297e966015be6a49ffff001d21ac03230101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0115ffffffff0100f2052a01000000434104302f0a4688d627c1ac6bf879a805798a0d19e30bfad821704734e94b372849e0c067899275ccddf35743caa9513008d4214b33c31ca69f8da9b8f451ce2e8871ac00000000',
'01000000c585ac476b5878f0f1917826430b3daec278ef28c121c2ec9dd6e9dc000000008195110f0743ab43d4146798c962b8d101e325f4afdf8e936d15c2d51371b9cc7dc06a49ffff001d32915d0f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0113ffffffff0100f2052a0100000043410462cf7569d095ce66c37347e648e553ed5ecbbe145bff328d48a6853ce9394eb100a4bbd294b65308d71024f6eec175fecce22ed5e8acf0e8425c8362921184e8ac00000000',
'01000000c052286e779e7e48397d8c39fee98a3a5718c82dd6bc5b71eebed8a700000000903bb52cc35576a52e9d8f35a901073d33145b6f7be16aab1aa328e8153dfb4874c46a49ffff001d227dd9860101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0107ffffffff0100f2052a010000004341042d5b363ff00d911478e80cd101f7b0fca09126b9a6c98979fa15b251f3accebfd390ffe1132b97d73f37f79da7fdb747b2942d6f7f78c5da9401fd19120f00ceac00000000',
'01000000089d2d7196d00f737762fe82cfd86820c6e44bb2a9dd0f5fc1fc4afd000000005c3de10cb7cb6934b0050360980f9a37a95a8bf705edfbcbd3541591ad95c16466c96a49ffff001d093389660101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0112ffffffff0100f2052a010000004341046d1078f58caef4a77119f7762342d31b1a3ed6b6db4086cf612811fa4141e08df18c0d351727204f54ad428ae7b53007c4f3868a28f91144eabd0c42a9bacc45ac00000000',
'01000000586ebdf7f1df4885ca322a3021773c6281691f9450e8b8edddf3a91600000000885e36844a21fc6078813daa25b0c331523374924d21fd63b2e939ca3fb2b407edce6a49ffff001d0931f1080101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0116ffffffff0100f2052a01000000434104c8a740992e68870a1c31f4b5232ea3c6d21c03b989c28ad50ef2e68ef320780ee3d0c759a369a1d1d7e36b37be9ba4513fb2a3d0840373551a5437536a54db61ac00000000',
'01000000b17df64200cd007eea9b6ac2760f69693f83f19f00352bdd99970c48000000006bf4f1083c14982eee4239a9ac2c94c5672f7da3d763bb88488936a4ac7827672bd16a49ffff001d31f068f50101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0105ffffffff0100f2052a01000000434104c0207ee90e4f760d9839a5049189cc375cc25766588933e5828d6e4395e82522d51526af0c3f45f7c4337dffea82a4899f4277ba734359070fe6dd68abae87a6ac00000000',
'010000000d5ba629a32522334a8d40374b82505533f1f6117c8a906cbee06dca000000006bc3cfaf5339c2989f4892ab10bbbd5ed3db490712b5b72dfd29390ca89178c795d46a49ffff001d37f7ca970101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0116ffffffff0100f2052a010000004341046631c3c98324eb3ee7d9517638f89e8bd5cd020ec50e2e533dead534b4080e1c19c0feaf0e0e344baa5733d4a5085ecefdadc79d68deadffa2b1ea7f71365cdbac00000000',
'0100000070e12562bd8d2d8b2c1d298fbaa3bc4f005b4c41692850276b5aabc0000000004f1d6988f3aed27c24bcdd92ed9296afb0d58073f77da34caa8cc83718fe8cbd3cda6a49ffff001d13bfb72f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011fffffffff0100f2052a01000000434104547bd2940927dfa17ecf168496be4a9ad452784750addf355c0d9a43f12b98404376c7b062d2eb739d481da106f96a44b877bbb8203e204c9798c22db6eca7c0ac00000000',
'01000000f2c8a8d2af43a9cd05142654e56f41d159ce0274d9cabe15a20eefb500000000366c2a0915f05db4b450c050ce7165acd55f823fee51430a8c993e0bdbb192ede5dc6a49ffff001d192d3f2f0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0128ffffffff0100f2052a0100000043410435f0d8366085f73906a48309728155532f24293ea59fe0b33a245c4b8d75f82c3e70804457b7f49322aa822196a7521e4931f809d7e489bccb4ff14758d170e5ac000000000100000001169e1e83e930853391bc6f35f605c6754cfead57cf8387639d3b4096c54f18f40100000048473044022027542a94d6646c51240f23a76d33088d3dd8815b25e9ea18cac67d1171a3212e02203baf203c6e7b80ebd3e588628466ea28be572fe1aaa3f30947da4763dd3b3d2b01ffffffff0200ca9a3b00000000434104b5abd412d4341b45056d3e376cd446eca43fa871b51961330deebd84423e740daa520690e1d9e074654c59ff87b408db903649623e86f1ca5412786f61ade2bfac005ed0b20000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000',
'01000000e5c6af65c46bd826723a83c1c29d9efa189320458dc5298a0c8655dc0000000030c2a0d34bfb4a10d35e8166e0f5a37bce02fc1b85ff983739a191197f010f2f40df6a49ffff001d2ce7ac9e0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0129ffffffff0100f2052a01000000434104b10dd882c04204481116bd4b41510e98c05a869af51376807341fc7e3892c9034835954782295784bfc763d9736ed4122c8bb13d6e02c0882cb7502ce1ae8287ac000000000100000001be141eb442fbc446218b708f40caeb7507affe8acff58ed992eb5ddde43c6fa1010000004847304402201f27e51caeb9a0988a1e50799ff0af94a3902403c3ad4068b063e7b4d1b0a76702206713f69bd344058b0dee55a9798759092d0916dbbc3e592fee43060005ddc17401ffffffff0200e1f5050000000043410401518fa1d1e1e3e162852d68d9be1c0abad5e3d6297ec95f1f91b909dc1afe616d6876f92918451ca387c4387609ae1a895007096195a824baf9c38ea98c09c3ac007ddaac0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000',
'010000005dad27b228dac0272b484c390c32d95aaa38e75ba9f74ffc1178485400000000292571e03a414e493790a4bc212dac24d5d6cd5655cbefb4404dd8513b9825df6ee46a49ffff001d13fdd3c00201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d012affffffff0100f2052a010000004341041915d670aa55621ed1c438477c5da654344e10eecea90ede3a048103fc3cf4beccda70ddf71ce55a5083b7bade4319d159b44374234590e9296cbe08c67774b2ac00000000010000000173805864da01f15093f7837607ab8be7c3705e29a9d4a12c9116d709f8911e590100000049483045022052ffc1929a2d8bd365c6a2a4e3421711b4b1e1b8781698ca9075807b4227abcb0221009984107ddb9e3813782b095d0d84361ed4c76e5edaf6561d252ae162c2341cfb01ffffffff0200e1f50500000000434104baa9d36653155627c740b3409a734d4eaf5dcca9fb4f736622ee18efcf0aec2b758b2ec40db18fbae708f691edb2d4a2a3775eb413d16e2e3c0f8d4c69119fd1ac009ce4a60000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000',
'010000008898a2630f7fe0b924cf5b986c8a8da2b2959a2d6faf8b033f516ef400000000bf20f3ca28996db0f2f884ef15a03ff53ba6ad5669ed4e14c861d5dd56a16172e5e76a49ffff001d2e11190a0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0102ffffffff0100f2052a01000000434104b03e3014aba0d36e8444e20f93c0b03de4e5eb18aad8311191bce383cf84fa4dc76c28bb3d8e95b5e2267a307d87843db3fc3f7cb772f1dd16234295f08ad199ac00000000',
'0100000046240a842144b1583595716102ffc02afede6696ee0f763c4e2f86ff00000000eedc47affffe3a58b9e90e1b82013b695a1ee4db3b40f0a5cf00e092cb41df315aea6a49ffff001d1b7d5c800101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0111ffffffff0100f2052a01000000434104002170255782f1405cc0fd2253d15e8561930efcc0aaa6481487f2e799be7b3cca68f50628c10ae963e044c4eb66c1055a84b303fa9fb068700e2af070cfe07eac00000000',
'01000000ef73923157421b892f07214e80eebf6b0a9503f8e6673bf6f38d4be2000000002f7c7b0c58bd33eef2b77ffd707cc44cdaa2524af9606d29cd03649d3491cb21b5ea6a49ffff001db033dd000101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0102ffffffff0100f2052a01000000434104a8c5bdf4b5a4d77da3782aa5434b8418811b1dbe269fb05614c556c58d53a927c6ca41909ae493ac68b5e9495900ff378805b638e12b582f60a694b3bf892323ac00000000',
'01000000bed482ccb42bf5c20d00a5bb9f7d688e97b94c622a7f42f3aaf23f8b000000001cafcb3e4cad2b4eed7fb7fcb7e49887d740d66082eb45981194c532b58d475258ee6a49ffff001d1bc0e2320201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011affffffff0100f2052a0100000043410435d66d6cef63a3461110c810975b8816308372b58274d88436a974b478d98d8d972f7233ea8a5242d151de9d4b1ac11a6f7f8460e8f9b146d97c7bad980cc5ceac000000000100000001ba91c1d5e55a9e2fab4e41f55b862a73b24719aad13a527d169c1fad3b63b5120000000048473044022041d56d649e3ca8a06ffc10dbc6ba37cb958d1177cc8a155e83d0646cd5852634022047fd6a02e26b00de9f60fb61326856e66d7a0d5e2bc9d01fb95f689fc705c04b01ffffffff0100e1f50500000000434104fe1b9ccf732e1f6b760c5ed3152388eeeadd4a073e621f741eb157e6a62e3547c8e939abbd6a513bf3a1fbe28f9ea85a4e64c526702435d726f7ff14da40bae4ac00000000',
'010000005d496e62546b36558c139bd429d277bd00f3ec379888169115e2cdb200000000375bd2a0ab09dd7911c8bedf47834babd51a929d7226d8cd1908f47c0a496d3aa9ef6a49ffff001d197c3bd90101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0103ffffffff0100f2052a0100000043410449fff9665bfda43017a27b3d32e986378befdd6fa5d4eb097626701ace807a2b3a43e74375dce4ed9028b3b62ba8485358cd48967e854a857a38ecdbfe5b62f8ac00000000',
'0100000093f117e1d73ee60eb67360d3be4243c14e5f40b8cba7f389b30d949a000000005e6f2b29966c399124c09a17e7922e17491bdc20d877a05ec5193c5965696c5fa5f06a49ffff001d26aab3660101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010bffffffff0100f2052a0100000043410468b02e8cc3c5905dff8f831cdcf8dc47a093f820163bb62efc12bd0dd6cdd63ec6dcc7c121e4fb7795f5ae241d00167390eeb0fed7d6154e2a21da50d8f977a8ac00000000',
'010000007829db45b94dc80ce11ec51d05710cbeacaac839c3560cc1cc0db38b00000000352f966d224cbdbe797d75831458c63e93019acd98e2f704f47171d9b54c7503ddf16a49ffff001d05d324e30101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010affffffff0100f2052a010000004341043c13da0878491a5ec1b31490c22c79cab18b43feff1cb0bf7a8c39c5446f60dac4ac0f1a62e230aaf72368fdb57e9cb3eb58fd019600ff41f3df0192f1da2e19ac00000000',
'0100000035158869187e847cda0d32280015060f87141c57f4d63f2c59c3317d00000000b1ad99b3de17d2b87f6234843deb5a49454bc889498d24e3cac05052c15000ebfef16a49ffff001d25bbd0b00101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0121ffffffff0100f2052a01000000434104ef870ee124a68e999508eda763ddf0152c2684a9d74d7f07d0af3b8c7192e98277d0dc3247f10b75fc32207f71cfeadb01337d0c8c540970679102a5f9547c76ac00000000',
'01000000ab3249dbb0ec7a1ecf9383cfcf20e89acebc9124313f3f4f61f30b5200000000bd444a225b8474cc205ab13fcad732afd5967fb54814696b638f07d4a63d29165cf86a49ffff001d1f9afafd0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010dffffffff0100f2052a01000000434104688a6734155a81a8fd9093a55b16bc5c50242f7ead7e0070c91db438fd15f80bd2841af71ee62213bd2022276edf646957546537a54ceb3a7b478e7c07be213bac00000000',
'01000000360bc1a14a967fd75d7ff6fbf412f40e25efb6ed5ddc174f10a6350a000000006459a7a18e345d1297ad46328d519f5321607f4f68842eea6338443c2c0e47107ff96a49ffff001d1b1cc7ee0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d010fffffffff0100f2052a01000000434104a8cb399580117eb940d0fe7d1f7ec3f8bd0b13b9cc3d2e01df5072a000534f7713a7d57dc06e4538a62784688e83ec77a062cc2d404ac9f9a70774935edce4cbac00000000',
'0100000096960b9108d868524f9e5dc8445420694699b4964a9143c5becc3c83000000002914dd3668b3b9a3b8be41abd4b19891326376ff0bbf6cb267586d43f5d1eda704fb6a49ffff001d35fdbf820101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0111ffffffff0100f2052a0100000043410436a50bc64a0d10e2f7cbdac937aa2b5c01b4d275026b1cd95d71960a3fb95599155e43fb7e27ca3ac3b9c423be8e8a3aa1ab81873d15819e0d1fa8dbacde24c1ac00000000',
'01000000e73ec9fc17a74e1bf3cc13b864f43aebb3cc38688ead29beb70b689500000000df44c7db330d4f72049206aec20ba3ba0bc40fe8ef869f6424f20ee13d3e204969006b49ffff001d31626c7b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011affffffff0100f2052a0100000043410427751e9318a2aa1120d60e6cdc0cd3f6795a6fc5e2f7e7fc678c7fc7339c2fc844e6860d49f49ae49aedf527268b3dd62f2087dfb7e2e2a2c0cc976670383ad1ac00000000',
'01000000fec8cfb1d8ec7924ec65f74c4e3cff4cbb43f6ec639c5c9dae97b968000000002192402c287b8d63b6f417d5285e42405727c06d7f217251c8328c2821353c3f9e036b49ffff001d32f918300101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011cffffffff0100f2052a010000004341045111f442ff6e4a80d31a127a77e9bfa33652bb719d1df7a59cdbae51de312d1a80ddd343f579ca4b2146a47b492ce6600ec53a9e5a7936e9af16ccf1dd7072ffac00000000',
'0100000010e3bf77a047b400f882ebc26190c347074a6b5a7aee8fd67761209100000000cb2d57f524b9dbcc51a990a86fc9c07943066324a6cf4c2ae873b4698d23987a90086b49ffff001d25e04f950101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d011fffffffff0100f2052a0100000043410441f84af9b0d434a654d041e22314144621b395f166e01abb2df9d603d7d838fc44a2a52276fe747833d737e80edd5ba0b9d5b5000985e5392463337e129ea676ac00000000',
'0100000014b89d2cb8c442342ae166f29b09ff9681b2d681dc8692f5371927ef000000000c25033f4632230ab97c43b37fb6a8fe25decab5642ca31ab3a2f9709b115384e10b6b49ffff001d1d7d9d2f0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0124ffffffff0100f2052a010000004341040b91eef9fcaea65e13c2821ae70f1aa428e0c9f2406f371f862309247f8a842ad7d9b72c95d7f556da274c9afdf6cfd238be861e77cf760fd99de3f67dff9142ac00000000',
'0100000043e1769206b487a5177b71abaaeccfacefd5654ab0767146c15854e80000000028fa7465bf717c2b27e570363ab4e1c10c4abfad031beb60543062df39b99477350e6b49ffff001d18ed76890101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0130ffffffff0100f2052a010000004341044ab32bf7291275b7e615a79c9a22a63631a131bb926db5c1fc6c4986ad02800bbf19c275377da071bb37fbbc50cf7a2e95b5b8ab68240ae2ec9a9c094a771359ac00000000',
'01000000eb68047fb29d78480b567ef6b76be556a2ec975656424508cc1c69b700000000bad58718fc3c6f5474918f06c44400c70b4c86d55a3f3ca3493b1d40c2061f2ba00f6b49ffff001d064b3a6d0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0138ffffffff0100f2052a010000004341045e071dedd1ed03721c6e9bba28fc276795421a378637fb41090192bb9f208630dcbac5862a3baeb9df3ca6e4e256b7fd2404824c20198ca1b004ee2197866433ac00000000',
]
|
n00bsys0p/altcoin-abe
|
test/test_btc200.py
|
Python
|
agpl-3.0
| 98,158
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2006 ACYSOS S.L. (http://acysos.com) All Rights Reserved.
# Pedro Tarrafeta <pedro@acysos.com>
# Copyright (c) 2008 Pablo Rocandio. All Rights Reserved.
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <jesteve@zikzakmedia.com>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# $Id$
#
#    Fixed for the standard TinyERP 4.2.0 installation: Zikzakmedia S.L. 2008
# Jordi Esteve <jesteve@zikzakmedia.com>
#
#    Added remittance accounts and payment types. 2008
# Pablo Rocandio <salbet@gmail.com>
#
#    Rewritten for OpenERP 5.0.0 on top of account_payment_extension: Zikzakmedia S.L. 2009
# Jordi Esteve <jesteve@zikzakmedia.com>
#
#    Added extra CSB 19 concepts: Acysos S.L. 2011
# Ignacio Ibeas <ignacio@acysos.com>
#
#    Refactoring. Acysos S.L. (http://www.acysos.com) 2012
# Ignacio Ibeas <ignacio@acysos.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from datetime import datetime
from tools.translate import _
from log import *
class csb_19(osv.osv):
_name = 'csb.19'
_auto = False
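    # Builds Spanish CSB 19 ("norma 19", direct debit) remittance files. A file
    # is a sequence of fixed-width records: presenter header (5180), ordering
    # party header(s) (5380), mandatory individual records (5680), optional
    # records (5681-5686), and the ordering party (5880) and general (5980) totals.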
def _cabecera_presentador_19(self,cr,uid):
converter = self.pool.get('payment.converter.spain')
texto = '5180'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += datetime.today().strftime('%d%m%y')
texto += 6*' '
texto += converter.to_ascii(cr,uid,self.order.mode.nombre).ljust(40)
texto += 20*' '
cc = converter.digits_only(cr,uid,self.order.mode.bank_id.acc_number)
texto += cc[0:8]
texto += 66*' '
texto += '\r\n'
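        # 162 data characters plus CRLF: every CSB 19 record must be 164 bytes long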
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Cabecera presentador 19', texto), True)
return texto
def _cabecera_ordenante_19(self,cr,uid, recibo=None):
converter = self.pool.get('payment.converter.spain')
texto = '5380'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += datetime.today().strftime('%d%m%y')
if self.order.date_prefered == 'due':
assert recibo
if recibo.get('date'):
date_cargo = datetime.strptime(recibo['date'],'%Y-%m-%d')
elif recibo.get('ml_maturity_date'):
date_cargo = datetime.strptime(recibo['ml_maturity_date'],'%Y-%m-%d')
else:
date_cargo = datetime.today()
elif self.order.date_prefered == 'now':
date_cargo = datetime.today()
else: # self.order.date_prefered == 'fixed'
if not self.order.date_scheduled:
raise Log(_('User error:\n\nFixed date of charge has not been defined.'), True)
date_cargo = datetime.strptime(self.order.date_scheduled,'%Y-%m-%d')
texto += date_cargo.strftime('%d%m%y')
texto += converter.to_ascii(cr,uid,self.order.mode.nombre).ljust(40)
cc = converter.digits_only(cr,uid,self.order.mode.bank_id.acc_number)
texto += cc[0:20]
texto += 8*' '
texto += '01'
texto += 64*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Cabecera ordenante 19', texto), True)
return texto
def _individual_obligatorio_19(self,cr,uid, recibo):
converter = self.pool.get('payment.converter.spain')
texto = '5680'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += str(recibo['name'])[-12:].zfill(12)
nombre = converter.to_ascii(cr,uid,recibo['partner_id'].name)
texto += nombre[0:40].ljust(40)
ccc = recibo['bank_id'] and recibo['bank_id'].acc_number or ''
ccc = converter.digits_only(cr,uid,ccc)
texto += str(ccc)[0:20].zfill(20)
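        # amount in cents (|amount| * 100), zero-padded to 10 digits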
importe = int(round(abs(recibo['amount'])*100,0))
texto += str(importe).zfill(10)
        ###### Return reference (only valid when receipts are not grouped) ######
if len(recibo['ml_inv_ref']) == 1:
texto += str(recibo['ml_inv_ref'][0].id)[-16:].zfill(16)
else:
texto += 16*' '
######################################################################
concepto = ''
if recibo['communication']:
concepto = recibo['communication']
texto += converter.to_ascii(cr,uid,concepto)[0:48].ljust(48)
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual obligatorio 19', texto), True)
return texto
def _individual_opcional_19(self,cr,uid, recibo):
"""Para poner el segundo texto de comunicación (en lugar de nombre, domicilio y localidad opcional)"""
converter = self.pool.get('payment.converter.spain')
texto = '5686'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += str(recibo['name'])[-12:].zfill(12)
texto += converter.to_ascii(cr,uid,recibo['communication2'])[0:115].ljust(115)
        texto += '00000' # Dummy postal code field
texto += 14*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual opcional 19', texto), True)
return texto
def _extra_opcional_19(self,cr,uid, recibo):
"""Para poner los 15 conceptos opcional de los registros 5681-5685 utilizando las lineas de facturación (Máximo 15 lineas)"""
converter = self.pool.get('payment.converter.spain')
res = {}
res['texto'] = ''
res['total_lines'] = 0
counter = 1
registry_counter = 1
length = 0
for invoice in recibo['ml_inv_ref']:
if invoice:
length += len(invoice.invoice_line)
for invoice in recibo['ml_inv_ref']:
if invoice:
for invoice_line in invoice.invoice_line:
if counter <= length:
if counter <= 15:
if (counter-1)%3 == 0:
res['texto'] += '568'+str(registry_counter)
res['texto'] += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
res['texto'] += str(recibo['name']).zfill(12)
price = ' %(#).2f ' % {'#' : invoice_line.price_subtotal}
res['texto'] += converter.to_ascii(cr,uid,invoice_line.name)[0:(40-len(price))].ljust(40-len(price))
res['texto'] += converter.to_ascii(cr,uid,price.replace('.',','))
if counter % 3 == 0:
res['texto'] += 14*' '+'\r\n'
res['total_lines'] += 1
if len(res['texto']) != registry_counter*164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual opcional 19', res['texto']), True)
registry_counter += 1
elif counter == length:
res['texto'] += (3-(counter % 3))*40*' '+14*' '+'\r\n'
res['total_lines'] += 1
if len(res['texto']) != registry_counter*164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual opcional 19', res['texto']), True)
counter += 1
return res
def _total_ordenante_19(self,cr,uid):
texto = '5880'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += 72*' '
totalordenante = int(round(abs(self.group_amount) * 100,0))
texto += str(totalordenante).zfill(10)
texto += 6*' '
texto += str(self.group_payments).zfill(10)
texto += str(self.group_payments + self.group_optional_lines + 2).zfill(10)
texto += 38*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Total ordenante 19', texto), True)
return texto
def _total_general_19(self,cr,uid):
texto = '5980'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += 52*' '
if self.order.date_prefered == 'due':
            # One ordering party (ordenante) per payment
texto += str(self.total_payments).zfill(4)
else:
            # A single ordering party (ordenante)
texto += '0001'
texto += 16*' '
totalremesa = int(round(abs(self.order.total) * 100,0))
texto += str(totalremesa).zfill(10)
texto += 6*' '
texto += str(self.total_payments).zfill(10)
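        # Record count: in due-date mode every payment has its own ordering
        # header, detail and ordering total (3 records each) plus the presenter
        # header and this general total; otherwise the 4 fixed header/total
        # records are shared by all payments.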
if self.order.date_prefered == 'due':
            # One ordering party (ordenante) per payment
texto += str(self.total_payments*3 + self.total_optional_lines + 2).zfill(10)
else:
            # A single ordering party (ordenante)
texto += str(self.total_payments + self.total_optional_lines + 4).zfill(10)
texto += 38*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Total general 19', texto), True)
return texto
def create_file(self, cr, uid, order, lines, context):
self.order = order
txt_remesa = ''
self.total_payments = 0
self.total_optional_lines = 0
self.group_payments = 0
self.group_optional_lines = 0
self.group_amount = 0.0
txt_remesa += self._cabecera_presentador_19(cr,uid)
if order.date_prefered == 'due':
            # One ordering party (ordenante) per payment
for recibo in lines:
self.group_payments = 0
self.group_optional_lines = 0
self.group_amount = 0.0
txt_remesa += self._cabecera_ordenante_19(cr,uid,recibo)
txt_remesa += self._individual_obligatorio_19(cr,uid,recibo)
self.total_payments += 1
self.group_payments += 1
self.group_amount += abs( recibo['amount'] )
if order.mode.csb19_extra_concepts:
extra_concepts = self._extra_opcional_19(cr,uid,recibo)
txt_remesa += extra_concepts['texto']
self.total_optional_lines += extra_concepts['total_lines']
self.group_optional_lines += extra_concepts['total_lines']
if recibo['communication2']:
txt_remesa += self._individual_opcional_19(cr,uid,recibo)
self.total_optional_lines += 1
self.group_optional_lines += 1
txt_remesa += self._total_ordenante_19(cr,uid)
else:
            # A single ordering party (ordenante)
txt_remesa += self._cabecera_ordenante_19(cr,uid)
self.group_payments = 0
self.group_optional_lines = 0
self.group_amount = 0.0
for recibo in lines:
txt_remesa += self._individual_obligatorio_19(cr,uid,recibo)
self.total_payments += 1
self.group_payments += 1
self.group_amount += abs( recibo['amount'] )
if order.mode.csb19_extra_concepts:
extra_concepts = self._extra_opcional_19(cr,uid,recibo)
txt_remesa += extra_concepts['texto']
self.total_optional_lines += extra_concepts['total_lines']
self.group_optional_lines += extra_concepts['total_lines']
if recibo['communication2']:
txt_remesa += self._individual_opcional_19(cr,uid,recibo)
self.total_optional_lines += 1
self.group_optional_lines += 1
txt_remesa += self._total_ordenante_19(cr,uid)
txt_remesa += self._total_general_19(cr,uid)
return txt_remesa
csb_19()
|
jmesteve/openerpseda
|
openerp/addons/l10n_es_payment_order/wizard/csb_19.py
|
Python
|
agpl-3.0
| 13,523
|
import radon.complexity
import radon.visitors
from coalib.bears.LocalBear import LocalBear
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import typed_list
class RadonBear(LocalBear):
def run(self, filename, file,
radon_ranks_info: typed_list(str)=(),
radon_ranks_normal: typed_list(str)=('C', 'D'),
radon_ranks_major: typed_list(str)=('E', 'F')):
"""
Uses radon to compute complexity of a given file.
:param radon_ranks_info: The ranks (given by radon) to
treat as severity INFO.
:param radon_ranks_normal: The ranks (given by radon) to
treat as severity NORMAL.
:param radon_ranks_major: The ranks (given by radon) to
treat as severity MAJOR.
"""
severity_map = {
RESULT_SEVERITY.INFO: radon_ranks_info,
RESULT_SEVERITY.NORMAL: radon_ranks_normal,
RESULT_SEVERITY.MAJOR: radon_ranks_major
}
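        # cc_visit parses the file and yields one result per function, method
        # and class found; cc_rank maps the numeric complexity to a letter rank
        # from A (simplest) to F (most complex).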
for visitor in radon.complexity.cc_visit("".join(file)):
rank = radon.complexity.cc_rank(visitor.complexity)
severity = None
for result_severity, rank_list in severity_map.items():
if rank in rank_list:
severity = result_severity
if severity is None:
continue
visitor_range = SourceRange.from_values(
filename, visitor.lineno, visitor.col_offset, visitor.endline)
message = "{} has a cyclomatic complexity of {}".format(
visitor.name, rank)
yield Result(self, message, severity=severity,
affected_code=(visitor_range,))
|
sims1253/coala-bears
|
bears/python/RadonBear.py
|
Python
|
agpl-3.0
| 1,902
|
"""
Instructor Views
"""
## NOTE: This is the code for the legacy instructor dashboard
## We are no longer supporting this file or accepting changes into it.
from contextlib import contextmanager
import csv
import json
import logging
import os
import re
import requests
from collections import defaultdict, OrderedDict
from markupsafe import escape
from requests.status_codes import codes
from StringIO import StringIO
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpResponse
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.cache import cache_control
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.utils import timezone
from xmodule_modifiers import wrap_xblock, request_token
import xmodule.graders as xmgraders
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.html_module import HtmlDescriptor
from opaque_keys import InvalidKeyError
from lms.lib.xblock.runtime import quote_slashes
from submissions import api as sub_api # installed from the edx-submissions repository
from bulk_email.models import CourseEmail, CourseAuthorization
from courseware import grades
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_cms_course_link
from student.roles import (
CourseStaffRole, CourseInstructorRole, CourseBetaTesterRole, GlobalStaff
)
from courseware.models import StudentModule
from django_comment_common.models import (
Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA
)
from django_comment_client.utils import has_forum_access
from instructor.offline_gradecalc import student_grades, offline_grades_available
from instructor.views.tools import strip_if_string, bulk_email_is_enabled_for_course
from instructor_task.api import (
get_running_instructor_tasks,
get_instructor_task_history,
submit_rescore_problem_for_all_students,
submit_rescore_problem_for_student,
submit_reset_problem_attempts_for_all_students,
submit_bulk_course_email
)
from instructor_task.views import get_task_completion_info
from edxmako.shortcuts import render_to_response, render_to_string
from class_dashboard import dashboard_data
from psychometrics import psychoanalyze
from student.models import (
CourseEnrollment,
CourseEnrollmentAllowed,
unique_id_for_user,
anonymous_id_for_user
)
import track.views
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from django.utils.translation import ugettext as _
from microsite_configuration import microsite
from opaque_keys.edx.locations import i4xEncoder
log = logging.getLogger(__name__)
# internal commands for managing forum roles:
FORUM_ROLE_ADD = 'add'
FORUM_ROLE_REMOVE = 'remove'
# For determining if a shibboleth course
SHIBBOLETH_DOMAIN_PREFIX = 'shib:'
def split_by_comma_and_whitespace(a_str):
"""
Return string a_str, split by , or whitespace
"""
return re.split(r'[\s,]', a_str)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def instructor_dashboard(request, course_id):
"""Display the instructor dashboard for a course."""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(request.user, 'staff', course_key, depth=None)
instructor_access = has_access(request.user, 'instructor', course) # an instructor can manage staff lists
forum_admin_access = has_forum_access(request.user, course_key, FORUM_ROLE_ADMINISTRATOR)
msg = ''
email_msg = ''
email_to_option = None
email_subject = None
html_message = ''
show_email_tab = False
problems = []
plots = []
datatable = {}
# the instructor dashboard page is modal: grades, psychometrics, admin
# keep that state in request.session (defaults to grades mode)
idash_mode = request.POST.get('idash_mode', '')
idash_mode_key = u'idash_mode:{0}'.format(course_id)
if idash_mode:
request.session[idash_mode_key] = idash_mode
else:
idash_mode = request.session.get(idash_mode_key, 'Grades')
enrollment_number = CourseEnrollment.num_enrolled_in(course_key)
# assemble some course statistics for output to instructor
def get_course_stats_table():
datatable = {
'header': ['Statistic', 'Value'],
'title': _('Course Statistics At A Glance'),
}
data = [['# Enrolled', enrollment_number]]
data += [['Date', timezone.now().isoformat()]]
data += compute_course_stats(course).items()
if request.user.is_staff:
for field in course.fields.values():
if getattr(field.scope, 'user', False):
continue
data.append([
field.name,
json.dumps(field.read_json(course), cls=i4xEncoder)
])
datatable['data'] = data
return datatable
def return_csv(func, datatable, file_pointer=None):
"""Outputs a CSV file from the contents of a datatable."""
if file_pointer is None:
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = (u'attachment; filename={0}'.format(func)).encode('utf-8')
else:
response = file_pointer
writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
encoded_row = [unicode(s).encode('utf-8') for s in datatable['header']]
writer.writerow(encoded_row)
for datarow in datatable['data']:
# 's' here may be an integer, float (eg score) or string (eg student name)
encoded_row = [
# If s is already a UTF-8 string, trying to make a unicode
# object out of it will fail unless we pass in an encoding to
# the constructor. But we can't do that across the board,
# because s is often a numeric type. So just do this.
s if isinstance(s, str) else unicode(s).encode('utf-8')
for s in datarow
]
writer.writerow(encoded_row)
return response
def get_student_from_identifier(unique_student_identifier):
"""Gets a student object using either an email address or username"""
unique_student_identifier = strip_if_string(unique_student_identifier)
msg = ""
try:
if "@" in unique_student_identifier:
student = User.objects.get(email=unique_student_identifier)
else:
student = User.objects.get(username=unique_student_identifier)
msg += _("Found a single student. ")
except User.DoesNotExist:
student = None
msg += "<font color='red'>{text}</font>".format(
text=_("Couldn't find student with that email or username.")
)
return msg, student
# process actions from form POST
action = request.POST.get('action', '')
use_offline = request.POST.get('use_offline_grades', False)
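    # The legacy dashboard dispatches on the literal label of the submitted
    # button ("action"); most branches below match it with substring tests.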
if settings.FEATURES['ENABLE_MANUAL_GIT_RELOAD']:
if 'GIT pull' in action:
data_dir = course.data_dir
log.debug('git pull {0}'.format(data_dir))
gdir = settings.DATA_DIR / data_dir
if not os.path.exists(gdir):
msg += "====> ERROR in gitreload - no such directory {0}".format(gdir)
else:
cmd = "cd {0}; git reset --hard HEAD; git clean -f -d; git pull origin; chmod g+w course.xml".format(gdir)
msg += "git pull on {0}:<p>".format(data_dir)
msg += "<pre>{0}</pre></p>".format(escape(os.popen(cmd).read()))
track.views.server_track(request, "git-pull", {"directory": data_dir}, page="idashboard")
if 'Reload course' in action:
log.debug('reloading {0} ({1})'.format(course_key, course))
try:
data_dir = course.data_dir
modulestore().try_load_course(data_dir)
msg += "<br/><p>Course reloaded from {0}</p>".format(data_dir)
track.views.server_track(request, "reload", {"directory": data_dir}, page="idashboard")
course_errors = modulestore().get_course_errors(course.id)
msg += '<ul>'
for cmsg, cerr in course_errors:
msg += "<li>{0}: <pre>{1}</pre>".format(cmsg, escape(cerr))
msg += '</ul>'
except Exception as err: # pylint: disable=broad-except
msg += '<br/><p>Error: {0}</p>'.format(escape(err))
if action == 'Dump list of enrolled students' or action == 'List enrolled students':
log.debug(action)
datatable = get_student_grade_summary_data(request, course, get_grades=False, use_offline=use_offline)
datatable['title'] = _('List of students enrolled in {course_key}').format(course_key=course_key.to_deprecated_string())
track.views.server_track(request, "list-students", {}, page="idashboard")
elif 'Dump Grades' in action:
log.debug(action)
datatable = get_student_grade_summary_data(request, course, get_grades=True, use_offline=use_offline)
datatable['title'] = _('Summary Grades of students enrolled in {course_key}').format(course_key=course_key.to_deprecated_string())
track.views.server_track(request, "dump-grades", {}, page="idashboard")
elif 'Dump all RAW grades' in action:
log.debug(action)
datatable = get_student_grade_summary_data(request, course, get_grades=True,
get_raw_scores=True, use_offline=use_offline)
datatable['title'] = _('Raw Grades of students enrolled in {course_key}').format(course_key=course_key)
track.views.server_track(request, "dump-grades-raw", {}, page="idashboard")
elif 'Download CSV of all student grades' in action:
track.views.server_track(request, "dump-grades-csv", {}, page="idashboard")
return return_csv('grades_{0}.csv'.format(course_key.to_deprecated_string()),
get_student_grade_summary_data(request, course, use_offline=use_offline))
elif 'Download CSV of all RAW grades' in action:
track.views.server_track(request, "dump-grades-csv-raw", {}, page="idashboard")
return return_csv('grades_{0}_raw.csv'.format(course_key.to_deprecated_string()),
get_student_grade_summary_data(request, course, get_raw_scores=True, use_offline=use_offline))
elif 'Download CSV of answer distributions' in action:
track.views.server_track(request, "dump-answer-dist-csv", {}, page="idashboard")
return return_csv('answer_dist_{0}.csv'.format(course_key.to_deprecated_string()), get_answers_distribution(request, course_key))
elif 'Dump description of graded assignments configuration' in action:
# what is "graded assignments configuration"?
track.views.server_track(request, "dump-graded-assignments-config", {}, page="idashboard")
msg += dump_grading_context(course)
elif "Rescore ALL students' problem submissions" in action:
problem_location_str = strip_if_string(request.POST.get('problem_for_all_students', ''))
try:
problem_location = course_key.make_usage_key_from_deprecated_string(problem_location_str)
instructor_task = submit_rescore_problem_for_all_students(request, problem_location)
if instructor_task is None:
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for rescoring "{problem_url}".').format(
problem_url=problem_location_str
)
)
else:
track.views.server_track(
request,
"rescore-all-submissions",
{
"problem": problem_location_str,
"course": course_key.to_deprecated_string()
},
page="idashboard"
)
except (InvalidKeyError, ItemNotFoundError) as err:
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for rescoring "{problem_url}": problem not found.').format(
problem_url=problem_location_str
)
)
except Exception as err: # pylint: disable=broad-except
log.error("Encountered exception from rescore: {0}".format(err))
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for rescoring "{url}": {message}.').format(
url=problem_location_str, message=err.message
)
)
elif "Reset ALL students' attempts" in action:
problem_location_str = strip_if_string(request.POST.get('problem_for_all_students', ''))
try:
problem_location = course_key.make_usage_key_from_deprecated_string(problem_location_str)
instructor_task = submit_reset_problem_attempts_for_all_students(request, problem_location)
if instructor_task is None:
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for resetting "{problem_url}".').format(problem_url=problem_location_str)
)
else:
track.views.server_track(
request,
"reset-all-attempts",
{
"problem": problem_location_str,
"course": course_key.to_deprecated_string()
},
page="idashboard"
)
except (InvalidKeyError, ItemNotFoundError) as err:
log.error('Failure to reset: unknown problem "{0}"'.format(err))
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for resetting "{problem_url}": problem not found.').format(
problem_url=problem_location_str
)
)
except Exception as err: # pylint: disable=broad-except
log.error("Encountered exception from reset: {0}".format(err))
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for resetting "{url}": {message}.').format(
url=problem_location_str, message=err.message
)
)
elif "Show Background Task History for Student" in action:
# put this before the non-student case, since the use of "in" will cause this to be missed
unique_student_identifier = request.POST.get('unique_student_identifier', '')
message, student = get_student_from_identifier(unique_student_identifier)
if student is None:
msg += message
else:
problem_location_str = strip_if_string(request.POST.get('problem_for_student', ''))
try:
problem_location = course_key.make_usage_key_from_deprecated_string(problem_location_str)
except InvalidKeyError:
msg += '<font color="red">{text}</font>'.format(
text=_('Could not find problem location "{url}".').format(
url=problem_location_str
)
)
else:
message, datatable = get_background_task_table(course_key, problem_location, student)
msg += message
elif "Show Background Task History" in action:
problem_location_str = strip_if_string(request.POST.get('problem_for_all_students', ''))
try:
problem_location = course_key.make_usage_key_from_deprecated_string(problem_location_str)
except InvalidKeyError:
msg += '<font color="red">{text}</font>'.format(
text=_('Could not find problem location "{url}".').format(
url=problem_location_str
)
)
else:
message, datatable = get_background_task_table(course_key, problem_location)
msg += message
elif ("Reset student's attempts" in action or
"Delete student state for module" in action or
"Rescore student's problem submission" in action):
# get the form data
unique_student_identifier = request.POST.get(
'unique_student_identifier', ''
)
problem_location_str = strip_if_string(request.POST.get('problem_for_student', ''))
try:
module_state_key = course_key.make_usage_key_from_deprecated_string(problem_location_str)
except InvalidKeyError:
msg += '<font color="red">{text}</font>'.format(
text=_('Could not find problem location "{url}".').format(
url=problem_location_str
)
)
else:
# try to uniquely id student by email address or username
message, student = get_student_from_identifier(unique_student_identifier)
msg += message
student_module = None
if student is not None:
# Reset the student's score in the submissions API
# Currently this is used only by open assessment (ORA 2)
# We need to do this *before* retrieving the `StudentModule` model,
# because it's possible for a score to exist even if no student module exists.
if "Delete student state for module" in action:
try:
sub_api.reset_score(
anonymous_id_for_user(student, course_key),
course_key.to_deprecated_string(),
module_state_key.to_deprecated_string(),
)
except sub_api.SubmissionError:
# Trust the submissions API to log the error
error_msg = _("An error occurred while deleting the score.")
msg += "<font color='red'>{err}</font> ".format(err=error_msg)
# find the module in question
try:
student_module = StudentModule.objects.get(
student_id=student.id,
course_id=course_key,
module_state_key=module_state_key
)
msg += _("Found module. ")
except StudentModule.DoesNotExist as err:
error_msg = _("Couldn't find module with that urlname: {url}. ").format(url=problem_location_str)
msg += "<font color='red'>{err_msg} ({err})</font>".format(err_msg=error_msg, err=err)
log.debug(error_msg)
if student_module is not None:
if "Delete student state for module" in action:
# delete the state
try:
student_module.delete()
msg += "<font color='red'>{text}</font>".format(
text=_("Deleted student module state for {state}!").format(state=module_state_key)
)
event = {
"problem": problem_location_str,
"student": unique_student_identifier,
"course": course_key.to_deprecated_string()
}
track.views.server_track(
request,
"delete-student-module-state",
event,
page="idashboard"
)
except Exception as err: # pylint: disable=broad-except
error_msg = _("Failed to delete module state for {id}/{url}. ").format(
id=unique_student_identifier, url=problem_location_str
)
msg += "<font color='red'>{err_msg} ({err})</font>".format(err_msg=error_msg, err=err)
log.exception(error_msg)
elif "Reset student's attempts" in action:
# modify the problem's state
try:
# load the state json
problem_state = json.loads(student_module.state)
old_number_of_attempts = problem_state["attempts"]
problem_state["attempts"] = 0
# save
student_module.state = json.dumps(problem_state)
student_module.save()
event = {
"old_attempts": old_number_of_attempts,
"student": unicode(student),
"problem": student_module.module_state_key,
"instructor": unicode(request.user),
"course": course_key.to_deprecated_string()
}
track.views.server_track(request, "reset-student-attempts", event, page="idashboard")
msg += "<font color='green'>{text}</font>".format(
text=_("Module state successfully reset!")
)
except Exception as err: # pylint: disable=broad-except
error_msg = _("Couldn't reset module state for {id}/{url}. ").format(
id=unique_student_identifier, url=problem_location_str
)
msg += "<font color='red'>{err_msg} ({err})</font>".format(err_msg=error_msg, err=err)
log.exception(error_msg)
else:
# "Rescore student's problem submission" case
try:
instructor_task = submit_rescore_problem_for_student(request, module_state_key, student)
if instructor_task is None:
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for rescoring "{key}" for student {id}.').format(
key=module_state_key, id=unique_student_identifier
)
)
else:
track.views.server_track(
request,
"rescore-student-submission",
{
"problem": module_state_key,
"student": unique_student_identifier,
"course": course_key.to_deprecated_string()
},
page="idashboard"
)
except Exception as err: # pylint: disable=broad-except
msg += '<font color="red">{text}</font>'.format(
text=_('Failed to create a background task for rescoring "{key}": {id}.').format(
key=module_state_key, id=err.message
)
)
log.exception("Encountered exception from rescore: student '{0}' problem '{1}'".format(
unique_student_identifier, module_state_key
)
)
elif "Get link to student's progress page" in action:
unique_student_identifier = request.POST.get('unique_student_identifier', '')
# try to uniquely id student by email address or username
message, student = get_student_from_identifier(unique_student_identifier)
msg += message
if student is not None:
progress_url = reverse('student_progress', kwargs={
'course_id': course_key.to_deprecated_string(),
'student_id': student.id
})
track.views.server_track(
request,
"get-student-progress-page",
{
"student": unicode(student),
"instructor": unicode(request.user),
"course": course_key.to_deprecated_string()
},
page="idashboard"
)
msg += "<a href='{url}' target='_blank'>{text}</a>.".format(
url=progress_url,
text=_("Progress page for username: {username} with email address: {email}").format(
username=student.username, email=student.email
)
)
#----------------------------------------
# export grades to remote gradebook
elif action == 'List assignments available in remote gradebook':
msg2, datatable = _do_remote_gradebook(request.user, course, 'get-assignments')
msg += msg2
elif action == 'List assignments available for this course':
log.debug(action)
allgrades = get_student_grade_summary_data(request, course, get_grades=True, use_offline=use_offline)
assignments = [[x] for x in allgrades['assignments']]
datatable = {'header': [_('Assignment Name')]}
datatable['data'] = assignments
datatable['title'] = action
msg += 'assignments=<pre>%s</pre>' % assignments
elif action == 'List enrolled students matching remote gradebook':
stud_data = get_student_grade_summary_data(request, course, get_grades=False, use_offline=use_offline)
msg2, rg_stud_data = _do_remote_gradebook(request.user, course, 'get-membership')
datatable = {'header': ['Student email', 'Match?']}
rg_students = [x['email'] for x in rg_stud_data['retdata']]
def domatch(x):
return 'yes' if x.email in rg_students else 'No'
datatable['data'] = [[x.email, domatch(x)] for x in stud_data['students']]
datatable['title'] = action
elif action in ['Display grades for assignment', 'Export grades for assignment to remote gradebook',
'Export CSV file of grades for assignment']:
log.debug(action)
datatable = {}
aname = request.POST.get('assignment_name', '')
if not aname:
msg += "<font color='red'>{text}</font>".format(text=_("Please enter an assignment name"))
else:
allgrades = get_student_grade_summary_data(request, course, get_grades=True, use_offline=use_offline)
if aname not in allgrades['assignments']:
msg += "<font color='red'>{text}</font>".format(
text=_("Invalid assignment name '{name}'").format(name=aname)
)
else:
aidx = allgrades['assignments'].index(aname)
datatable = {'header': [_('External email'), aname]}
ddata = []
for student in allgrades['students']: # do one by one in case there is a student who has only partial grades
try:
ddata.append([student.email, student.grades[aidx]])
except IndexError:
log.debug('No grade for assignment {idx} ({name}) for student {email}'.format(
idx=aidx, name=aname, email=student.email)
)
datatable['data'] = ddata
datatable['title'] = _('Grades for assignment "{name}"').format(name=aname)
if 'Export CSV' in action:
# generate and return CSV file
return return_csv('grades {name}.csv'.format(name=aname), datatable)
elif 'remote gradebook' in action:
file_pointer = StringIO()
return_csv('', datatable, file_pointer=file_pointer)
file_pointer.seek(0)
files = {'datafile': file_pointer}
msg2, __ = _do_remote_gradebook(request.user, course, 'post-grades', files=files)
msg += msg2
#----------------------------------------
# Admin
elif 'List course staff' in action:
role = CourseStaffRole(course.id)
datatable = _role_members_table(role, _("List of Staff"), course_key)
track.views.server_track(request, "list-staff", {}, page="idashboard")
elif 'List course instructors' in action and GlobalStaff().has_user(request.user):
role = CourseInstructorRole(course.id)
datatable = _role_members_table(role, _("List of Instructors"), course_key)
track.views.server_track(request, "list-instructors", {}, page="idashboard")
elif action == 'Add course staff':
uname = request.POST['staffuser']
role = CourseStaffRole(course.id)
msg += add_user_to_role(request, uname, role, 'staff', 'staff')
elif action == 'Add instructor' and request.user.is_staff:
uname = request.POST['instructor']
role = CourseInstructorRole(course.id)
msg += add_user_to_role(request, uname, role, 'instructor', 'instructor')
elif action == 'Remove course staff':
uname = request.POST['staffuser']
role = CourseStaffRole(course.id)
msg += remove_user_from_role(request, uname, role, 'staff', 'staff')
elif action == 'Remove instructor' and request.user.is_staff:
uname = request.POST['instructor']
role = CourseInstructorRole(course.id)
msg += remove_user_from_role(request, uname, role, 'instructor', 'instructor')
#----------------------------------------
# DataDump
elif 'Download CSV of all student profile data' in action:
enrolled_students = User.objects.filter(
courseenrollment__course_id=course_key,
courseenrollment__is_active=1,
).order_by('username').select_related("profile")
profkeys = ['name', 'language', 'location', 'year_of_birth', 'gender', 'level_of_education',
'mailing_address', 'goals']
datatable = {'header': ['username', 'email'] + profkeys}
def getdat(user):
"""
Return a list of profile data for the given user.
"""
profile = user.profile
return [user.username, user.email] + [getattr(profile, xkey, '') for xkey in profkeys]
datatable['data'] = [getdat(u) for u in enrolled_students]
datatable['title'] = _('Student profile data for course {course_id}').format(
course_id=course_key.to_deprecated_string()
)
return return_csv(
'profiledata_{course_id}.csv'.format(course_id=course_key.to_deprecated_string()),
datatable
)
elif 'Download CSV of all responses to problem' in action:
problem_to_dump = request.POST.get('problem_to_dump', '')
if problem_to_dump[-4:] == ".xml":
problem_to_dump = problem_to_dump[:-4]
try:
module_state_key = course_key.make_usage_key_from_deprecated_string(problem_to_dump)
smdat = StudentModule.objects.filter(
course_id=course_key,
module_state_key=module_state_key
)
smdat = smdat.order_by('student')
msg += _("Found {num} records to dump.").format(num=smdat)
except Exception as err: # pylint: disable=broad-except
msg += "<font color='red'>{text}</font><pre>{err}</pre>".format(
text=_("Couldn't find module with that urlname."),
err=escape(err)
)
smdat = []
if smdat:
datatable = {'header': ['username', 'state']}
datatable['data'] = [[x.student.username, x.state] for x in smdat]
datatable['title'] = _('Student state for problem {problem}').format(problem=problem_to_dump)
return return_csv('student_state_from_{problem}.csv'.format(problem=problem_to_dump), datatable)
elif 'Download CSV of all student anonymized IDs' in action:
students = User.objects.filter(
courseenrollment__course_id=course_key,
).order_by('id')
datatable = {'header': ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']}
datatable['data'] = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_key, save=False)] for s in students]
return return_csv(course_key.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', datatable)
#----------------------------------------
# Group management
elif 'List beta testers' in action:
role = CourseBetaTesterRole(course.id)
datatable = _role_members_table(role, _("List of Beta Testers"), course_key)
track.views.server_track(request, "list-beta-testers", {}, page="idashboard")
elif action == 'Add beta testers':
users = request.POST['betausers']
log.debug("users: {0!r}".format(users))
role = CourseBetaTesterRole(course.id)
for username_or_email in split_by_comma_and_whitespace(users):
msg += "<p>{0}</p>".format(
add_user_to_role(request, username_or_email, role, 'beta testers', 'beta-tester'))
elif action == 'Remove beta testers':
users = request.POST['betausers']
role = CourseBetaTesterRole(course.id)
for username_or_email in split_by_comma_and_whitespace(users):
msg += "<p>{0}</p>".format(
remove_user_from_role(request, username_or_email, role, 'beta testers', 'beta-tester'))
#----------------------------------------
# forum administration
elif action == 'List course forum admins':
rolename = FORUM_ROLE_ADMINISTRATOR
datatable = {}
msg += _list_course_forum_members(course_key, rolename, datatable)
track.views.server_track(
request, "list-forum-admins", {"course": course_key.to_deprecated_string()}, page="idashboard"
)
elif action == 'Remove forum admin':
uname = request.POST['forumadmin']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_REMOVE)
track.views.server_track(
request, "remove-forum-admin", {"username": uname, "course": course_key.to_deprecated_string()},
page="idashboard"
)
elif action == 'Add forum admin':
uname = request.POST['forumadmin']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_ADD)
track.views.server_track(
request, "add-forum-admin", {"username": uname, "course": course_key.to_deprecated_string()},
page="idashboard"
)
elif action == 'List course forum moderators':
rolename = FORUM_ROLE_MODERATOR
datatable = {}
msg += _list_course_forum_members(course_key, rolename, datatable)
track.views.server_track(
request, "list-forum-mods", {"course": course_key.to_deprecated_string()}, page="idashboard"
)
elif action == 'Remove forum moderator':
uname = request.POST['forummoderator']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_MODERATOR, FORUM_ROLE_REMOVE)
track.views.server_track(
request, "remove-forum-mod", {"username": uname, "course": course_key.to_deprecated_string()},
page="idashboard"
)
elif action == 'Add forum moderator':
uname = request.POST['forummoderator']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_MODERATOR, FORUM_ROLE_ADD)
track.views.server_track(
request, "add-forum-mod", {"username": uname, "course": course_key.to_deprecated_string()},
page="idashboard"
)
elif action == 'List course forum community TAs':
rolename = FORUM_ROLE_COMMUNITY_TA
datatable = {}
msg += _list_course_forum_members(course_key, rolename, datatable)
track.views.server_track(
request, "list-forum-community-TAs", {"course": course_key.to_deprecated_string()},
page="idashboard"
)
elif action == 'Remove forum community TA':
uname = request.POST['forummoderator']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_COMMUNITY_TA, FORUM_ROLE_REMOVE)
track.views.server_track(
request, "remove-forum-community-TA", {
"username": uname, "course": course_key.to_deprecated_string()
},
page="idashboard"
)
elif action == 'Add forum community TA':
uname = request.POST['forummoderator']
msg += _update_forum_role_membership(uname, course, FORUM_ROLE_COMMUNITY_TA, FORUM_ROLE_ADD)
track.views.server_track(
request, "add-forum-community-TA", {
"username": uname, "course": course_key.to_deprecated_string()
},
page="idashboard"
)
#----------------------------------------
# enrollment
elif action == 'List students who may enroll but may not have yet signed up':
ceaset = CourseEnrollmentAllowed.objects.filter(course_id=course_key)
datatable = {'header': ['StudentEmail']}
datatable['data'] = [[x.email] for x in ceaset]
datatable['title'] = action
elif action == 'Enroll multiple students':
is_shib_course = uses_shib(course)
students = request.POST.get('multiple_students', '')
auto_enroll = bool(request.POST.get('auto_enroll'))
email_students = bool(request.POST.get('email_students'))
secure = request.is_secure()
ret = _do_enroll_students(course, course_key, students, secure=secure, auto_enroll=auto_enroll, email_students=email_students, is_shib_course=is_shib_course)
datatable = ret['datatable']
elif action == 'Unenroll multiple students':
students = request.POST.get('multiple_students', '')
email_students = bool(request.POST.get('email_students'))
ret = _do_unenroll_students(course_key, students, email_students=email_students)
datatable = ret['datatable']
elif action == 'List sections available in remote gradebook':
msg2, datatable = _do_remote_gradebook(request.user, course, 'get-sections')
msg += msg2
elif action in ['List students in section in remote gradebook',
'Overload enrollment list using remote gradebook',
'Merge enrollment list with remote gradebook']:
section = request.POST.get('gradebook_section', '')
msg2, datatable = _do_remote_gradebook(request.user, course, 'get-membership', dict(section=section))
msg += msg2
        if 'List' not in action:
students = ','.join([x['email'] for x in datatable['retdata']])
overload = 'Overload' in action
secure = request.is_secure()
ret = _do_enroll_students(course, course_key, students, secure=secure, overload=overload)
datatable = ret['datatable']
#----------------------------------------
# email
elif action == 'Send email':
email_to_option = request.POST.get("to_option")
email_subject = request.POST.get("subject")
html_message = request.POST.get("message")
if bulk_email_is_enabled_for_course(course_key):
try:
# Create the CourseEmail object. This is saved immediately, so that
# any transaction that has been pending up to this point will also be
# committed.
email = CourseEmail.create(
course_key.to_deprecated_string(), request.user, email_to_option, email_subject, html_message
)
# Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
submit_bulk_course_email(request, course_key, email.id) # pylint: disable=E1101
except Exception as err: # pylint: disable=broad-except
# Catch any errors and deliver a message to the user
error_msg = "Failed to send email! ({0})".format(err)
msg += "<font color='red'>" + error_msg + "</font>"
log.exception(error_msg)
else:
# If sending the task succeeds, deliver a success message to the user.
if email_to_option == "all":
text = _(
"Your email was successfully queued for sending. "
"Please note that for large classes, it may take up to an hour "
"(or more, if other courses are simultaneously sending email) "
"to send all emails."
)
else:
text = _('Your email was successfully queued for sending.')
email_msg = '<div class="msg msg-confirm"><p class="copy">{text}</p></div>'.format(text=text)
else:
msg += "<font color='red'>Email is not enabled for this course.</font>"
elif "Show Background Email Task History" in action:
message, datatable = get_background_task_table(course_key, task_type='bulk_course_email')
msg += message
elif "Show Background Email Task History" in action:
message, datatable = get_background_task_table(course_key, task_type='bulk_course_email')
msg += message
#----------------------------------------
# psychometrics
elif action == 'Generate Histogram and IRT Plot':
problem = request.POST['Problem']
nmsg, plots = psychoanalyze.generate_plots_for_problem(problem)
msg += nmsg
track.views.server_track(request, "psychometrics-histogram-generation", {"problem": unicode(problem)}, page="idashboard")
if idash_mode == 'Psychometrics':
problems = psychoanalyze.problems_with_psychometric_data(course_key)
#----------------------------------------
# analytics
def get_analytics_result(analytics_name):
"""Return data for an Analytic piece, or None if it doesn't exist. It
logs and swallows errors.
"""
url = settings.ANALYTICS_SERVER_URL + \
u"get?aname={}&course_id={}&apikey={}".format(
analytics_name, course_key.to_deprecated_string(), settings.ANALYTICS_API_KEY
)
try:
res = requests.get(url)
except Exception: # pylint: disable=broad-except
log.exception("Error trying to access analytics at %s", url)
return None
if res.status_code == codes.OK:
# WARNING: do not use req.json because the preloaded json doesn't
# preserve the order of the original record (hence OrderedDict).
return json.loads(res.content, object_pairs_hook=OrderedDict)
else:
log.error("Error fetching %s, code: %s, msg: %s",
url, res.status_code, res.content)
return None
analytics_results = {}
if idash_mode == 'Analytics':
DASHBOARD_ANALYTICS = [
# "StudentsAttemptedProblems", # num students who tried given problem
"StudentsDailyActivity", # active students by day
"StudentsDropoffPerDay", # active students dropoff by day
# "OverallGradeDistribution", # overall point distribution for course
"StudentsActive", # num students active in time period (default = 1wk)
"StudentsEnrolled", # num students enrolled
# "StudentsPerProblemCorrect", # foreach problem, num students correct
"ProblemGradeDistribution", # foreach problem, grade distribution
]
for analytic_name in DASHBOARD_ANALYTICS:
analytics_results[analytic_name] = get_analytics_result(analytic_name)
#----------------------------------------
# Metrics
metrics_results = {}
if settings.FEATURES.get('CLASS_DASHBOARD') and idash_mode == 'Metrics':
metrics_results['section_display_name'] = dashboard_data.get_section_display_name(course_key)
metrics_results['section_has_problem'] = dashboard_data.get_array_section_has_problem(course_key)
#----------------------------------------
# offline grades?
if use_offline:
msg += "<br/><font color='orange'>{text}</font>".format(
text=_("Grades from {course_id}").format(
course_id=offline_grades_available(course_key)
)
)
# generate list of pending background tasks
if settings.FEATURES.get('ENABLE_INSTRUCTOR_BACKGROUND_TASKS'):
instructor_tasks = get_running_instructor_tasks(course_key)
else:
instructor_tasks = None
# determine if this is a studio-backed course so we can provide a link to edit this course in studio
is_studio_course = modulestore().get_modulestore_type(course_key) != ModuleStoreEnum.Type.xml
studio_url = None
if is_studio_course:
studio_url = get_cms_course_link(course)
email_editor = None
# HTML editor for email
if idash_mode == 'Email' and is_studio_course:
html_module = HtmlDescriptor(
course.system,
DictFieldData({'data': html_message}),
ScopeIds(None, None, None, course_key.make_usage_key('html', 'dummy'))
)
fragment = html_module.render('studio_view')
fragment = wrap_xblock(
'LmsRuntime', html_module, 'studio_view', fragment, None,
extra_data={"course-id": course_key.to_deprecated_string()},
usage_id_serializer=lambda usage_id: quote_slashes(usage_id.to_deprecated_string()),
request_token=request_token(request),
)
email_editor = fragment.content
# Enable instructor email only if the following conditions are met:
# 1. Feature flag is on
# 2. We have explicitly enabled email for the given course via django-admin
# 3. It is NOT an XML course
if bulk_email_is_enabled_for_course(course_key):
show_email_tab = True
# display course stats only if there is no other table to display:
course_stats = None
if not datatable:
course_stats = get_course_stats_table()
# disable buttons for large courses
disable_buttons = False
max_enrollment_for_buttons = settings.FEATURES.get("MAX_ENROLLMENT_INSTR_BUTTONS")
if max_enrollment_for_buttons is not None:
disable_buttons = enrollment_number > max_enrollment_for_buttons
#----------------------------------------
# context for rendering
context = {
'course': course,
'staff_access': True,
'admin_access': request.user.is_staff,
'instructor_access': instructor_access,
'forum_admin_access': forum_admin_access,
'datatable': datatable,
'course_stats': course_stats,
'msg': msg,
'modeflag': {idash_mode: 'selectedmode'},
'studio_url': studio_url,
'to_option': email_to_option, # email
'subject': email_subject, # email
'editor': email_editor, # email
'email_msg': email_msg, # email
'show_email_tab': show_email_tab, # email
'problems': problems, # psychometrics
'plots': plots, # psychometrics
'course_errors': modulestore().get_course_errors(course.id),
'instructor_tasks': instructor_tasks,
'offline_grade_log': offline_grades_available(course_key),
'cohorts_ajax_url': reverse('cohorts', kwargs={'course_key_string': course_key.to_deprecated_string()}),
'analytics_results': analytics_results,
'disable_buttons': disable_buttons,
'metrics_results': metrics_results,
}
context['standard_dashboard_url'] = reverse('instructor_dashboard', kwargs={'course_id': course_key.to_deprecated_string()})
return render_to_response('courseware/instructor_dashboard.html', context)
def _do_remote_gradebook(user, course, action, args=None, files=None):
'''
Perform remote gradebook action. Returns msg, datatable.
'''
rg = course.remote_gradebook
if not rg:
msg = _("No remote gradebook defined in course metadata")
return msg, {}
rgurl = settings.FEATURES.get('REMOTE_GRADEBOOK_URL', '')
if not rgurl:
msg = _("No remote gradebook url defined in settings.FEATURES")
return msg, {}
rgname = rg.get('name', '')
if not rgname:
msg = _("No gradebook name defined in course remote_gradebook metadata")
return msg, {}
if args is None:
args = {}
data = dict(submit=action, gradebook=rgname, user=user.email)
data.update(args)
try:
resp = requests.post(rgurl, data=data, verify=False, files=files)
retdict = json.loads(resp.content)
except Exception as err: # pylint: disable=broad-except
msg = _("Failed to communicate with gradebook server at {url}").format(url=rgurl) + "<br/>"
msg += _("Error: {err}").format(err=err)
msg += "<br/>resp={resp}".format(resp=resp.content)
msg += "<br/>data={data}".format(data=data)
return msg, {}
msg = '<pre>{msg}</pre>'.format(msg=retdict['msg'].replace('\n', '<br/>'))
retdata = retdict['data'] # a list of dicts
if retdata:
datatable = {'header': retdata[0].keys()}
datatable['data'] = [x.values() for x in retdata]
datatable['title'] = _('Remote gradebook response for {action}').format(action=action)
datatable['retdata'] = retdata
else:
datatable = {}
return msg, datatable
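# Illustrative sketch (hypothetical remote-gradebook response) of the (msg, datatable)
# pair returned by _do_remote_gradebook:
#
#     msg, datatable = _do_remote_gradebook(request.user, course, 'get-assignments')
#     # datatable might look like:
#     # {'header': ['AssignmentName'],
#     #  'data': [['Midterm'], ['Final']],
#     #  'title': u'Remote gradebook response for get-assignments',
#     #  'retdata': [{'AssignmentName': 'Midterm'}, {'AssignmentName': 'Final'}]}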
def _list_course_forum_members(course_key, rolename, datatable):
"""
Fills in datatable with forum membership information, for a given role,
so that it will be displayed on instructor dashboard.
    course_key = the CourseKey for a course
    rolename = one of "Administrator", "Moderator", "Community TA"
    Returns a status string to append to the displayed message if the role is unknown.
"""
# make sure datatable is set up properly for display first, before checking for errors
datatable['header'] = [_('Username'), _('Full name'), _('Roles')]
datatable['title'] = _('List of Forum {name}s in course {id}').format(
name=rolename, id=course_key.to_deprecated_string()
)
datatable['data'] = []
try:
role = Role.objects.get(name=rolename, course_id=course_key)
except Role.DoesNotExist:
return '<font color="red">' + _('Error: unknown rolename "{rolename}"').format(rolename=rolename) + '</font>'
uset = role.users.all().order_by('username')
msg = 'Role = {0}'.format(rolename)
log.debug('role={0}'.format(rolename))
datatable['data'] = [[x.username, x.profile.name, ', '.join([
r.name for r in x.roles.filter(course_id=course_key).order_by('name')
])] for x in uset]
return msg
def _update_forum_role_membership(uname, course, rolename, add_or_remove):
'''
Supports adding a user to a course's forum role
uname = username string for user
course = course object
rolename = one of "Administrator", "Moderator", "Community TA"
add_or_remove = one of "add" or "remove"
Returns message status string to append to displayed message, Status is returned if user
or role is unknown, or if entry already exists when adding, or if entry doesn't exist when removing.
'''
# check that username and rolename are valid:
try:
user = User.objects.get(username=uname)
except User.DoesNotExist:
return '<font color="red">' + _('Error: unknown username "{username}"').format(username=uname) + '</font>'
try:
role = Role.objects.get(name=rolename, course_id=course.id)
except Role.DoesNotExist:
return '<font color="red">' + _('Error: unknown rolename "{rolename}"').format(rolename=rolename) + '</font>'
# check whether role already has the specified user:
alreadyexists = role.users.filter(username=uname).exists()
msg = ''
log.debug('rolename={0}'.format(rolename))
if add_or_remove == FORUM_ROLE_REMOVE:
if not alreadyexists:
msg = '<font color="red">' + _('Error: user "{username}" does not have rolename "{rolename}", cannot remove').format(username=uname, rolename=rolename) + '</font>'
else:
user.roles.remove(role)
msg = '<font color="green">' + _('Removed "{username}" from "{course_id}" forum role = "{rolename}"').format(username=user, course_id=course.id.to_deprecated_string(), rolename=rolename) + '</font>'
else:
if alreadyexists:
msg = '<font color="red">' + _('Error: user "{username}" already has rolename "{rolename}", cannot add').format(username=uname, rolename=rolename) + '</font>'
else:
if (rolename == FORUM_ROLE_ADMINISTRATOR and not has_access(user, 'staff', course)):
msg = '<font color="red">' + _('Error: user "{username}" should first be added as staff before adding as a forum administrator, cannot add').format(username=uname) + '</font>'
else:
user.roles.add(role)
msg = '<font color="green">' + _('Added "{username}" to "{course_id}" forum role = "{rolename}"').format(username=user, course_id=course.id.to_deprecated_string(), rolename=rolename) + '</font>'
return msg
def _role_members_table(role, title, course_key):
"""
Return a data table of usernames and names of users in group_name.
Arguments:
role -- a student.roles.AccessRole
title -- a descriptive title to show the user
Returns:
a dictionary with keys
'header': ['Username', 'Full name'],
'data': [[username, name] for all users]
'title': "{title} in course {course}"
"""
uset = role.users_with_role()
datatable = {'header': [_('Username'), _('Full name')]}
datatable['data'] = [[x.username, x.profile.name] for x in uset]
datatable['title'] = _('{title} in course {course_key}').format(title=title, course_key=course_key.to_deprecated_string())
return datatable
def _user_from_name_or_email(username_or_email):
"""
Return the `django.contrib.auth.User` with the supplied username or email.
If `username_or_email` contains an `@` it is treated as an email, otherwise
it is treated as the username
"""
username_or_email = strip_if_string(username_or_email)
if '@' in username_or_email:
return User.objects.get(email=username_or_email)
else:
return User.objects.get(username=username_or_email)
def add_user_to_role(request, username_or_email, role, group_title, event_name):
"""
Look up the given user by username (if no '@') or email (otherwise), and add them to group.
Arguments:
request: django request--used for tracking log
    username_or_email: who to add. Decide if it's an email by presence of an '@'
    role: a student.roles.AccessRole to add the user to
group_title: what to call this group in messages to user--e.g. "beta-testers".
event_name: what to call this event when logging to tracking logs.
Returns:
html to insert in the message field
"""
username_or_email = strip_if_string(username_or_email)
try:
user = _user_from_name_or_email(username_or_email)
except User.DoesNotExist:
return u'<font color="red">Error: unknown username or email "{0}"</font>'.format(username_or_email)
role.add_users(user)
# Deal with historical event names
if event_name in ('staff', 'beta-tester'):
track.views.server_track(
request,
"add-or-remove-user-group",
{
"event_name": event_name,
"user": unicode(user),
"event": "add"
},
page="idashboard"
)
else:
track.views.server_track(request, "add-instructor", {"instructor": unicode(user)}, page="idashboard")
return '<font color="green">Added {0} to {1}</font>'.format(user, group_title)
def remove_user_from_role(request, username_or_email, role, group_title, event_name):
"""
Look up the given user by username (if no '@') or email (otherwise), and remove them from the supplied role.
Arguments:
request: django request--used for tracking log
    username_or_email: who to remove. Decide if it's an email by presence of an '@'
role: A student.roles.AccessRole
group_title: what to call this group in messages to user--e.g. "beta-testers".
event_name: what to call this event when logging to tracking logs.
Returns:
html to insert in the message field
"""
username_or_email = strip_if_string(username_or_email)
try:
user = _user_from_name_or_email(username_or_email)
except User.DoesNotExist:
return u'<font color="red">Error: unknown username or email "{0}"</font>'.format(username_or_email)
role.remove_users(user)
# Deal with historical event names
if event_name in ('staff', 'beta-tester'):
track.views.server_track(
request,
"add-or-remove-user-group",
{
"event_name": event_name,
"user": unicode(user),
"event": "remove"
},
page="idashboard"
)
else:
track.views.server_track(request, "remove-instructor", {"instructor": unicode(user)}, page="idashboard")
return '<font color="green">Removed {0} from {1}</font>'.format(user, group_title)
class GradeTable(object):
"""
Keep track of grades, by student, for all graded assignment
components. Each student's grades are stored in a list. The
index of this list specifies the assignment component. Not
all lists have the same length, because at the start of going
through the set of grades, it is unknown what assignment
compoments exist. This is because some students may not do
all the assignment components.
The student grades are then stored in a dict, with the student
id as the key.
"""
def __init__(self):
self.components = OrderedDict()
self.grades = {}
self._current_row = {}
def _add_grade_to_row(self, component, score):
"""Creates component if needed, and assigns score
Args:
component (str): Course component being graded
score (float): Score of student on component
Returns:
None
"""
component_index = self.components.setdefault(component, len(self.components))
self._current_row[component_index] = score
@contextmanager
def add_row(self, student_id):
"""Context management for a row of grades
Uses a new dictionary to get all grades of a specified student
and closes by adding that dict to the internal table.
Args:
student_id (str): Student id that is having grades set
"""
self._current_row = {}
yield self._add_grade_to_row
self.grades[student_id] = self._current_row
def get_grade(self, student_id):
"""Retrieves padded list of grades for specified student
Args:
student_id (str): Student ID for desired grades
Returns:
list: Ordered list of grades for student
"""
        row = self.grades.get(student_id, {})
ncomp = len(self.components)
return [row.get(comp, None) for comp in range(ncomp)]
def get_graded_components(self):
"""
Return a list of components that have been
discovered so far.
"""
return self.components.keys()
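# Illustrative usage of GradeTable (hypothetical data); get_grade() pads missing
# components with None:
#
#     gtab = GradeTable()
#     with gtab.add_row('student_1') as add_grade:
#         add_grade('HW 01', 0.9)
#         add_grade('HW 02', 0.8)
#     with gtab.add_row('student_2') as add_grade:
#         add_grade('HW 01', 1.0)
#     gtab.get_grade('student_2')       # -> [1.0, None]
#     gtab.get_graded_components()      # -> ['HW 01', 'HW 02']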
def get_student_grade_summary_data(request, course, get_grades=True, get_raw_scores=False, use_offline=False):
"""
Return data arrays with student identity and grades for specified course.
    course = CourseDescriptor
    course_key = course ID (derived here from course.id)
returns datatable = dict(header=header, data=data)
where
header = list of strings labeling the data fields
data = list (one per student) of lists of data corresponding to the fields
If get_raw_scores=True, then instead of grade summaries, the raw grades for all graded modules are returned.
"""
course_key = course.id
enrolled_students = User.objects.filter(
courseenrollment__course_id=course_key,
courseenrollment__is_active=1,
).prefetch_related("groups").order_by('username')
header = [_('ID'), _('Username'), _('Full Name'), _('edX email'), _('External email')]
datatable = {'header': header, 'students': enrolled_students}
data = []
gtab = GradeTable()
for student in enrolled_students:
datarow = [student.id, student.username, student.profile.name, student.email]
try:
datarow.append(student.externalauthmap.external_email)
except: # ExternalAuthMap.DoesNotExist
datarow.append('')
if get_grades:
gradeset = student_grades(student, request, course, keep_raw_scores=get_raw_scores, use_offline=use_offline)
log.debug('student={0}, gradeset={1}'.format(student, gradeset))
with gtab.add_row(student.id) as add_grade:
if get_raw_scores:
# TODO (ichuang) encode Score as dict instead of as list, so score[0] -> score['earned']
for score in gradeset['raw_scores']:
add_grade(score.section, getattr(score, 'earned', score[0]))
else:
for grade_item in gradeset['section_breakdown']:
add_grade(grade_item['label'], grade_item['percent'])
student.grades = gtab.get_grade(student.id)
data.append(datarow)
# if getting grades, need to do a second pass, and add grades to each datarow;
# on the first pass we don't know all the graded components
if get_grades:
for datarow in data:
# get grades for student
sgrades = gtab.get_grade(datarow[0])
datarow += sgrades
# get graded components and add to table header
assignments = gtab.get_graded_components()
header += assignments
datatable['assignments'] = assignments
datatable['data'] = data
return datatable
#-----------------------------------------------------------------------------
# Gradebook has moved to instructor.api.spoc_gradebook #
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def grade_summary(request, course_key):
"""Display the grade summary for a course."""
course = get_course_with_access(request.user, 'staff', course_key)
# For now, just a page
context = {'course': course,
'staff_access': True, }
return render_to_response('courseware/grade_summary.html', context)
#-----------------------------------------------------------------------------
# enrollment
def _do_enroll_students(course, course_key, students, secure=False, overload=False, auto_enroll=False, email_students=False, is_shib_course=False):
"""
Do the actual work of enrolling multiple students, presented as a string
of emails separated by commas or returns
`course` is course object
`course_key` id of course (a CourseKey)
`students` string of student emails separated by commas or returns (a `str`)
`overload` un-enrolls all existing students (a `boolean`)
`auto_enroll` is user input preference (a `boolean`)
`email_students` is user input preference (a `boolean`)
"""
new_students, new_students_lc = get_and_clean_student_list(students)
status = dict([x, 'unprocessed'] for x in new_students)
if overload: # delete all but staff
todelete = CourseEnrollment.objects.filter(course_id=course_key)
for ce in todelete:
if not has_access(ce.user, 'staff', course) and ce.user.email.lower() not in new_students_lc:
status[ce.user.email] = 'deleted'
ce.deactivate()
else:
status[ce.user.email] = 'is staff'
ceaset = CourseEnrollmentAllowed.objects.filter(course_id=course_key)
for cea in ceaset:
status[cea.email] = 'removed from pending enrollment list'
ceaset.delete()
if email_students:
protocol = 'https' if secure else 'http'
stripped_site_name = microsite.get_value(
'SITE_NAME',
settings.SITE_NAME
)
# TODO: Use request.build_absolute_uri rather than '{proto}://{site}{path}'.format
# and check with the Services team that this works well with microsites
registration_url = '{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('student.views.register_user')
)
course_url = '{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('course_root', kwargs={'course_id': course_key.to_deprecated_string()})
)
# We can't get the url to the course's About page if the marketing site is enabled.
course_about_url = None
if not settings.FEATURES.get('ENABLE_MKTG_SITE', False):
course_about_url = u'{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('about_course', kwargs={'course_id': course_key.to_deprecated_string()})
)
# Composition of email
d = {
'site_name': stripped_site_name,
'registration_url': registration_url,
'course': course,
'auto_enroll': auto_enroll,
'course_url': course_url,
'course_about_url': course_about_url,
'is_shib_course': is_shib_course
}
for student in new_students:
try:
user = User.objects.get(email=student)
except User.DoesNotExist:
#Student not signed up yet, put in pending enrollment allowed table
cea = CourseEnrollmentAllowed.objects.filter(email=student, course_id=course_key)
#If enrollmentallowed already exists, update auto_enroll flag to however it was set in UI
#Will be 0 or 1 records as there is a unique key on email + course_id
if cea:
cea[0].auto_enroll = auto_enroll
cea[0].save()
status[student] = 'user does not exist, enrollment already allowed, pending with auto enrollment ' \
+ ('on' if auto_enroll else 'off')
continue
#EnrollmentAllowed doesn't exist so create it
cea = CourseEnrollmentAllowed(email=student, course_id=course_key, auto_enroll=auto_enroll)
cea.save()
status[student] = 'user does not exist, enrollment allowed, pending with auto enrollment ' \
+ ('on' if auto_enroll else 'off')
if email_students:
# User is allowed to enroll but has not signed up yet
d['email_address'] = student
d['message'] = 'allowed_enroll'
send_mail_ret = send_mail_to_student(student, d)
status[student] += (', email sent' if send_mail_ret else '')
continue
# Student has already registered
if CourseEnrollment.is_enrolled(user, course_key):
status[student] = 'already enrolled'
continue
try:
# Not enrolled yet
CourseEnrollment.enroll(user, course_key)
status[student] = 'added'
if email_students:
# User enrolled for first time, populate dict with user specific info
d['email_address'] = student
d['full_name'] = user.profile.name
d['message'] = 'enrolled_enroll'
send_mail_ret = send_mail_to_student(student, d)
status[student] += (', email sent' if send_mail_ret else '')
except:
status[student] = 'rejected'
datatable = {'header': ['StudentEmail', 'action']}
datatable['data'] = [[x, status[x]] for x in sorted(status)]
datatable['title'] = _('Enrollment of students')
def sf(stat):
return [x for x in status if status[x] == stat]
data = dict(added=sf('added'), rejected=sf('rejected') + sf('exists'),
deleted=sf('deleted'), datatable=datatable)
return data
#Unenrollment
def _do_unenroll_students(course_key, students, email_students=False):
"""
Do the actual work of un-enrolling multiple students, presented as a string
of emails separated by commas or returns
`course_key` is id of course (a `str`)
`students` is string of student emails separated by commas or returns (a `str`)
`email_students` is user input preference (a `boolean`)
"""
old_students, __ = get_and_clean_student_list(students)
status = dict([x, 'unprocessed'] for x in old_students)
stripped_site_name = microsite.get_value(
'SITE_NAME',
settings.SITE_NAME
)
if email_students:
course = modulestore().get_course(course_key)
#Composition of email
d = {'site_name': stripped_site_name,
'course': course}
for student in old_students:
isok = False
cea = CourseEnrollmentAllowed.objects.filter(course_id=course_key, email=student)
#Will be 0 or 1 records as there is a unique key on email + course_id
if cea:
cea[0].delete()
status[student] = "un-enrolled"
isok = True
try:
user = User.objects.get(email=student)
except User.DoesNotExist:
if isok and email_students:
#User was allowed to join but had not signed up yet
d['email_address'] = student
d['message'] = 'allowed_unenroll'
send_mail_ret = send_mail_to_student(student, d)
status[student] += (', email sent' if send_mail_ret else '')
continue
#Will be 0 or 1 records as there is a unique key on user + course_id
if CourseEnrollment.is_enrolled(user, course_key):
try:
CourseEnrollment.unenroll(user, course_key)
status[student] = "un-enrolled"
if email_students:
#User was enrolled
d['email_address'] = student
d['full_name'] = user.profile.name
d['message'] = 'enrolled_unenroll'
send_mail_ret = send_mail_to_student(student, d)
status[student] += (', email sent' if send_mail_ret else '')
except Exception: # pylint: disable=broad-except
if not isok:
status[student] = "Error! Failed to un-enroll"
datatable = {'header': ['StudentEmail', 'action']}
datatable['data'] = [[x, status[x]] for x in sorted(status)]
datatable['title'] = _('Un-enrollment of students')
data = dict(datatable=datatable)
return data
def send_mail_to_student(student, param_dict):
"""
Construct the email using templates and then send it.
`student` is the student's email address (a `str`),
`param_dict` is a `dict` with keys [
`site_name`: name given to edX instance (a `str`)
`registration_url`: url for registration (a `str`)
`course_key`: id of course (a CourseKey)
`auto_enroll`: user input option (a `str`)
`course_url`: url of course (a `str`)
`email_address`: email of student (a `str`)
`full_name`: student full name (a `str`)
`message`: type of email to send and template to use (a `str`)
`is_shib_course`: (a `boolean`)
]
Returns a boolean indicating whether the email was sent successfully.
"""
    # add some helpers and microsite configuration substitutions
if 'course' in param_dict:
param_dict['course_name'] = param_dict['course'].display_name_with_default
param_dict['site_name'] = microsite.get_value(
'SITE_NAME',
param_dict.get('site_name', '')
)
subject = None
message = None
message_type = param_dict['message']
email_template_dict = {
'allowed_enroll': ('emails/enroll_email_allowedsubject.txt', 'emails/enroll_email_allowedmessage.txt'),
'enrolled_enroll': ('emails/enroll_email_enrolledsubject.txt', 'emails/enroll_email_enrolledmessage.txt'),
'allowed_unenroll': ('emails/unenroll_email_subject.txt', 'emails/unenroll_email_allowedmessage.txt'),
'enrolled_unenroll': ('emails/unenroll_email_subject.txt', 'emails/unenroll_email_enrolledmessage.txt'),
}
subject_template, message_template = email_template_dict.get(message_type, (None, None))
if subject_template is not None and message_template is not None:
subject = render_to_string(subject_template, param_dict)
message = render_to_string(message_template, param_dict)
if subject and message:
# Remove leading and trailing whitespace from body
message = message.strip()
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
from_address = microsite.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
send_mail(subject, message, from_address, [student], fail_silently=False)
return True
else:
return False
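# Illustrative call (hypothetical values) showing the param_dict expected by
# send_mail_to_student for the 'allowed_enroll' template:
#
#     sent = send_mail_to_student('student@example.com', {
#         'message': 'allowed_enroll',
#         'email_address': 'student@example.com',
#         'site_name': 'edx.example.com',
#         'registration_url': 'https://edx.example.com/register',
#         'course': course,
#         'auto_enroll': True,
#         'course_url': 'https://edx.example.com/courses/Org/Num/Run/',
#         'course_about_url': None,
#         'is_shib_course': False,
#     })
#     # sent is True only if the templates rendered and the mail was handed off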
def get_and_clean_student_list(students):
"""
    Separate out individual student emails from a comma- or whitespace-separated string.
`students` is string of student emails separated by commas or returns (a `str`)
Returns:
students: list of cleaned student emails
students_lc: list of lower case cleaned student emails
"""
students = split_by_comma_and_whitespace(students)
students = [unicode(s.strip()) for s in students]
students = [s for s in students if s != '']
students_lc = [x.lower() for x in students]
return students, students_lc
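# Illustrative example (hypothetical input):
#
#     get_and_clean_student_list("Alice@example.com, bob@example.com\n carol@example.com")
#     # -> ([u'Alice@example.com', u'bob@example.com', u'carol@example.com'],
#     #     [u'alice@example.com', u'bob@example.com', u'carol@example.com'])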
#-----------------------------------------------------------------------------
# answer distribution
def get_answers_distribution(request, course_key):
"""
Get the distribution of answers for all graded problems in the course.
Return a dict with two keys:
'header': a header row
'data': a list of rows
"""
course = get_course_with_access(request.user, 'staff', course_key)
dist = grades.answer_distributions(course.id)
d = {}
d['header'] = ['url_name', 'display name', 'answer id', 'answer', 'count']
d['data'] = [
[url_name, display_name, answer_id, a, answers[a]]
for (url_name, display_name, answer_id), answers in sorted(dist.items())
for a in answers
]
return d
#-----------------------------------------------------------------------------
def compute_course_stats(course):
"""
Compute course statistics, including number of problems, videos, html.
course is a CourseDescriptor from the xmodule system.
"""
# walk the course by using get_children() until we come to the leaves; count the
# number of different leaf types
counts = defaultdict(int)
def walk(module):
children = module.get_children()
category = module.__class__.__name__ # HtmlDescriptor, CapaDescriptor, ...
counts[category] += 1
for c in children:
walk(c)
walk(course)
stats = dict(counts) # number of each kind of module
return stats
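# Illustrative result (hypothetical course): the returned dict maps descriptor class
# names to how many times each appears in the course tree, e.g.
#     {'CourseDescriptor': 1, 'ChapterDescriptor': 4, 'SequenceDescriptor': 12,
#      'VerticalDescriptor': 40, 'CapaDescriptor': 95, 'HtmlDescriptor': 30}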
def dump_grading_context(course):
"""
Dump information about course grading context (eg which problems are graded in what assignments)
Very useful for debugging grading_policy.json and policy.json
"""
msg = "-----------------------------------------------------------------------------\n"
msg += "Course grader:\n"
msg += '%s\n' % course.grader.__class__
graders = {}
if isinstance(course.grader, xmgraders.WeightedSubsectionsGrader):
msg += '\n'
msg += "Graded sections:\n"
for subgrader, category, weight in course.grader.sections:
msg += " subgrader=%s, type=%s, category=%s, weight=%s\n" % (subgrader.__class__, subgrader.type, category, weight)
subgrader.index = 1
graders[subgrader.type] = subgrader
msg += "-----------------------------------------------------------------------------\n"
msg += "Listing grading context for course %s\n" % course.id
gcontext = course.grading_context
msg += "graded sections:\n"
msg += '%s\n' % gcontext['graded_sections'].keys()
for (gsections, gsvals) in gcontext['graded_sections'].items():
msg += "--> Section %s:\n" % (gsections)
for sec in gsvals:
sdesc = sec['section_descriptor']
grade_format = getattr(sdesc, 'grade_format', None)
aname = ''
if grade_format in graders:
gfmt = graders[grade_format]
aname = '%s %02d' % (gfmt.short_label, gfmt.index)
gfmt.index += 1
elif sdesc.display_name in graders:
gfmt = graders[sdesc.display_name]
aname = '%s' % gfmt.short_label
notes = ''
if getattr(sdesc, 'score_by_attempt', False):
notes = ', score by attempt!'
msg += " %s (grade_format=%s, Assignment=%s%s)\n" % (s.display_name, grade_format, aname, notes)
msg += "all descriptors:\n"
msg += "length=%d\n" % len(gcontext['all_descriptors'])
    msg = '<pre>%s</pre>' % msg.replace('<', '&lt;')
return msg
def get_background_task_table(course_key, problem_url=None, student=None, task_type=None):
"""
Construct the "datatable" structure to represent background task history.
Filters the background task history to the specified course and problem.
If a student is provided, filters to only those tasks for which that student
was specified.
Returns a tuple of (msg, datatable), where the msg is a possible error message,
and the datatable is the datatable to be used for display.
"""
history_entries = get_instructor_task_history(course_key, problem_url, student, task_type)
datatable = {}
msg = ""
# first check to see if there is any history at all
# (note that we don't have to check that the arguments are valid; it
# just won't find any entries.)
if (history_entries.count()) == 0:
if problem_url is None:
msg += '<font color="red">Failed to find any background tasks for course "{course}".</font>'.format(
course=course_key.to_deprecated_string()
)
elif student is not None:
template = '<font color="red">' + _('Failed to find any background tasks for course "{course}", module "{problem}" and student "{student}".') + '</font>'
msg += template.format(course=course_key.to_deprecated_string(), problem=problem_url, student=student.username)
else:
msg += '<font color="red">' + _('Failed to find any background tasks for course "{course}" and module "{problem}".').format(
course=course_key.to_deprecated_string(), problem=problem_url
) + '</font>'
else:
datatable['header'] = ["Task Type",
"Task Id",
"Requester",
"Submitted",
"Duration (sec)",
"Task State",
"Task Status",
"Task Output"]
datatable['data'] = []
for instructor_task in history_entries:
# get duration info, if known:
duration_sec = 'unknown'
if hasattr(instructor_task, 'task_output') and instructor_task.task_output is not None:
task_output = json.loads(instructor_task.task_output)
if 'duration_ms' in task_output:
duration_sec = int(task_output['duration_ms'] / 1000.0)
# get progress status message:
success, task_message = get_task_completion_info(instructor_task)
status = "Complete" if success else "Incomplete"
# generate row for this task:
row = [
str(instructor_task.task_type),
str(instructor_task.task_id),
str(instructor_task.requester),
instructor_task.created.isoformat(' '),
duration_sec,
str(instructor_task.task_state),
status,
task_message
]
datatable['data'].append(row)
if problem_url is None:
datatable['title'] = "{course_id}".format(course_id=course_key.to_deprecated_string())
elif student is not None:
datatable['title'] = "{course_id} > {location} > {student}".format(
course_id=course_key.to_deprecated_string(),
location=problem_url,
student=student.username
)
else:
datatable['title'] = "{course_id} > {location}".format(
course_id=course_key.to_deprecated_string(), location=problem_url
)
return msg, datatable
def uses_shib(course):
"""
Used to return whether course has Shibboleth as the enrollment domain
Returns a boolean indicating if Shibboleth authentication is set for this course.
"""
return course.enrollment_domain and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)
|
xiandiancloud/edxplaltfom-xusong
|
lms/djangoapps/instructor/views/legacy.py
|
Python
|
agpl-3.0
| 82,769
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Gengeo(AutotoolsPackage):
"""GenGeo is a library of tools for creating complex particle
geometries for use in ESyS-Particle simulations. GenGeo is a standalone
application with a Python API that creates geometry files suitable for
importing into ESyS-Particle simulations. The functionality of GenGeo far
exceeds the in-simulation geometry creation utilities
provided by ESyS-Particle itself."""
homepage = "https://launchpad.net/esys-particle/gengeo"
url = "https://launchpad.net/esys-particle/trunk/3.0-alpha/+download/gengeo-163.tar.gz"
maintainers = ['dorton21']
version('163', sha256='9c896d430d8f315a45379d2b82e7d374f36259af66a745bfdee4c022a080d34d')
extends('python')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('boost+python')
depends_on('openmpi')
def autoreconf(self, spec, prefix):
autogen = Executable('./autogen.sh')
autogen()
def configure_args(self):
args = [
'--verbose',
'--with-boost=' + self.spec['boost'].prefix,
'CCFLAGS=-fpermissive',
'CXXFLAGS=-fpermissive',
]
return args
|
LLNL/spack
|
var/spack/repos/builtin/packages/gengeo/package.py
|
Python
|
lgpl-2.1
| 1,514
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
float_or_none,
int_or_none,
)
class JWPlatformBaseIE(InfoExtractor):
@staticmethod
def _find_jwplayer_data(webpage):
# TODO: Merge this with JWPlayer-related codes in generic.py
mobj = re.search(
            r'jwplayer\((?P<quote>[\'"])[^\'" ]+(?P=quote)\)\.setup\((?P<options>[^)]+)\)',
webpage)
if mobj:
return mobj.group('options')
def _extract_jwplayer_data(self, webpage, video_id, *args, **kwargs):
jwplayer_data = self._parse_json(
self._find_jwplayer_data(webpage), video_id)
return self._parse_jwplayer_data(
jwplayer_data, video_id, *args, **kwargs)
def _parse_jwplayer_data(self, jwplayer_data, video_id, require_title=True, m3u8_id=None, rtmp_params=None):
# JWPlayer backward compatibility: flattened playlists
# https://github.com/jwplayer/jwplayer/blob/v7.4.3/src/js/api/config.js#L81-L96
if 'playlist' not in jwplayer_data:
jwplayer_data = {'playlist': [jwplayer_data]}
video_data = jwplayer_data['playlist'][0]
# JWPlayer backward compatibility: flattened sources
# https://github.com/jwplayer/jwplayer/blob/v7.4.3/src/js/playlist/item.js#L29-L35
if 'sources' not in video_data:
video_data['sources'] = [video_data]
formats = []
for source in video_data['sources']:
source_url = self._proto_relative_url(source['file'])
source_type = source.get('type') or ''
if source_type in ('application/vnd.apple.mpegurl', 'hls') or determine_ext(source_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(
source_url, video_id, 'mp4', 'm3u8_native', m3u8_id=m3u8_id, fatal=False))
elif source_type.startswith('audio'):
formats.append({
'url': source_url,
'vcodec': 'none',
})
else:
a_format = {
'url': source_url,
'width': int_or_none(source.get('width')),
'height': int_or_none(source.get('height')),
}
if source_url.startswith('rtmp'):
                    a_format['ext'] = 'flv'
# See com/longtailvideo/jwplayer/media/RTMPMediaProvider.as
# of jwplayer.flash.swf
rtmp_url_parts = re.split(
r'((?:mp4|mp3|flv):)', source_url, 1)
if len(rtmp_url_parts) == 3:
rtmp_url, prefix, play_path = rtmp_url_parts
a_format.update({
'url': rtmp_url,
'play_path': prefix + play_path,
})
if rtmp_params:
a_format.update(rtmp_params)
formats.append(a_format)
self._sort_formats(formats)
subtitles = {}
tracks = video_data.get('tracks')
if tracks and isinstance(tracks, list):
for track in tracks:
if track.get('file') and track.get('kind') == 'captions':
subtitles.setdefault(track.get('label') or 'en', []).append({
'url': self._proto_relative_url(track['file'])
})
return {
'id': video_id,
'title': video_data['title'] if require_title else video_data.get('title'),
'description': video_data.get('description'),
'thumbnail': self._proto_relative_url(video_data.get('image')),
'timestamp': int_or_none(video_data.get('pubdate')),
'duration': float_or_none(jwplayer_data.get('duration')),
'subtitles': subtitles,
'formats': formats,
}
class JWPlatformIE(JWPlatformBaseIE):
_VALID_URL = r'(?:https?://content\.jwplatform\.com/(?:feeds|players|jw6)/|jwplatform:)(?P<id>[a-zA-Z0-9]{8})'
_TEST = {
'url': 'http://content.jwplatform.com/players/nPripu9l-ALJ3XQCI.js',
'md5': 'fa8899fa601eb7c83a64e9d568bdf325',
'info_dict': {
'id': 'nPripu9l',
'ext': 'mov',
'title': 'Big Buck Bunny Trailer',
'description': 'Big Buck Bunny is a short animated film by the Blender Institute. It is made using free and open source software.',
'upload_date': '20081127',
'timestamp': 1227796140,
}
}
@staticmethod
def _extract_url(webpage):
mobj = re.search(
r'<script[^>]+?src=["\'](?P<url>(?:https?:)?//content.jwplatform.com/players/[a-zA-Z0-9]{8})',
webpage)
if mobj:
return mobj.group('url')
def _real_extract(self, url):
video_id = self._match_id(url)
json_data = self._download_json('http://content.jwplatform.com/feeds/%s.json' % video_id, video_id)
return self._parse_jwplayer_data(json_data, video_id)
|
dntt1/youtube-dl
|
youtube_dl/extractor/jwplatform.py
|
Python
|
unlicense
| 5,161
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from six.moves.urllib.parse import quote, unquote
import tarfile
from xml.sax import saxutils
from time import time
from eventlet import sleep
import zlib
from swift.common.swob import Request, HTTPBadGateway, \
HTTPCreated, HTTPBadRequest, HTTPNotFound, HTTPUnauthorized, HTTPOk, \
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPNotAcceptable, \
HTTPLengthRequired, HTTPException, HTTPServerError, wsgify
from swift.common.utils import get_logger, register_swift_info
from swift.common import constraints
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND, HTTP_CONFLICT
class CreateContainerError(Exception):
def __init__(self, msg, status_int, status):
self.status_int = status_int
self.status = status
super(CreateContainerError, self).__init__(msg)
ACCEPTABLE_FORMATS = ['text/plain', 'application/json', 'application/xml',
'text/xml']
def get_response_body(data_format, data_dict, error_list):
"""
Returns a properly formatted response body according to format. Handles
json and xml, otherwise will return text/plain. Note: xml response does not
include xml declaration.
:params data_format: resulting format
:params data_dict: generated data about results.
:params error_list: list of quoted filenames that failed
"""
if data_format == 'application/json':
data_dict['Errors'] = error_list
return json.dumps(data_dict)
if data_format and data_format.endswith('/xml'):
output = '<delete>\n'
for key in sorted(data_dict):
xml_key = key.replace(' ', '_').lower()
output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
output += '<errors>\n'
output += '\n'.join(
['<object>'
'<name>%s</name><status>%s</status>'
'</object>' % (saxutils.escape(name), status) for
name, status in error_list])
output += '</errors>\n</delete>\n'
return output
output = ''
for key in sorted(data_dict):
output += '%s: %s\n' % (key, data_dict[key])
output += 'Errors:\n'
output += '\n'.join(
['%s, %s' % (name, status)
for name, status in error_list])
return output
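# A minimal sketch (not part of the original middleware): renders one
# hypothetical result dict with get_response_body() in each of the three
# supported formats so the shapes described above are easy to compare.
def _example_response_bodies():
    sample = {'Response Status': '200 OK', 'Number Deleted': 2}
    errors = [('/cont/missing%20obj', '404 Not Found')]
    return {
        'json': get_response_body('application/json', dict(sample), list(errors)),
        'xml': get_response_body('application/xml', dict(sample), list(errors)),
        'plain': get_response_body('text/plain', dict(sample), list(errors)),
    }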
def pax_key_to_swift_header(pax_key):
if (pax_key == u"SCHILY.xattr.user.mime_type" or
pax_key == u"LIBARCHIVE.xattr.user.mime_type"):
return "Content-Type"
elif pax_key.startswith(u"SCHILY.xattr.user.meta."):
useful_part = pax_key[len(u"SCHILY.xattr.user.meta."):]
return "X-Object-Meta-" + useful_part.encode("utf-8")
elif pax_key.startswith(u"LIBARCHIVE.xattr.user.meta."):
useful_part = pax_key[len(u"LIBARCHIVE.xattr.user.meta."):]
return "X-Object-Meta-" + useful_part.encode("utf-8")
else:
# You can get things like atime/mtime/ctime or filesystem ACLs in
# pax headers; those aren't really user metadata. The same goes for
# other, non-user metadata.
return None
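# A minimal sketch (not part of the original middleware): the translation
# performed by pax_key_to_swift_header() for a few representative pax keys.
# The keys below are illustrative only.
def _example_pax_header_mapping():
    keys = (u"SCHILY.xattr.user.mime_type",
            u"SCHILY.xattr.user.meta.lunch",
            u"LIBARCHIVE.xattr.user.meta.dinner",
            u"SCHILY.xattr.user.stuff")
    # the last key maps to None because it is not user metadata
    return [(key, pax_key_to_swift_header(key)) for key in keys]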
class Bulk(object):
"""
Middleware that will do many operations on a single request.
Extract Archive:
Expand tar files into a swift account. Request must be a PUT with the
query parameter ?extract-archive=format specifying the format of archive
file. Accepted formats are tar, tar.gz, and tar.bz2.
For a PUT to the following url:
/v1/AUTH_Account/$UPLOAD_PATH?extract-archive=tar.gz
UPLOAD_PATH is where the files will be expanded to. UPLOAD_PATH can be a
container, a pseudo-directory within a container, or an empty string. The
destination of a file in the archive will be built as follows:
/v1/AUTH_Account/$UPLOAD_PATH/$FILE_PATH
Where FILE_PATH is the file name from the listing in the tar file.
If the UPLOAD_PATH is an empty string, containers will be auto created
accordingly and files in the tar that would not map to any container (files
in the base directory) will be ignored.
Only regular files will be uploaded. Empty directories, symlinks, etc will
not be uploaded.
Content Type:
If the content-type header is set in the extract-archive call, Swift will
assign that content-type to all the underlying files. The bulk middleware
will extract the archive file and send the internal files using PUT
operations using the same headers from the original request
(e.g. auth-tokens, content-Type, etc.). Notice that any middleware call
that follows the bulk middleware does not know if this was a bulk request
or if these were individual requests sent by the user.
In order to make Swift detect the content-type for the files based on the
file extension, the content-type in the extract-archive call should not be
set. Alternatively, it is possible to explicitly tell swift to detect the
content type using this header:
X-Detect-Content-Type:true
For example:
    curl -X PUT http://127.0.0.1/v1/AUTH_acc/cont/?extract-archive=tar -T
backup.tar -H "Content-Type: application/x-tar" -H "X-Auth-Token: xxx"
-H "X-Detect-Content-Type:true"
Assigning Metadata:
The tar file format (1) allows for UTF-8 key/value pairs to be associated
with each file in an archive. If a file has extended attributes, then tar
will store those as key/value pairs. The bulk middleware can read those
extended attributes and convert them to Swift object metadata. Attributes
starting with "user.meta" are converted to object metadata, and
"user.mime_type" is converted to Content-Type.
For example:
setfattr -n user.mime_type -v "application/python-setup" setup.py
setfattr -n user.meta.lunch -v "burger and fries" setup.py
setfattr -n user.meta.dinner -v "baked ziti" setup.py
setfattr -n user.stuff -v "whee" setup.py
Will get translated to headers:
Content-Type: application/python-setup
X-Object-Meta-Lunch: burger and fries
X-Object-Meta-Dinner: baked ziti
The bulk middleware will handle xattrs stored by both GNU and BSD tar (2).
Only xattrs user.mime_type and user.meta.* are processed. Other attributes
are ignored.
Notes:
(1) The POSIX 1003.1-2001 (pax) format. The default format on GNU tar
1.27.1 or later.
(2) Even with pax-format tarballs, different encoders store xattrs slightly
differently; for example, GNU tar stores the xattr "user.userattribute" as
pax header "SCHILY.xattr.user.userattribute", while BSD tar (which uses
libarchive) stores it as "LIBARCHIVE.xattr.user.userattribute".
Response:
The response from bulk operations functions differently from other swift
responses. This is because a short request body sent from the client could
result in many operations on the proxy server and precautions need to be
made to prevent the request from timing out due to lack of activity. To
this end, the client will always receive a 200 OK response, regardless of
the actual success of the call. The body of the response must be parsed to
determine the actual success of the operation. In addition to this the
client may receive zero or more whitespace characters prepended to the
actual response body while the proxy server is completing the request.
The format of the response body defaults to text/plain but can be either
json or xml depending on the Accept header. Acceptable formats are
text/plain, application/json, application/xml, and text/xml. An example
body is as follows:
{"Response Status": "201 Created",
"Response Body": "",
"Errors": [],
"Number Files Created": 10}
If all valid files were uploaded successfully the Response Status will be
201 Created. If any files failed to be created the response code
corresponds to the subrequest's error. Possible codes are 400, 401, 502 (on
server errors), etc. In both cases the response body will specify the
number of files successfully uploaded and a list of the files that failed.
There are proxy logs created for each file (which becomes a subrequest) in
    the tar. The subrequest's proxy log will have a swift.source set to "EA";
    the log's content length will reflect the unzipped size of the file. If
double proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the unexpanded size of the tar.gz).
Bulk Delete:
Will delete multiple objects or containers from their account with a
single request. Responds to POST requests with query parameter
?bulk-delete set. The request url is your storage url. The Content-Type
should be set to text/plain. The body of the POST request will be a
newline separated list of url encoded objects to delete. You can delete
10,000 (configurable) objects per request. The objects specified in the
POST request body must be URL encoded and in the form:
/container_name/obj_name
or for a container (which must be empty at time of delete)
/container_name
The response is similar to extract archive as in every response will be a
200 OK and you must parse the response body for actual results. An example
response is:
{"Number Not Found": 0,
"Response Status": "200 OK",
"Response Body": "",
"Errors": [],
"Number Deleted": 6}
If all items were successfully deleted (or did not exist), the Response
Status will be 200 OK. If any failed to delete, the response code
corresponds to the subrequest's error. Possible codes are 400, 401, 502 (on
server errors), etc. In all cases the response body will specify the number
of items successfully deleted, not found, and a list of those that failed.
The return body will be formatted in the way specified in the request's
Accept header. Acceptable formats are text/plain, application/json,
application/xml, and text/xml.
There are proxy logs created for each object or container (which becomes a
subrequest) that is deleted. The subrequest's proxy log will have a
swift.source set to "BD" the log's content length of 0. If double
proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the list of objects/containers to be deleted).
"""
def __init__(self, app, conf, max_containers_per_extraction=10000,
max_failed_extractions=1000, max_deletes_per_request=10000,
max_failed_deletes=1000, yield_frequency=10, retry_count=0,
retry_interval=1.5, logger=None):
self.app = app
self.logger = logger or get_logger(conf, log_route='bulk')
self.max_containers = max_containers_per_extraction
self.max_failed_extractions = max_failed_extractions
self.max_failed_deletes = max_failed_deletes
self.max_deletes_per_request = max_deletes_per_request
self.yield_frequency = yield_frequency
self.retry_count = retry_count
self.retry_interval = retry_interval
self.max_path_length = constraints.MAX_OBJECT_NAME_LENGTH \
+ constraints.MAX_CONTAINER_NAME_LENGTH + 2
def create_container(self, req, container_path):
"""
Checks if the container exists and if not try to create it.
:params container_path: an unquoted path to a container to be created
:returns: True if created container, False if container exists
:raises: CreateContainerError when unable to create container
"""
new_env = req.environ.copy()
new_env['PATH_INFO'] = container_path
new_env['swift.source'] = 'EA'
new_env['REQUEST_METHOD'] = 'HEAD'
head_cont_req = Request.blank(container_path, environ=new_env)
resp = head_cont_req.get_response(self.app)
if resp.is_success:
return False
if resp.status_int == 404:
new_env = req.environ.copy()
new_env['PATH_INFO'] = container_path
new_env['swift.source'] = 'EA'
new_env['REQUEST_METHOD'] = 'PUT'
create_cont_req = Request.blank(container_path, environ=new_env)
resp = create_cont_req.get_response(self.app)
if resp.is_success:
return True
raise CreateContainerError(
"Create Container Failed: " + container_path,
resp.status_int, resp.status)
def get_objs_to_delete(self, req):
"""
Will populate objs_to_delete with data from request input.
:params req: a Swob request
:returns: a list of the contents of req.body when separated by newline.
:raises: HTTPException on failures
"""
line = ''
data_remaining = True
objs_to_delete = []
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
while data_remaining:
if '\n' in line:
obj_to_delete, line = line.split('\n', 1)
obj_to_delete = obj_to_delete.strip()
objs_to_delete.append(
{'name': unquote(obj_to_delete)})
else:
data = req.body_file.read(self.max_path_length)
if data:
line += data
else:
data_remaining = False
obj_to_delete = line.strip()
if obj_to_delete:
objs_to_delete.append(
{'name': unquote(obj_to_delete)})
if len(objs_to_delete) > self.max_deletes_per_request:
raise HTTPRequestEntityTooLarge(
'Maximum Bulk Deletes: %d per request' %
self.max_deletes_per_request)
if len(line) > self.max_path_length * 2:
raise HTTPBadRequest('Invalid File Name')
return objs_to_delete
def handle_delete_iter(self, req, objs_to_delete=None,
user_agent='BulkDelete', swift_source='BD',
out_content_type='text/plain'):
"""
A generator that can be assigned to a swob Response's app_iter which,
when iterated over, will delete the objects specified in request body.
Will occasionally yield whitespace while request is being processed.
When the request is completed will yield a response body that can be
parsed to determine success. See above documentation for details.
:params req: a swob Request
:params objs_to_delete: a list of dictionaries that specifies the
objects to be deleted. If None, uses self.get_objs_to_delete to
query request.
"""
last_yield = time()
separator = ''
failed_files = []
resp_dict = {'Response Status': HTTPOk().status,
'Response Body': '',
'Number Deleted': 0,
'Number Not Found': 0}
try:
if not out_content_type:
raise HTTPNotAcceptable(request=req)
if out_content_type.endswith('/xml'):
yield '<?xml version="1.0" encoding="UTF-8"?>\n'
try:
vrs, account, _junk = req.split_path(2, 3, True)
except ValueError:
raise HTTPNotFound(request=req)
incoming_format = req.headers.get('Content-Type')
if incoming_format and \
not incoming_format.startswith('text/plain'):
# For now only accept newline separated object names
raise HTTPNotAcceptable(request=req)
if objs_to_delete is None:
objs_to_delete = self.get_objs_to_delete(req)
failed_file_response = {'type': HTTPBadRequest}
req.environ['eventlet.minimum_write_chunk_size'] = 0
for obj_to_delete in objs_to_delete:
if last_yield + self.yield_frequency < time():
separator = '\r\n\r\n'
last_yield = time()
yield ' '
obj_name = obj_to_delete['name']
if not obj_name:
continue
if len(failed_files) >= self.max_failed_deletes:
raise HTTPBadRequest('Max delete failures exceeded')
if obj_to_delete.get('error'):
if obj_to_delete['error']['code'] == HTTP_NOT_FOUND:
resp_dict['Number Not Found'] += 1
else:
failed_files.append([quote(obj_name),
obj_to_delete['error']['message']])
continue
delete_path = '/'.join(['', vrs, account,
obj_name.lstrip('/')])
if not constraints.check_utf8(delete_path):
failed_files.append([quote(obj_name),
HTTPPreconditionFailed().status])
continue
new_env = req.environ.copy()
new_env['PATH_INFO'] = delete_path
del(new_env['wsgi.input'])
new_env['CONTENT_LENGTH'] = 0
new_env['REQUEST_METHOD'] = 'DELETE'
new_env['HTTP_USER_AGENT'] = \
'%s %s' % (req.environ.get('HTTP_USER_AGENT'), user_agent)
new_env['swift.source'] = swift_source
self._process_delete(delete_path, obj_name, new_env, resp_dict,
failed_files, failed_file_response)
if failed_files:
resp_dict['Response Status'] = \
failed_file_response['type']().status
elif not (resp_dict['Number Deleted'] or
resp_dict['Number Not Found']):
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid bulk delete.'
except HTTPException as err:
resp_dict['Response Status'] = err.status
resp_dict['Response Body'] = err.body
except Exception:
self.logger.exception('Error in bulk delete.')
resp_dict['Response Status'] = HTTPServerError().status
yield separator + get_response_body(out_content_type,
resp_dict, failed_files)
def handle_extract_iter(self, req, compress_type,
out_content_type='text/plain'):
"""
A generator that can be assigned to a swob Response's app_iter which,
when iterated over, will extract and PUT the objects pulled from the
request body. Will occasionally yield whitespace while request is being
processed. When the request is completed will yield a response body
that can be parsed to determine success. See above documentation for
details.
:params req: a swob Request
:params compress_type: specifying the compression type of the tar.
Accepts '', 'gz', or 'bz2'
"""
resp_dict = {'Response Status': HTTPCreated().status,
'Response Body': '', 'Number Files Created': 0}
failed_files = []
last_yield = time()
separator = ''
containers_accessed = set()
try:
if not out_content_type:
raise HTTPNotAcceptable(request=req)
if out_content_type.endswith('/xml'):
yield '<?xml version="1.0" encoding="UTF-8"?>\n'
if req.content_length is None and \
req.headers.get('transfer-encoding',
'').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
try:
vrs, account, extract_base = req.split_path(2, 3, True)
except ValueError:
raise HTTPNotFound(request=req)
extract_base = extract_base or ''
extract_base = extract_base.rstrip('/')
tar = tarfile.open(mode='r|' + compress_type,
fileobj=req.body_file)
failed_response_type = HTTPBadRequest
req.environ['eventlet.minimum_write_chunk_size'] = 0
containers_created = 0
while True:
if last_yield + self.yield_frequency < time():
separator = '\r\n\r\n'
last_yield = time()
yield ' '
tar_info = next(tar)
if tar_info is None or \
len(failed_files) >= self.max_failed_extractions:
break
if tar_info.isfile():
obj_path = tar_info.name
if obj_path.startswith('./'):
obj_path = obj_path[2:]
obj_path = obj_path.lstrip('/')
if extract_base:
obj_path = extract_base + '/' + obj_path
if '/' not in obj_path:
continue # ignore base level file
destination = '/'.join(
['', vrs, account, obj_path])
container = obj_path.split('/', 1)[0]
if not constraints.check_utf8(destination):
failed_files.append(
[quote(obj_path[:self.max_path_length]),
HTTPPreconditionFailed().status])
continue
if tar_info.size > constraints.MAX_FILE_SIZE:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPRequestEntityTooLarge().status])
continue
container_failure = None
if container not in containers_accessed:
cont_path = '/'.join(['', vrs, account, container])
try:
if self.create_container(req, cont_path):
containers_created += 1
if containers_created > self.max_containers:
raise HTTPBadRequest(
'More than %d containers to create '
'from tar.' % self.max_containers)
except CreateContainerError as err:
# the object PUT to this container still may
# succeed if acls are set
container_failure = [
quote(cont_path[:self.max_path_length]),
err.status]
if err.status_int == HTTP_UNAUTHORIZED:
raise HTTPUnauthorized(request=req)
except ValueError:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPBadRequest().status])
continue
tar_file = tar.extractfile(tar_info)
new_env = req.environ.copy()
new_env['REQUEST_METHOD'] = 'PUT'
new_env['wsgi.input'] = tar_file
new_env['PATH_INFO'] = destination
new_env['CONTENT_LENGTH'] = tar_info.size
new_env['swift.source'] = 'EA'
new_env['HTTP_USER_AGENT'] = \
'%s BulkExpand' % req.environ.get('HTTP_USER_AGENT')
create_obj_req = Request.blank(destination, new_env)
for pax_key, pax_value in tar_info.pax_headers.items():
header_name = pax_key_to_swift_header(pax_key)
if header_name:
# Both pax_key and pax_value are unicode
# strings; the key is already UTF-8 encoded, but
# we still have to encode the value.
create_obj_req.headers[header_name] = \
pax_value.encode("utf-8")
resp = create_obj_req.get_response(self.app)
containers_accessed.add(container)
if resp.is_success:
resp_dict['Number Files Created'] += 1
else:
if container_failure:
failed_files.append(container_failure)
if resp.status_int == HTTP_UNAUTHORIZED:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPUnauthorized().status])
raise HTTPUnauthorized(request=req)
if resp.status_int // 100 == 5:
failed_response_type = HTTPBadGateway
failed_files.append([
quote(obj_path[:self.max_path_length]),
resp.status])
if failed_files:
resp_dict['Response Status'] = failed_response_type().status
elif not resp_dict['Number Files Created']:
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid Tar File: No Valid Files'
except HTTPException as err:
resp_dict['Response Status'] = err.status
resp_dict['Response Body'] = err.body
except (tarfile.TarError, zlib.error) as tar_error:
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid Tar File: %s' % tar_error
except Exception:
self.logger.exception('Error in extract archive.')
resp_dict['Response Status'] = HTTPServerError().status
yield separator + get_response_body(
out_content_type, resp_dict, failed_files)
def _process_delete(self, delete_path, obj_name, env, resp_dict,
failed_files, failed_file_response, retry=0):
delete_obj_req = Request.blank(delete_path, env)
resp = delete_obj_req.get_response(self.app)
if resp.status_int // 100 == 2:
resp_dict['Number Deleted'] += 1
elif resp.status_int == HTTP_NOT_FOUND:
resp_dict['Number Not Found'] += 1
elif resp.status_int == HTTP_UNAUTHORIZED:
failed_files.append([quote(obj_name),
HTTPUnauthorized().status])
elif resp.status_int == HTTP_CONFLICT and \
self.retry_count > 0 and self.retry_count > retry:
retry += 1
sleep(self.retry_interval ** retry)
self._process_delete(delete_path, obj_name, env, resp_dict,
failed_files, failed_file_response,
retry)
else:
if resp.status_int // 100 == 5:
failed_file_response['type'] = HTTPBadGateway
failed_files.append([quote(obj_name), resp.status])
@wsgify
def __call__(self, req):
extract_type = req.params.get('extract-archive')
resp = None
if extract_type is not None and req.method == 'PUT':
archive_type = {
'tar': '', 'tar.gz': 'gz',
'tar.bz2': 'bz2'}.get(extract_type.lower().strip('.'))
if archive_type is not None:
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.handle_extract_iter(
req, archive_type, out_content_type=out_content_type)
else:
resp = HTTPBadRequest("Unsupported archive format")
if 'bulk-delete' in req.params and req.method in ['POST', 'DELETE']:
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.handle_delete_iter(
req, out_content_type=out_content_type)
return resp or self.app
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
max_containers_per_extraction = \
int(conf.get('max_containers_per_extraction', 10000))
max_failed_extractions = int(conf.get('max_failed_extractions', 1000))
max_deletes_per_request = int(conf.get('max_deletes_per_request', 10000))
max_failed_deletes = int(conf.get('max_failed_deletes', 1000))
yield_frequency = int(conf.get('yield_frequency', 10))
retry_count = int(conf.get('delete_container_retry_count', 0))
retry_interval = 1.5
register_swift_info(
'bulk_upload',
max_containers_per_extraction=max_containers_per_extraction,
max_failed_extractions=max_failed_extractions)
register_swift_info(
'bulk_delete',
max_deletes_per_request=max_deletes_per_request,
max_failed_deletes=max_failed_deletes)
def bulk_filter(app):
return Bulk(
app, conf,
max_containers_per_extraction=max_containers_per_extraction,
max_failed_extractions=max_failed_extractions,
max_deletes_per_request=max_deletes_per_request,
max_failed_deletes=max_failed_deletes,
yield_frequency=yield_frequency,
retry_count=retry_count,
retry_interval=retry_interval)
return bulk_filter
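# A minimal sketch (not part of the original module): how the paste.deploy
# factory above is typically wired up in a test or script. The conf values
# and the no-op WSGI app are hypothetical.
if __name__ == '__main__':
    def _noop_app(env, start_response):
        start_response('200 OK', [('Content-Length', '0')])
        return ['']
    example_conf = {'max_deletes_per_request': '1000', 'yield_frequency': '10'}
    bulk_mw = filter_factory({}, **example_conf)(_noop_app)
    print(bulk_mw.max_deletes_per_request)  # 1000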
|
thiagodasilva/swift
|
swift/common/middleware/bulk.py
|
Python
|
apache-2.0
| 30,792
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains a Google Speech to Text operator.
"""
from typing import Optional
from google.api_core.retry import Retry
from google.cloud.speech_v1.types import RecognitionConfig
from airflow import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.speech_to_text import CloudSpeechToTextHook, RecognitionAudio
from airflow.utils.decorators import apply_defaults
class CloudSpeechToTextRecognizeSpeechOperator(BaseOperator):
"""
Recognizes speech from audio file and returns it as text.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:CloudSpeechToTextRecognizeSpeechOperator`
:param config: information to the recognizer that specifies how to process the request. See more:
https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/types.html#google.cloud.speech_v1.types.RecognitionConfig
:type config: dict or google.cloud.speech_v1.types.RecognitionConfig
:param audio: audio data to be recognized. See more:
https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/types.html#google.cloud.speech_v1.types.RecognitionAudio
:type audio: dict or google.cloud.speech_v1.types.RecognitionAudio
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is
used.
:type project_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
:param retry: (Optional) A retry object used to retry requests. If None is specified,
requests will not be retried.
:type retry: google.api_core.retry.Retry
:param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete.
Note that if retry is specified, the timeout applies to each individual attempt.
:type timeout: float
"""
# [START gcp_speech_to_text_synthesize_template_fields]
template_fields = ("audio", "config", "project_id", "gcp_conn_id", "timeout")
# [END gcp_speech_to_text_synthesize_template_fields]
@apply_defaults
def __init__(
self,
audio: RecognitionAudio,
config: RecognitionConfig,
project_id: Optional[str] = None,
gcp_conn_id: str = "google_cloud_default",
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
*args,
**kwargs
) -> None:
self.audio = audio
self.config = config
self.project_id = project_id
self.gcp_conn_id = gcp_conn_id
self.retry = retry
self.timeout = timeout
self._validate_inputs()
super().__init__(*args, **kwargs)
def _validate_inputs(self):
if self.audio == "":
raise AirflowException("The required parameter 'audio' is empty")
if self.config == "":
raise AirflowException("The required parameter 'config' is empty")
def execute(self, context):
hook = CloudSpeechToTextHook(gcp_conn_id=self.gcp_conn_id)
return hook.recognize_speech(
config=self.config, audio=self.audio, retry=self.retry, timeout=self.timeout
)
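# A minimal usage sketch (not part of the provider module): how this operator
# is typically declared inside a DAG. The DAG id, recognition config and
# bucket URI below are hypothetical.
if __name__ == '__main__':
    from airflow import DAG
    from airflow.utils.dates import days_ago

    with DAG("example_gcp_speech_to_text",
             start_date=days_ago(1),
             schedule_interval=None) as dag:
        recognize_speech = CloudSpeechToTextRecognizeSpeechOperator(
            task_id="recognize_speech",
            config={"encoding": "LINEAR16", "language_code": "en_US"},
            audio={"uri": "gs://example-bucket/sample.raw"},
        )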
|
wileeam/airflow
|
airflow/providers/google/cloud/operators/speech_to_text.py
|
Python
|
apache-2.0
| 4,187
|
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
from django.conf.urls import url
from . import views
urlpatterns = [
url('^get_data?$', views.get_data, name='events_get_data'),
url(r'(?P<event_id>\d+)/$', views.detail, name='events_detail'),
url('^$', views.view_events, name='events'),
]
|
krux/graphite-web
|
webapp/graphite/events/urls.py
|
Python
|
apache-2.0
| 832
|
# Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class TenantsNegativeTestJSON(base.BaseIdentityV2AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('ca9bb202-63dd-4240-8a07-8ef9c19c04bb')
def test_list_tenants_by_unauthorized_user(self):
# Non-administrator user should not be able to list tenants
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.list_tenants)
@test.attr(type=['negative'])
@test.idempotent_id('df33926c-1c96-4d8d-a762-79cc6b0c3cf4')
def test_list_tenant_request_without_token(self):
# Request to list tenants without a valid token should fail
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.list_tenants)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('162ba316-f18b-4987-8c0c-fd9140cd63ed')
def test_tenant_delete_by_unauthorized_user(self):
# Non-administrator user should not be able to delete a tenant
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.delete_tenant, tenant['id'])
@test.attr(type=['negative'])
@test.idempotent_id('e450db62-2e9d-418f-893a-54772d6386b1')
def test_tenant_delete_request_without_token(self):
# Request to delete a tenant without a valid token should fail
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.delete_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('9c9a2aed-6e3c-467a-8f5c-89da9d1b516b')
def test_delete_non_existent_tenant(self):
# Attempt to delete a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.client.delete_tenant,
str(uuid.uuid4().hex))
@test.attr(type=['negative'])
@test.idempotent_id('af16f44b-a849-46cb-9f13-a751c388f739')
def test_tenant_create_duplicate(self):
# Tenant names should be unique
tenant_name = data_utils.rand_name(name='tenant')
body = self.client.create_tenant(tenant_name)['tenant']
tenant = body
self.data.tenants.append(tenant)
tenant1_id = body.get('id')
self.addCleanup(self.client.delete_tenant, tenant1_id)
self.addCleanup(self.data.tenants.remove, tenant)
self.assertRaises(lib_exc.Conflict, self.client.create_tenant,
tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('d26b278a-6389-4702-8d6e-5980d80137e0')
def test_create_tenant_by_unauthorized_user(self):
# Non-administrator user should not be authorized to create a tenant
tenant_name = data_utils.rand_name(name='tenant')
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.create_tenant, tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('a3ee9d7e-6920-4dd5-9321-d4b2b7f0a638')
def test_create_tenant_request_without_token(self):
# Create tenant request without a token should not be authorized
tenant_name = data_utils.rand_name(name='tenant')
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.create_tenant,
tenant_name)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('5a2e4ca9-b0c0-486c-9c48-64a94fba2395')
def test_create_tenant_with_empty_name(self):
# Tenant name should not be empty
self.assertRaises(lib_exc.BadRequest, self.client.create_tenant,
name='')
@test.attr(type=['negative'])
@test.idempotent_id('2ff18d1e-dfe3-4359-9dc3-abf582c196b9')
def test_create_tenants_name_length_over_64(self):
# Tenant name length should not be greater than 64 characters
tenant_name = 'a' * 65
self.assertRaises(lib_exc.BadRequest, self.client.create_tenant,
tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('bd20dc2a-9557-4db7-b755-f48d952ad706')
def test_update_non_existent_tenant(self):
# Attempt to update a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.client.update_tenant,
str(uuid.uuid4().hex))
@test.attr(type=['negative'])
@test.idempotent_id('41704dc5-c5f7-4f79-abfa-76e6fedc570b')
def test_tenant_update_by_unauthorized_user(self):
# Non-administrator user should not be able to update a tenant
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.update_tenant, tenant['id'])
@test.attr(type=['negative'])
@test.idempotent_id('7a421573-72c7-4c22-a98e-ce539219c657')
def test_tenant_update_request_without_token(self):
# Request to update a tenant without a valid token should fail
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.update_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()
|
izadorozhna/tempest
|
tempest/api/identity/admin/v2/test_tenant_negative.py
|
Python
|
apache-2.0
| 6,865
|
# Copyright (c) 2013-2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Kyle Mestery, Cisco Systems, Inc.
# @author: Dave Tucker, Hewlett-Packard Development Company L.P.
import time
from oslo.config import cfg
import requests
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.extensions import portbindings
from neutron.openstack.common import excutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log
from neutron.plugins.common import constants
from neutron.plugins.ml2 import driver_api as api
LOG = log.getLogger(__name__)
ODL_NETWORK = 'network'
ODL_NETWORKS = 'networks'
ODL_SUBNET = 'subnet'
ODL_SUBNETS = 'subnets'
ODL_PORT = 'port'
ODL_PORTS = 'ports'
not_found_exception_map = {ODL_NETWORKS: n_exc.NetworkNotFound,
ODL_SUBNETS: n_exc.SubnetNotFound,
ODL_PORTS: n_exc.PortNotFound}
odl_opts = [
cfg.StrOpt('url',
help=_("HTTP URL of OpenDaylight REST interface.")),
cfg.StrOpt('username',
help=_("HTTP username for authentication")),
cfg.StrOpt('password', secret=True,
help=_("HTTP password for authentication")),
cfg.IntOpt('timeout', default=10,
help=_("HTTP timeout in seconds.")),
cfg.IntOpt('session_timeout', default=30,
help=_("Tomcat session timeout in minutes.")),
]
cfg.CONF.register_opts(odl_opts, "ml2_odl")
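# A hypothetical [ml2_odl] section of ml2_conf.ini matching the options
# registered above (all values are illustrative only):
#
#   [ml2_odl]
#   url = http://192.168.56.1:8080/controller/nb/v2/neutron
#   username = admin
#   password = admin
#   timeout = 10
#   session_timeout = 30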
def try_del(d, keys):
"""Ignore key errors when deleting from a dictionary."""
for key in keys:
try:
del d[key]
except KeyError:
pass
class JsessionId(requests.auth.AuthBase):
"""Attaches the JSESSIONID and JSESSIONIDSSO cookies to an HTTP Request.
    If the cookies are not available or the session has expired, a new
    set of cookies is obtained.
"""
def __init__(self, url, username, password):
"""Initialization function for JsessionId."""
        # NOTE(kmestery) The 'limit' parameter is intended to limit how much
# data is returned from ODL. This is not implemented in the Hydrogen
# release of OpenDaylight, but will be implemented in the Helium
# timeframe. Hydrogen will silently ignore this value.
self.url = str(url) + '/' + ODL_NETWORKS + '?limit=1'
self.username = username
self.password = password
self.auth_cookies = None
self.last_request = None
self.expired = None
self.session_timeout = cfg.CONF.ml2_odl.session_timeout * 60
self.session_deadline = 0
def obtain_auth_cookies(self):
"""Make a REST call to obtain cookies for ODL authenticiation."""
r = requests.get(self.url, auth=(self.username, self.password))
r.raise_for_status()
jsessionid = r.cookies.get('JSESSIONID')
jsessionidsso = r.cookies.get('JSESSIONIDSSO')
if jsessionid and jsessionidsso:
self.auth_cookies = dict(JSESSIONID=jsessionid,
JSESSIONIDSSO=jsessionidsso)
def __call__(self, r):
"""Verify timestamp for Tomcat session timeout."""
if time.time() > self.session_deadline:
self.obtain_auth_cookies()
self.session_deadline = time.time() + self.session_timeout
r.prepare_cookies(self.auth_cookies)
return r
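# A minimal sketch (not part of the original driver): attaching the
# JsessionId auth object above to a plain requests call against the ODL
# northbound REST API. The URL and credentials are hypothetical.
def _example_jsessionid_call():
    auth = JsessionId('http://127.0.0.1:8080/controller/nb/v2/neutron',
                      'admin', 'admin')
    return requests.get(
        'http://127.0.0.1:8080/controller/nb/v2/neutron/' + ODL_NETWORKS,
        auth=auth)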
class OpenDaylightMechanismDriver(api.MechanismDriver):
"""Mechanism Driver for OpenDaylight.
This driver was a port from the Tail-F NCS MechanismDriver. The API
exposed by ODL is slightly different from the API exposed by NCS,
but the general concepts are the same.
"""
auth = None
out_of_sync = True
def initialize(self):
self.url = cfg.CONF.ml2_odl.url
self.timeout = cfg.CONF.ml2_odl.timeout
self.username = cfg.CONF.ml2_odl.username
self.password = cfg.CONF.ml2_odl.password
self.auth = JsessionId(self.url, self.username, self.password)
self.vif_type = portbindings.VIF_TYPE_OVS
self.vif_details = {portbindings.CAP_PORT_FILTER: True}
# Postcommit hooks are used to trigger synchronization.
def create_network_postcommit(self, context):
self.synchronize('create', ODL_NETWORKS, context)
def update_network_postcommit(self, context):
self.synchronize('update', ODL_NETWORKS, context)
def delete_network_postcommit(self, context):
self.synchronize('delete', ODL_NETWORKS, context)
def create_subnet_postcommit(self, context):
self.synchronize('create', ODL_SUBNETS, context)
def update_subnet_postcommit(self, context):
self.synchronize('update', ODL_SUBNETS, context)
def delete_subnet_postcommit(self, context):
self.synchronize('delete', ODL_SUBNETS, context)
def create_port_postcommit(self, context):
self.synchronize('create', ODL_PORTS, context)
def update_port_postcommit(self, context):
self.synchronize('update', ODL_PORTS, context)
def delete_port_postcommit(self, context):
self.synchronize('delete', ODL_PORTS, context)
def synchronize(self, operation, object_type, context):
"""Synchronize ODL with Neutron following a configuration change."""
if self.out_of_sync:
self.sync_full(context)
else:
self.sync_object(operation, object_type, context)
def filter_create_network_attributes(self, network, context, dbcontext):
"""Filter out network attributes not required for a create."""
try_del(network, ['status', 'subnets'])
def filter_create_subnet_attributes(self, subnet, context, dbcontext):
"""Filter out subnet attributes not required for a create."""
pass
def filter_create_port_attributes(self, port, context, dbcontext):
"""Filter out port attributes not required for a create."""
self.add_security_groups(context, dbcontext, port)
# TODO(kmestery): Converting to uppercase due to ODL bug
# https://bugs.opendaylight.org/show_bug.cgi?id=477
port['mac_address'] = port['mac_address'].upper()
try_del(port, ['status'])
def sync_resources(self, resource_name, collection_name, resources,
context, dbcontext, attr_filter):
"""Sync objects from Neutron over to OpenDaylight.
This will handle syncing networks, subnets, and ports from Neutron to
OpenDaylight. It also filters out the requisite items which are not
valid for create API operations.
"""
to_be_synced = []
for resource in resources:
try:
urlpath = collection_name + '/' + resource['id']
self.sendjson('get', urlpath, None)
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
attr_filter(resource, context, dbcontext)
to_be_synced.append(resource)
key = resource_name if len(to_be_synced) == 1 else collection_name
# 400 errors are returned if an object exists, which we ignore.
self.sendjson('post', collection_name, {key: to_be_synced}, [400])
@utils.synchronized('odl-sync-full')
def sync_full(self, context):
"""Resync the entire database to ODL.
Transition to the in-sync state on success.
        Note: we only allow a single thread in here at a time.
"""
if not self.out_of_sync:
return
dbcontext = context._plugin_context
networks = context._plugin.get_networks(dbcontext)
subnets = context._plugin.get_subnets(dbcontext)
ports = context._plugin.get_ports(dbcontext)
self.sync_resources(ODL_NETWORK, ODL_NETWORKS, networks,
context, dbcontext,
self.filter_create_network_attributes)
self.sync_resources(ODL_SUBNET, ODL_SUBNETS, subnets,
context, dbcontext,
self.filter_create_subnet_attributes)
self.sync_resources(ODL_PORT, ODL_PORTS, ports,
context, dbcontext,
self.filter_create_port_attributes)
self.out_of_sync = False
def filter_update_network_attributes(self, network, context, dbcontext):
"""Filter out network attributes for an update operation."""
try_del(network, ['id', 'status', 'subnets', 'tenant_id'])
def filter_update_subnet_attributes(self, subnet, context, dbcontext):
"""Filter out subnet attributes for an update operation."""
try_del(subnet, ['id', 'network_id', 'ip_version', 'cidr',
'allocation_pools', 'tenant_id'])
def filter_update_port_attributes(self, port, context, dbcontext):
"""Filter out port attributes for an update operation."""
self.add_security_groups(context, dbcontext, port)
try_del(port, ['network_id', 'id', 'status', 'mac_address',
'tenant_id', 'fixed_ips'])
create_object_map = {ODL_NETWORKS: filter_create_network_attributes,
ODL_SUBNETS: filter_create_subnet_attributes,
ODL_PORTS: filter_create_port_attributes}
update_object_map = {ODL_NETWORKS: filter_update_network_attributes,
ODL_SUBNETS: filter_update_subnet_attributes,
ODL_PORTS: filter_update_port_attributes}
def sync_single_resource(self, operation, object_type, obj_id,
context, attr_filter_create, attr_filter_update):
"""Sync over a single resource from Neutron to OpenDaylight.
Handle syncing a single operation over to OpenDaylight, and correctly
filter attributes out which are not required for the requisite
operation (create or update) being handled.
"""
dbcontext = context._plugin_context
if operation == 'create':
urlpath = object_type
method = 'post'
else:
urlpath = object_type + '/' + obj_id
method = 'put'
try:
obj_getter = getattr(context._plugin, 'get_%s' % object_type[:-1])
resource = obj_getter(dbcontext, obj_id)
except not_found_exception_map[object_type]:
LOG.debug(_('%(object_type)s not found (%(obj_id)s)'),
{'object_type': object_type.capitalize(),
'obj_id': obj_id})
else:
if operation == 'create':
attr_filter_create(self, resource, context, dbcontext)
elif operation == 'update':
attr_filter_update(self, resource, context, dbcontext)
try:
# 400 errors are returned if an object exists, which we ignore.
self.sendjson(method, urlpath, {object_type[:-1]: resource},
[400])
except Exception:
with excutils.save_and_reraise_exception():
self.out_of_sync = True
def sync_object(self, operation, object_type, context):
"""Synchronize the single modified record to ODL."""
obj_id = context.current['id']
self.sync_single_resource(operation, object_type, obj_id, context,
self.create_object_map[object_type],
self.update_object_map[object_type])
def add_security_groups(self, context, dbcontext, port):
"""Populate the 'security_groups' field with entire records."""
groups = [context._plugin.get_security_group(dbcontext, sg)
for sg in port['security_groups']]
port['security_groups'] = groups
def sendjson(self, method, urlpath, obj, ignorecodes=[]):
"""Send json to the OpenDaylight controller."""
headers = {'Content-Type': 'application/json'}
data = jsonutils.dumps(obj, indent=2) if obj else None
if self.url:
url = '/'.join([self.url, urlpath])
LOG.debug(_('ODL-----> sending URL (%s) <-----ODL') % url)
LOG.debug(_('ODL-----> sending JSON (%s) <-----ODL') % obj)
r = requests.request(method, url=url,
headers=headers, data=data,
auth=self.auth, timeout=self.timeout)
# ignorecodes contains a list of HTTP error codes to ignore.
if r.status_code in ignorecodes:
return
r.raise_for_status()
def bind_port(self, context):
LOG.debug(_("Attempting to bind port %(port)s on "
"network %(network)s"),
{'port': context.current['id'],
'network': context.network.current['id']})
for segment in context.network.network_segments:
if self.check_segment(segment):
context.set_binding(segment[api.ID],
self.vif_type,
self.vif_details)
LOG.debug(_("Bound using segment: %s"), segment)
return
else:
LOG.debug(_("Refusing to bind port for segment ID %(id)s, "
"segment %(seg)s, phys net %(physnet)s, and "
"network type %(nettype)s"),
{'id': segment[api.ID],
'seg': segment[api.SEGMENTATION_ID],
'physnet': segment[api.PHYSICAL_NETWORK],
'nettype': segment[api.NETWORK_TYPE]})
def validate_port_binding(self, context):
if self.check_segment(context.bound_segment):
LOG.debug(_('Binding valid.'))
return True
LOG.warning(_("Binding invalid for port: %s"), context.current)
def unbind_port(self, context):
LOG.debug(_("Unbinding port %(port)s on "
"network %(network)s"),
{'port': context.current['id'],
'network': context.network.current['id']})
def check_segment(self, segment):
"""Verify a segment is valid for the OpenDaylight MechanismDriver.
Verify the requested segment is supported by ODL and return True or
False to indicate this to callers.
"""
network_type = segment[api.NETWORK_TYPE]
return network_type in [constants.TYPE_LOCAL, constants.TYPE_GRE,
constants.TYPE_VXLAN]
|
Juniper/contrail-dev-neutron
|
neutron/plugins/ml2/drivers/mechanism_odl.py
|
Python
|
apache-2.0
| 15,181
|
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, division, print_function
from functools import partial
import locale
import logging
import os
import ctypes
from ctypes import c_char_p, c_wchar_p
from ctypes import c_int, c_longlong
from ctypes import c_size_t, c_ssize_t
from ctypes import c_void_p
from ctypes import POINTER
from ctypes import create_string_buffer
from commoncode import command
from commoncode import paths
from commoncode import system
from commoncode import fileutils
import extractcode
from extractcode import ExtractError
from extractcode import ExtractErrorPasswordProtected
logger = logging.getLogger(__name__)
DEBUG = False
# logging.basicConfig(level=logging.DEBUG)
"""
libarchive2 is a minimal and specialized wrapper around a vendored libarchive
archive extraction library. It is inspired from several libarchive bindings
such as libarchive_c and python-libarchive for Python and others for Ruby.
"""
def load_lib():
"""
Return the loaded libarchive shared library object from vendored paths.
"""
root_dir = command.get_base_dirs(extractcode.root_dir)[0]
_bin_dir, lib_dir = command.get_bin_lib_dirs(root_dir)
libarchive = os.path.join(lib_dir, 'libarchive' + system.lib_ext)
# add lib path to the front of the PATH env var
if lib_dir not in os.environ['PATH'].split(os.pathsep):
new_path = os.pathsep.join([lib_dir, os.environ['PATH']])
os.environ['PATH'] = new_path
if os.path.exists(libarchive):
lib = ctypes.CDLL(libarchive)
if lib and lib._name:
return lib
raise ImportError('Failed to load libarchive: %(libarchive)r' % locals())
# NOTE: this is important to avoid timezone differences
os.environ['TZ'] = 'UTC'
# NOTE: this is important to avoid locale-specific errors on various OS
locale.setlocale(locale.LC_ALL, '')
libarchive = load_lib()
def extract(location, target_dir):
"""
Extract files from a libarchive-supported archive file at `location` in the
`target_dir`.
Return a list of warning messages if any.
Raise Exceptions on errors.
"""
assert location
assert target_dir
abs_location = os.path.abspath(os.path.expanduser(location))
abs_target_dir = os.path.abspath(os.path.expanduser(target_dir))
warnings = []
for entry in list_entries(abs_location):
if not (entry.isdir or entry.isfile):
continue
_target_path = entry.write(abs_target_dir, transform_path=paths.resolve)
if entry.warnings:
msgs = [w.strip('"\' ') for w in entry.warnings if w and w.strip('"\' ')]
msgs = msgs or ['No message provided']
formatted = entry.path + ': ' + '\n'.join(msgs)
if formatted not in warnings:
warnings.append(formatted)
return warnings
def list_entries(location):
"""
Return a list entries for archive file at `location`.
"""
assert location
abs_location = os.path.abspath(os.path.expanduser(location))
assert os.path.isfile(abs_location)
# TODO: harden error handling
with Archive(abs_location) as archive:
for entry in archive:
yield entry
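# A minimal usage sketch (not part of the original module): extract a
# hypothetical archive with the extract() helper above and log any
# per-entry warnings it collects.
def _example_extract(location='/tmp/sample.tar.gz', target_dir='/tmp/sample-out'):
    fileutils.create_dir(target_dir)
    for warning in extract(location, target_dir):
        logger.warning(warning)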
class Archive(object):
"""
Represent an iterable archive containing Entries.
    This is a context manager that can be used this way:
with Archive(location='/some/path') as arch:
for entry in arch:
do something with entry
"""
def __init__(self, location, uncompress=True, extract=True,
block_size=10240):
"""
Build an Archive object for file at `location`.
Must be used as a context manager using the with syntax:
with Archive('some.tgz') as archive:
for entry in archive:
                # do something with entry
If `uncompress` is True, the archive will be uncompressed first (e.g.
a tar.gz will be ungzipped).
If `extract` is True, the archive will be extracted (e.g. a cpio
will be extracted).
        If both are True, the archive will be uncompressed then extracted (e.g. a
tar.xz will be unxz'ed then untarred).
"""
msg = 'At least one of `uncompress` or `extract` flag is required.'
assert uncompress or extract, msg
self.location = location
self.uncompress = uncompress
self.extract = extract
self.block_size = block_size
# pointer to the libarchive structure
self.archive_struct = None
def open(self):
"""
Open the archive for reading.
You must call close() when done to free up resources and avoid leaks.
Or use instead the Archive class as a context manager with `with`.
"""
# first close any existing opened struct for this file
self.close()
self.archive_struct = archive_reader()
if self.uncompress:
use_all_filters(self.archive_struct)
        if self.extract:
use_all_formats(self.archive_struct)
try:
# TODO: ensure that we have proper exceptions raised
open_file(self.archive_struct, self.location, self.block_size)
except:
open_file_w(self.archive_struct, self.location, self.block_size)
return self
def close(self):
if self.archive_struct:
free_archive(self.archive_struct)
self.archive_struct = None
def iter(self):
assert self.archive_struct, 'Archive must be used as a context manager.'
entry_struct = new_entry()
try:
while 1:
try:
r = next_entry(self.archive_struct, entry_struct)
if r == ARCHIVE_EOF:
return
e = Entry(self, entry_struct)
                except ArchiveWarning as aw:
if aw.msg and aw.msg not in e.warnings:
e.warnings.append(aw.msg)
yield e
finally:
if entry_struct:
free_entry(entry_struct)
def __enter__(self):
return self.open()
def __exit__(self, _type, _value, _traceback):
return self.close()
def __iter__(self):
return self.iter()
class Entry(object):
"""
Represent an Archive Entry, typically a file or a directory. The attribute
    names are loosely based on the stdlib tarfile module Tarfile attributes. Some
attributes are not handled on purpose because they are never used: things
such as modes/perms/users/groups are never restored by design to ensure
extracted files are readable/writable and owned by the extracting user.
"""
def __init__(self, archive, entry_struct):
self.archive = archive
self.entry_struct = entry_struct
self.filetype = entry_type(self.entry_struct)
self.isfile = self.filetype & AE_IFMT == AE_IFREG
self.isdir = self.filetype & AE_IFMT == AE_IFDIR
self.isblk = self.filetype & AE_IFMT == AE_IFBLK
self.ischr = self.filetype & AE_IFMT == AE_IFCHR
self.isfifo = self.filetype & AE_IFMT == AE_IFIFO
self.issock = self.filetype & AE_IFMT == AE_IFSOCK
self.isspecial = self.ischr or self.isblk or self.isfifo or self.issock
# bytes
self.size = entry_size(self.entry_struct) or 0
# sec since epoch
self.time = entry_time(self.entry_struct) or 0
# all paths are byte strings not unicode
self.path = self._path_bytes(entry_path, entry_path_w)
self.issym = self.filetype & AE_IFMT == AE_IFLNK
# FIXME: could there be cases with link path and symlink is False?
if self.issym:
self.symlink_path = self._path_bytes(symlink_path, symlink_path_w)
self.hardlink_path = self._path_bytes(hardlink_path, hardlink_path_w)
# hardlinks do not have a filetype: we test the path instead
self.islnk = bool(self.hardlink_path)
self.warnings = []
def _path_bytes(self, func, func_w):
"""
Call path function `func` then call `func_w` if `func` does not provide
a path. Return a path as a byte string converted to UTF-8-encoded bytes
if this is unicode.
"""
path = func(self.entry_struct)
if not path:
path = func_w(self.entry_struct)
if isinstance(path, unicode):
path = path.encode('utf-8')
return path
def write(self, target_dir, transform_path=lambda x: x):
"""
        Write this entry to a file or directory created relative to `target_dir`.
        Return the path where the file or directory was written, or None if
        nothing was written to disk.
`transform_path` is a callable taking a path and returning a transformed
path. such as resolving relative paths, transliterating non portable
characters or other transformations. The default is a no-op lambda.
"""
if not self.archive.archive_struct:
raise ArchiveErrorIllegalOperationOnClosedArchive()
# skip links and special files
if not (self.isfile or self.isdir):
return
abs_target_dir = os.path.abspath(os.path.expanduser(target_dir))
# TODO: return some warning when original path has been transformed
clean_path = transform_path(self.path)
if self.isdir:
dir_path = os.path.join(abs_target_dir, clean_path)
fileutils.create_dir(dir_path)
return dir_path
# here isfile=True
try:
# create parent directories if needed
# TODO: also rename directories, segment by segment?
target_path = os.path.join(abs_target_dir, clean_path)
parent_path = os.path.dirname(target_path)
fileutils.create_dir(parent_path)
# TODO: return some warning when original path has been renamed?
unique_path = extractcode.new_name(target_path, is_dir=False)
chunk_len = 10240
sbuffer = create_string_buffer(chunk_len)
with open(unique_path, 'wb') as target:
chunk_size = 1
while chunk_size:
chunk_size = read_entry_data(self.archive.archive_struct,
sbuffer, chunk_len)
data = sbuffer.raw[0:chunk_size]
target.write(data)
os.utime(unique_path, (self.time, self.time))
return target_path
        except ArchiveWarning as aw:
msg = str(aw).strip('\'" ') or 'No message provided.'
if msg not in self.warnings:
self.warnings.append(msg)
return target_path
def __repr__(self):
return ('Entry('
'path=%(path)r,'
'size=%(size)r,'
'isfile=%(isfile)r,'
'isdir=%(isdir)r,'
'islnk=%(islnk)r,'
'issym=%(issym)r,'
'isspecial=%(isspecial)r,'
')') % self.__dict__
class ArchiveException(ExtractError):
def __init__(self, rc=None, archive_struct=None, archive_func=None, root_ex=None):
self.root_ex = root_ex
if root_ex and isinstance(root_ex, ArchiveException):
self.rc = root_ex.rc
self.errno = root_ex.errno
self.msg = str(root_ex).strip('\'" ')
self.func = root_ex.func
else:
self.rc = rc
self.errno = archive_struct and errno(archive_struct) or None
self.msg = archive_struct and err_msg(archive_struct).strip('\'" ') or None
self.func = archive_func and archive_func.__name__ or None
def __str__(self):
msg = '%(msg)r'
if DEBUG:
msg += ': in function %(func)r with rc=%(rc)r, errno=%(errno)r, '
msg += 'root_ex=%(root_ex)s'
return msg % self.__dict__
class ArchiveWarning(ArchiveException):
pass
class ArchiveErrorRetryable(ArchiveException):
pass
class ArchiveError(ArchiveException):
pass
class ArchiveErrorFatal(ArchiveException):
pass
class ArchiveErrorFailedToWriteEntry(ArchiveException):
pass
class ArchiveErrorPasswordProtected(ArchiveException, ExtractErrorPasswordProtected):
pass
class ArchiveErrorIllegalOperationOnClosedArchive(ArchiveException):
pass
##################################
# C ctypes interface to libarchive
##################################
def errcheck(rc, archive_func, args, null=False):
"""
    ctypes error check handler for functions returning an int, or null when
    `null` is True.
"""
if null:
if rc is None:
archive_struct = args and len(args) > 1 and args[0] or None
raise ArchiveError(rc, archive_struct, archive_func)
else:
return rc
if rc >= ARCHIVE_OK:
return rc
archive_struct = args[0]
if rc == ARCHIVE_RETRY:
raise ArchiveErrorRetryable(rc, archive_struct, archive_func)
if rc == ARCHIVE_WARN:
raise ArchiveWarning(rc, archive_struct, archive_func)
# anything else is a serious error, in general not recoverable.
raise ArchiveError(rc, archive_struct, archive_func)
errcheck_null = partial(errcheck, null=True)
# return codes
ARCHIVE_EOF = 1
ARCHIVE_OK = 0
ARCHIVE_RETRY = -10
ARCHIVE_WARN = -20
ARCHIVE_FAILED = -25
ARCHIVE_FATAL = -30
# stat/file types
AE_IFMT = 0o0170000
AE_IFREG = 0o0100000
AE_IFLNK = 0o0120000
AE_IFSOCK = 0o0140000
AE_IFCHR = 0o0020000
AE_IFBLK = 0o0060000
AE_IFDIR = 0o0040000
AE_IFIFO = 0o0010000
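# Illustrative helper (not part of the original module): AE_IFMT masks the
# file-type bits, so a value returned by archive_entry_filetype (bound below
# as entry_type) can be classified by masking it and comparing against the
# AE_IF* constants above.
def _is_filetype(filetype, ae_constant):
    """
    Return True if `filetype` matches the given AE_IF* constant, e.g.
    _is_filetype(entry_type(entry_struct), AE_IFDIR).
    """
    return (filetype & AE_IFMT) == ae_constant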
##########################
# C functions declarations
##########################
# NOTE: these are verbose on purpose to help with debugging and tracing lower
# level errors and issues
# archive level functions
##########################
"""
To read an archive, you must first obtain an initialized struct archive object
from archive_read_new().
Allocates and initializes a struct archive object suitable for reading from an
archive. NULL is returned on error.
"""
# struct archive * archive_read_new(void);
archive_reader = libarchive.archive_read_new
archive_reader.argtypes = []
archive_reader.restype = c_void_p
archive_reader.errcheck = errcheck_null
"""
Given a struct archive object, you can enable support for formats and filters.
Enables support for all available formats except the "raw" format.
Return ARCHIVE_OK on success, or ARCHIVE_FATAL.
Detailed error codes and textual descriptions are available from the
archive_errno() and archive_error_string() functions.
"""
# int archive_read_support_format_all(struct archive *);
use_all_formats = libarchive.archive_read_support_format_all
use_all_formats.argtypes = [c_void_p]
use_all_formats.restype = c_int
use_all_formats.errcheck = errcheck
"""
Given a struct archive object, you can enable support for formats and filters.
Enables support for the "raw" format.
The "raw" format handler allows libarchive to be used to read arbitrary
data. It treats any data stream as an archive with a single entry. The
pathname of this entry is "data"; all other entry fields are unset. This is
not enabled by archive_read_support_format_all() in order to avoid erroneous
handling of damaged archives.
"""
# int archive_read_support_format_raw(struct archive *);
use_raw_formats = libarchive.archive_read_support_format_raw
use_raw_formats.argtypes = [c_void_p]
use_raw_formats.restype = c_int
use_raw_formats.errcheck = errcheck
"""
Given a struct archive object, you can enable support for formats and filters.
Enables all available decompression filters.
Return ARCHIVE_OK if the compression is fully supported, ARCHIVE_WARN if the
compression is supported only through an external program.
Detailed error codes and textual descriptions are available from the
archive_errno() and archive_error_string() functions.
"""
# int archive_read_support_filter_all(struct archive *);
use_all_filters = libarchive.archive_read_support_filter_all
use_all_filters.argtypes = [c_void_p]
use_all_filters.restype = c_int
use_all_filters.errcheck = errcheck
"""
Once formats and filters have been set, you open an archive filename for
actual reading.
Freeze the settings, open the archive, and prepare for reading entries.
Accepts a simple filename and a block size. A NULL filename represents
standard input.
Return ARCHIVE_OK on success, or ARCHIVE_FATAL.
Once you have finished reading data from the archive, you should call
archive_read_close() to close the archive, then call archive_read_free() to
release all resources, including all memory allocated by the library.
"""
# int archive_read_open_filename(struct archive *, const char *filename, size_t block_size);
open_file = libarchive.archive_read_open_filename
open_file.argtypes = [c_void_p, c_char_p, c_size_t]
open_file.restype = c_int
open_file.errcheck = errcheck
"""
Wide char version of archive_read_open_filename.
"""
# int archive_read_open_filename_w(struct archive *, const wchar_t *_filename, size_t _block_size);
open_file_w = libarchive.archive_read_open_filename_w
open_file_w.argtypes = [c_void_p, c_wchar_p, c_size_t]
open_file_w.restype = c_int
open_file_w.errcheck = errcheck
"""
When done with reading an archive you must free its resources.
Invokes archive_read_close() if it was not invoked manually, then release all
resources.
Return ARCHIVE_OK on success, or ARCHIVE_FATAL.
"""
# int archive_read_free(struct archive *);
free_archive = libarchive.archive_read_free
free_archive.argtypes = [c_void_p]
free_archive.restype = c_int
free_archive.errcheck = errcheck
# entry level functions
#######################
"""
You can think of a struct archive_entry as a heavy-duty version of struct
stat: it includes everything from struct stat plus associated pathname,
textual group and user names, etc. These objects are used by libarchive(3) to
represent the metadata associated with a particular entry in an archive.
"""
"""
Allocate and return a blank struct archive_entry object.
"""
# struct archive_entry * archive_entry_new(void);
new_entry = libarchive.archive_entry_new
new_entry.argtypes = []
new_entry.restype = c_void_p
new_entry.errcheck = errcheck_null
"""
Given an opened archive struct object, you can iterate through the archive
entries. An entry has a header with various data and usually a payload that is
the archived content.
Read the header for the next entry and populate the provided struct
archive_entry.
Return ARCHIVE_OK (the operation succeeded), ARCHIVE_WARN (the operation
succeeded but a non-critical error was encountered), ARCHIVE_EOF (end-of-
archive was encountered), ARCHIVE_RETRY (the operation failed but can be
retried), and ARCHIVE_FATAL (there was a fatal error; the archive should be
closed immediately).
"""
# int archive_read_next_header2(struct archive *, struct archive_entry *);
next_entry = libarchive.archive_read_next_header2
next_entry.argtypes = [c_void_p, c_void_p]
next_entry.restype = c_int
next_entry.errcheck = errcheck
"""
Read data associated with the header just read. Internally, this is a
convenience function that calls archive_read_data_block() and fills any gaps
with nulls so that callers see a single continuous stream of data.
"""
# ssize_t archive_read_data(struct archive *, void *buff, size_t len);
read_entry_data = libarchive.archive_read_data
read_entry_data.argtypes = [c_void_p, c_void_p, c_size_t]
read_entry_data.restype = c_ssize_t
read_entry_data.errcheck = errcheck
"""
Return the next available block of data for this entry. Unlike
archive_read_data(), the archive_read_data_block() function avoids copying
data and allows you to correctly handle sparse files, as supported by some
archive formats. The library guarantees that offsets will increase and that
blocks will not overlap. Note that the blocks returned from this function can
be much larger than the block size read from disk, due to compression and
internal buffer optimizations.
"""
# int archive_read_data_block(struct archive *, const void **buff, size_t *len, off_t *offset);
read_entry_data_block = libarchive.archive_read_data_block
read_entry_data_block.argtypes = [c_void_p, POINTER(c_void_p), POINTER(c_size_t), POINTER(c_longlong)]
read_entry_data_block.restype = c_int
read_entry_data_block.errcheck = errcheck
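# Illustrative sketch (not part of the original module): draining the current
# entry with the zero-copy archive_read_data_block binding above. It assumes
# `archive_struct` is an open archive positioned on an entry (after a
# successful next_entry call) and `target` is a seekable binary file object;
# both names are placeholders for this example only.
def _demo_copy_entry_data(archive_struct, target):
    # byref/string_at may not be imported at the top of this module; they are
    # only needed for this sketch.
    from ctypes import byref, string_at
    buf = c_void_p()
    size = c_size_t()
    offset = c_longlong()
    while True:
        rc = read_entry_data_block(archive_struct, byref(buf),
                                   byref(size), byref(offset))
        if rc == ARCHIVE_EOF:
            break
        # honor sparse-file gaps by seeking to the block offset before writing
        target.seek(offset.value)
        target.write(string_at(buf, size.value))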
"""
Releases the struct archive_entry object.
The struct entry object must be freed when no longer needed.
"""
# void archive_entry_free(struct archive_entry *);
free_entry = libarchive.archive_entry_free
free_entry.argtypes = [c_void_p]
free_entry.restype = None
#
# Entry attributes: path, type, size, etc. are collected with these functions:
##############################
"""
The functions archive_entry_filetype() and archive_entry_set_filetype() get
and set, respectively, the file type. The file type is one of the following
constants:
AE_IFREG Regular file
AE_IFLNK Symbolic link
AE_IFSOCK Socket
AE_IFCHR Character device
AE_IFBLK Block device
AE_IFDIR Directory
AE_IFIFO Named pipe (fifo)
Not all file types are supported by all platforms. The constants used by
stat(2) may have different numeric values from the corresponding constants
above.
"""
# struct archive_entry * archive_entry_filetype(struct archive_entry *);
entry_type = libarchive.archive_entry_filetype
entry_type.argtypes = [c_void_p]
entry_type.restype = c_int
# TODO: check for nulls
# entry_type.errcheck = errcheck
"""
This function retrieves the mtime (modification time) field in an
archive_entry.
The timestamps are truncated automatically depending on the archive format
(for archiving) or the filesystem capabilities (for restoring).
All timestamp fields are optional. The XXX_unset() functions can be used to
mark the corresponding field as missing. The current state can be queried
using XXX_is_set(). Unset time fields have a second and nanosecond field of 0.
"""
# time_t archive_entry_mtime(struct archive_entry *);
entry_time = libarchive.archive_entry_mtime
entry_time.argtypes = [c_void_p]
entry_time.restype = c_int
"""
Path in the archive.
char * Multibyte strings in the current locale.
wchar_t * Wide character strings in the current locale.
"""
# const char * archive_entry_pathname(struct archive_entry *a);
entry_path = libarchive.archive_entry_pathname
entry_path.argtypes = [c_void_p]
entry_path.restype = c_char_p
# TODO: check for nulls
# entry_path.errcheck = None
"""
String data can be set or accessed as wide character strings or normal char
strings. The functions that use wide character strings are suffixed with _w.
Note that these are different representations of the same data: For example,
if you store a narrow string and read the corresponding wide string, the
object will transparently convert formats using the current locale.
Similarly, if you store a wide string and then store a narrow string for the
same data, the previously-set wide string will be discarded in favor of the new
data.
"""
# const wchar_t * archive_entry_pathname_w(struct archive_entry *a);
entry_path_w = libarchive.archive_entry_pathname_w
entry_path_w.argtypes = [c_void_p]
entry_path_w.restype = c_wchar_p
# TODO: check for nulls?
# entry_path_w.errcheck = None
# int64_t archive_entry_size(struct archive_entry *a);
entry_size = libarchive.archive_entry_size
entry_size.argtypes = [c_void_p]
entry_size.restype = c_longlong
entry_size.errcheck = errcheck
"""
Destination of the hardlink.
"""
# const char * archive_entry_hardlink(struct archive_entry *a);
hardlink_path = libarchive.archive_entry_hardlink
hardlink_path.argtypes = [c_void_p]
hardlink_path.restype = c_char_p
# const wchar_t * archive_entry_hardlink_w(struct archive_entry *a);
hardlink_path_w = libarchive.archive_entry_hardlink_w
hardlink_path_w.argtypes = [c_void_p]
hardlink_path_w.restype = c_wchar_p
"""
The number of references (hardlinks) can be obtained by calling
archive_entry_nlink().
"""
# unsigned int archive_entry_nlink(struct archive_entry *a);
hardlink_count = libarchive.archive_entry_nlink
hardlink_count.argtypes = [c_void_p]
hardlink_count.restype = c_int
"""
The functions archive_entry_dev() and archive_entry_ino64() are used by
archive_entry_linkify(3) to find hardlinks. The pair of device and inode is
supposed to identify hardlinked files.
"""
# int64_t archive_entry_ino64(struct archive_entry *a);
# dev_t archive_entry_dev(struct archive_entry *a);
# int archive_entry_dev_is_set(struct archive_entry *a);
"""
Destination of the symbolic link.
"""
# const char * archive_entry_symlink(struct archive_entry *);
symlink_path = libarchive.archive_entry_symlink
symlink_path.argtypes = [c_void_p]
symlink_path.restype = c_char_p
symlink_path.errcheck = errcheck_null
# const wchar_t * archive_entry_symlink_w(struct archive_entry *);
symlink_path_w = libarchive.archive_entry_symlink_w
symlink_path_w.argtypes = [c_void_p]
symlink_path_w.restype = c_wchar_p
symlink_path_w.errcheck = errcheck_null
#
# Utilities and error handling
##############################
"""
Returns a numeric error code (see errno(2)) indicating the reason for the most
recent error return. Note that this can not be reliably used to detect whether
an error has occurred. It should be used only after another libarchive
function has returned an error status.
"""
# int archive_errno(struct archive *);
errno = libarchive.archive_errno
errno.argtypes = [c_void_p]
errno.restype = c_int
"""
Returns a textual error message suitable for display. The error message here
is usually more specific than that obtained from passing the result of
archive_errno() to strerror(3).
"""
# const char * archive_error_string(struct archive *);
err_msg = libarchive.archive_error_string
err_msg.argtypes = [c_void_p]
err_msg.restype = c_char_p
"""
Returns a count of the number of files processed by this archive object. The
count is incremented by calls to archive_write_header(3) or
archive_read_next_header(3).
"""
# int archive_file_count(struct archive *);
"""
Returns a numeric code identifying the indicated filter. See
archive_filter_count() for details of the numbering.
"""
# int archive_filter_code(struct archive *, int);
"""
Returns the number of filters in the current pipeline. For read archive
handles, these filters are added automatically by the automatic format
detection.
"""
# int archive_filter_count(struct archive *, int);
"""
Synonym for archive_filter_code(a, 0).
"""
# int archive_compression(struct archive *);
"""
Returns a textual name identifying the indicated filter. See
archive_filter_count() for details of the numbering.
"""
# const char * archive_filter_name(struct archive *, int);
"""
Synonym for archive_filter_name(a, 0).
"""
# const char * archive_compression_name(struct archive *);
"""
Returns a numeric code indicating the format of the current archive entry.
This value is set by a successful call to archive_read_next_header(). Note
that it is common for this value to change from entry to entry. For example, a
tar archive might have several entries that utilize GNU tar extensions and
several entries that do not. These entries will have different format codes.
"""
# int archive_format(struct archive *);
"""
A textual description of the format of the current entry.
"""
# const char * archive_format_name(struct archive *);
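# Illustrative sketch (not part of the original module): the typical call
# sequence for the bindings declared above -- create a reader, enable formats
# and filters, open the file, iterate entry headers, then free everything.
# The Entry extraction code earlier in this module wraps the same low-level
# calls with richer error handling; this helper only shows the order of
# operations. `location` is assumed to be a byte-string path.
def _demo_list_entry_paths(location, block_size=10240):
    archive_struct = archive_reader()
    entry_struct = None
    paths = []
    try:
        use_all_formats(archive_struct)
        use_all_filters(archive_struct)
        open_file(archive_struct, location, block_size)
        entry_struct = new_entry()
        # next_entry returns ARCHIVE_EOF once every header has been read;
        # warnings and errors are raised by the errcheck handlers instead.
        while next_entry(archive_struct, entry_struct) != ARCHIVE_EOF:
            paths.append(entry_path(entry_struct))
    finally:
        if entry_struct:
            free_entry(entry_struct)
        free_archive(archive_struct)
    return paths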
|
vinodpanicker/scancode-toolkit
|
src/extractcode/libarchive2.py
|
Python
|
apache-2.0
| 28,804
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import os
import shutil
import sys
import unittest
import uuid
from pyflink.pyflink_gateway_server import on_windows
from pyflink.table import DataTypes
from pyflink.table.udf import udf
from pyflink.testing import source_sink_utils
from pyflink.testing.test_case_utils import (PyFlinkBlinkStreamTableTestCase,
PyFlinkBlinkBatchTableTestCase,
PyFlinkStreamTableTestCase,
PyFlinkBatchTableTestCase)
class DependencyTests(object):
def test_add_python_file(self):
python_file_dir = os.path.join(self.tempdir, "python_file_dir_" + str(uuid.uuid4()))
os.mkdir(python_file_dir)
python_file_path = os.path.join(python_file_dir, "test_dependency_manage_lib.py")
with open(python_file_path, 'w') as f:
f.write("def add_two(a):\n return a + 2")
self.t_env.add_python_file(python_file_path)
def plus_two(i):
from test_dependency_manage_lib import add_two
return add_two(i)
self.t_env.register_function("add_two", udf(plus_two, DataTypes.BIGINT(),
DataTypes.BIGINT()))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])
t.select("add_two(a), a").insert_into("Results")
self.t_env.execute("test")
actual = source_sink_utils.results()
self.assert_equals(actual, ["3,1", "4,2", "5,3"])
class FlinkStreamDependencyTests(DependencyTests, PyFlinkStreamTableTestCase):
pass
class FlinkBatchDependencyTests(PyFlinkBatchTableTestCase):
def test_add_python_file(self):
python_file_dir = os.path.join(self.tempdir, "python_file_dir_" + str(uuid.uuid4()))
os.mkdir(python_file_dir)
python_file_path = os.path.join(python_file_dir, "test_dependency_manage_lib.py")
with open(python_file_path, 'w') as f:
f.write("def add_two(a):\n return a + 2")
self.t_env.add_python_file(python_file_path)
def plus_two(i):
from test_dependency_manage_lib import add_two
return add_two(i)
self.t_env.register_function("add_two", udf(plus_two, DataTypes.BIGINT(),
DataTypes.BIGINT()))
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])\
.select("add_two(a), a")
result = self.collect(t)
self.assertEqual(result, ["3,1", "4,2", "5,3"])
class BlinkBatchDependencyTests(DependencyTests, PyFlinkBlinkBatchTableTestCase):
pass
class BlinkStreamDependencyTests(DependencyTests, PyFlinkBlinkStreamTableTestCase):
def test_set_requirements_without_cached_directory(self):
requirements_txt_path = os.path.join(self.tempdir, str(uuid.uuid4()))
with open(requirements_txt_path, 'w') as f:
f.write("cloudpickle==1.2.2")
self.t_env.set_python_requirements(requirements_txt_path)
def check_requirements(i):
import cloudpickle
assert os.path.abspath(cloudpickle.__file__).startswith(
os.environ['_PYTHON_REQUIREMENTS_INSTALL_DIR'])
return i
self.t_env.register_function("check_requirements",
udf(check_requirements, DataTypes.BIGINT(),
DataTypes.BIGINT()))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])
t.select("check_requirements(a), a").insert_into("Results")
self.t_env.execute("test")
actual = source_sink_utils.results()
self.assert_equals(actual, ["1,1", "2,2", "3,3"])
def test_set_requirements_with_cached_directory(self):
tmp_dir = self.tempdir
requirements_txt_path = os.path.join(tmp_dir, "requirements_txt_" + str(uuid.uuid4()))
with open(requirements_txt_path, 'w') as f:
f.write("python-package1==0.0.0")
requirements_dir_path = os.path.join(tmp_dir, "requirements_dir_" + str(uuid.uuid4()))
os.mkdir(requirements_dir_path)
package_file_name = "python-package1-0.0.0.tar.gz"
with open(os.path.join(requirements_dir_path, package_file_name), 'wb') as f:
import base64
# This base64 data is encoded from a python package file which includes a
# "python_package1" module. The module contains a "plus(a, b)" function.
# The base64 can be recomputed by following code:
# base64.b64encode(open("python-package1-0.0.0.tar.gz", "rb").read()).decode("utf-8")
f.write(base64.b64decode(
"H4sICNefrV0C/2Rpc3QvcHl0aG9uLXBhY2thZ2UxLTAuMC4wLnRhcgDtmVtv2jAYhnPtX2H1CrRCY+ckI"
"XEx7axuUA11u5imyICTRc1JiVnHfv1MKKWjYxwKEdPehws7xkmUfH5f+3PyqfqWpa1cjG5EKFnLbOvfhX"
"FQTI3nOPPSdavS5Pa8nGMwy3Esi3ke9wyTObbnGNQxamBSKlFQavzUryG8ldG6frpbEGx4yNmDLMp/hPy"
"P8b+6fNN613vdP1z8XdteG3+ug/17/F3Hcw1qIv5H54NUYiyUaH2SRRllaYeytkl6IpEdujI2yH2XapCQ"
"wSRJRDHt0OveZa//uUfeZonUvUO5bHo+0ZcoVo9bMhFRvGx9H41kWj447aUsR0WUq+pui8arWKggK5Jli"
"wGOo/95q79ovXi6/nfyf246Dof/n078fT9KI+X77Xx6BP83bX4Xf5NxT7dz7toO/L8OxjKgeTwpG+KcDp"
"sdQjWFVJMipYI+o0MCk4X/t2UYtqI0yPabCHb3f861XcD/Ty/+Y5nLdCzT0dSPo/SmbKsf6un+b7KV+Ls"
"W4/D/OoC9w/930P9eGwM75//csrD+Q/6P/P/k9D/oX3988Wqw1bS/tf6tR+s/m3EG/ddBqXO9XKf15C8p"
"P9k4HZBtBgzZaVW5vrfKcj+W32W82ygEB9D/Xu9+4/qfP9L/rBv0X1v87yONKRX61/qfzwqjIDzIPTbv/"
"7or3/88i0H/tfBFW7s/s/avRInQH06ieEy7tDrQeYHUdRN7wP+n/vf62LOH/pld7f9xz7a5Pfufedy0oP"
"86iJI8KxStAq6yLC4JWdbbVbWRikR2z1ZGytk5vauW3QdnBFE6XqwmykazCesAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAOBw/AJw5CHBAFAAAA=="))
self.t_env.set_python_requirements(requirements_txt_path, requirements_dir_path)
def add_one(i):
from python_package1 import plus
return plus(i, 1)
self.t_env.register_function("add_one",
udf(add_one, DataTypes.BIGINT(),
DataTypes.BIGINT()))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])
t.select("add_one(a), a").insert_into("Results")
self.t_env.execute("test")
actual = source_sink_utils.results()
self.assert_equals(actual, ["2,1", "3,2", "4,3"])
def test_add_python_archive(self):
tmp_dir = self.tempdir
archive_dir_path = os.path.join(tmp_dir, "archive_" + str(uuid.uuid4()))
os.mkdir(archive_dir_path)
with open(os.path.join(archive_dir_path, "data.txt"), 'w') as f:
f.write("2")
archive_file_path = \
shutil.make_archive(os.path.dirname(archive_dir_path), 'zip', archive_dir_path)
self.t_env.add_python_archive(archive_file_path, "data")
def add_from_file(i):
with open("data/data.txt", 'r') as f:
return i + int(f.read())
self.t_env.register_function("add_from_file",
udf(add_from_file, DataTypes.BIGINT(),
DataTypes.BIGINT()))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])
t.select("add_from_file(a), a").insert_into("Results")
self.t_env.execute("test")
actual = source_sink_utils.results()
self.assert_equals(actual, ["3,1", "4,2", "5,3"])
@unittest.skipIf(on_windows(), "Symbolic link is not supported on Windows, skipping.")
def test_set_environment(self):
python_exec = sys.executable
tmp_dir = self.tempdir
python_exec_link_path = os.path.join(tmp_dir, "py_exec")
os.symlink(python_exec, python_exec_link_path)
self.t_env.get_config().set_python_executable(python_exec_link_path)
def check_python_exec(i):
import os
assert os.environ["python"] == python_exec_link_path
return i
self.t_env.register_function("check_python_exec",
udf(check_python_exec, DataTypes.BIGINT(),
DataTypes.BIGINT()))
def check_pyflink_gateway_disabled(i):
try:
from pyflink.java_gateway import get_gateway
get_gateway()
except Exception as e:
assert str(e).startswith("It's launching the PythonGatewayServer during Python UDF"
" execution which is unexpected.")
else:
raise Exception("The gateway server is not disabled!")
return i
self.t_env.register_function("check_pyflink_gateway_disabled",
udf(check_pyflink_gateway_disabled, DataTypes.BIGINT(),
DataTypes.BIGINT()))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
t = self.t_env.from_elements([(1, 2), (2, 5), (3, 1)], ['a', 'b'])
t.select("check_python_exec(a), check_pyflink_gateway_disabled(a)").insert_into("Results")
self.t_env.execute("test")
actual = source_sink_utils.results()
self.assert_equals(actual, ["1,1", "2,2", "3,3"])
if __name__ == "__main__":
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
jinglining/flink
|
flink-python/pyflink/table/tests/test_dependency.py
|
Python
|
apache-2.0
| 11,436
|
"""Constants for the Vilfo Router integration."""
from __future__ import annotations
from dataclasses import dataclass
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.const import DEVICE_CLASS_TIMESTAMP, PERCENTAGE
DOMAIN = "vilfo"
ATTR_API_DATA_FIELD_LOAD = "load"
ATTR_API_DATA_FIELD_BOOT_TIME = "boot_time"
ATTR_LOAD = "load"
ATTR_BOOT_TIME = "boot_time"
ROUTER_DEFAULT_HOST = "admin.vilfo.com"
ROUTER_DEFAULT_MODEL = "Vilfo Router"
ROUTER_DEFAULT_NAME = "Vilfo Router"
ROUTER_MANUFACTURER = "Vilfo AB"
@dataclass
class VilfoRequiredKeysMixin:
"""Mixin for required keys."""
api_key: str
@dataclass
class VilfoSensorEntityDescription(SensorEntityDescription, VilfoRequiredKeysMixin):
"""Describes Vilfo sensor entity."""
SENSOR_TYPES: tuple[VilfoSensorEntityDescription, ...] = (
VilfoSensorEntityDescription(
key=ATTR_LOAD,
name="Load",
native_unit_of_measurement=PERCENTAGE,
icon="mdi:memory",
api_key=ATTR_API_DATA_FIELD_LOAD,
),
VilfoSensorEntityDescription(
key=ATTR_BOOT_TIME,
name="Boot time",
icon="mdi:timer-outline",
api_key=ATTR_API_DATA_FIELD_BOOT_TIME,
device_class=DEVICE_CLASS_TIMESTAMP,
),
)
|
jawilson/home-assistant
|
homeassistant/components/vilfo/const.py
|
Python
|
apache-2.0
| 1,270
|
"""The Brother component."""
from __future__ import annotations
from datetime import timedelta
import logging
from brother import Brother, DictToObj, SnmpError, UnsupportedModel
import pysnmp.hlapi.asyncio as SnmpEngine
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DATA_CONFIG_ENTRY, DOMAIN, SNMP
from .utils import get_snmp_engine
PLATFORMS = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Brother from a config entry."""
host = entry.data[CONF_HOST]
kind = entry.data[CONF_TYPE]
snmp_engine = get_snmp_engine(hass)
coordinator = BrotherDataUpdateCoordinator(
hass, host=host, kind=kind, snmp_engine=snmp_engine
)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(DATA_CONFIG_ENTRY, {})
hass.data[DOMAIN][DATA_CONFIG_ENTRY][entry.entry_id] = coordinator
hass.data[DOMAIN][SNMP] = snmp_engine
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN][DATA_CONFIG_ENTRY].pop(entry.entry_id)
if not hass.data[DOMAIN][DATA_CONFIG_ENTRY]:
hass.data[DOMAIN].pop(SNMP)
hass.data[DOMAIN].pop(DATA_CONFIG_ENTRY)
return unload_ok
class BrotherDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Brother data from the printer."""
def __init__(
self, hass: HomeAssistant, host: str, kind: str, snmp_engine: SnmpEngine
) -> None:
"""Initialize."""
self.brother = Brother(host, kind=kind, snmp_engine=snmp_engine)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> DictToObj:
"""Update data via library."""
try:
data = await self.brother.async_update()
except (ConnectionError, SnmpError, UnsupportedModel) as error:
raise UpdateFailed(error) from error
return data
|
jawilson/home-assistant
|
homeassistant/components/brother/__init__.py
|
Python
|
apache-2.0
| 2,597
|
from pulsar.tools.authorization import get_authorizer
from .test_utils import get_test_toolbox, TestCase
def test_allow_any_authorization():
authorizer = get_authorizer(None)
authorization = authorizer.get_authorization('tool1')
authorization.authorize_setup()
authorization.authorize_tool_file('cow', '#!/bin/bash\necho "Hello World!"')
class ToolBasedAuthorizationTestCase(TestCase):
def setUp(self):
self.toolbox = get_test_toolbox()
self.authorizer = get_authorizer(self.toolbox)
def test_valid_setup_passes(self):
self.authorizer.get_authorization('tool1').authorize_setup()
def test_invalid_setup_fails(self):
with self.unauthorized_expectation():
self.authorizer.get_authorization('tool2').authorize_setup()
def test_valid_tool_file_passes(self):
authorization = self.authorizer.get_authorization('tool1')
authorization.authorize_tool_file('tool1_wrapper.py', 'print \'Hello World!\'\n')
def test_invalid_tool_file_fails(self):
authorization = self.authorizer.get_authorization('tool1')
with self.unauthorized_expectation():
authorization.authorize_tool_file('tool1_wrapper.py', '#!/bin/sh\nrm -rf /valuable/data')
def unauthorized_expectation(self):
return self.assertRaises(Exception)
|
galaxyproject/pulsar
|
test/authorization_test.py
|
Python
|
apache-2.0
| 1,340
|
#*****************************************************************************
# Copyright 2004-2008 Steve Menard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#*****************************************************************************
import collections
from . import _jclass
class _WrappedIterator(object):
"""
Wraps a Java iterator to respect the Python 3 iterator API
"""
def __init__(self, iterator):
self.iterator = iterator
def __iter__(self):
return self.iterator
def __next__(self):
return next(self.iterator)
# Compatibility name
next = __next__
def _initialize():
_jclass.registerClassCustomizer(CollectionCustomizer())
_jclass.registerClassCustomizer(ListCustomizer())
_jclass.registerClassCustomizer(MapCustomizer())
_jclass.registerClassCustomizer(IteratorCustomizer())
_jclass.registerClassCustomizer(EnumerationCustomizer())
def isPythonSequence(v):
if isinstance(v, collections.Sequence):
if not hasattr(v.__class__, '__metaclass__') \
or v.__class__.__metaclass__ is _jclass._JavaClass:
return True
return False
def _colLength(self):
return self.size()
def _colIter(self):
return _WrappedIterator(self.iterator())
def _colDelItem(self, i):
return self.remove(i)
def _colAddAll(self, v):
if isPythonSequence(v):
r = False
for i in v:
r = self.add(i) or r
return r
else:
return self._addAll(v)
def _colRemoveAll(self, v):
if isPythonSequence(v):
r = False
for i in v:
r = self.remove(i) or r
return r
else:
return self._removeAll(v)
def _colRetainAll(self, v):
if isPythonSequence(v):
r = _jclass.JClass("java.util.ArrayList")(len(v))
for i in v:
r.add(i)
else:
r = v
return self._retainAll(r)
class CollectionCustomizer(object):
_METHODS = {
'__len__': _colLength,
'__iter__': _colIter,
'__delitem__': _colDelItem,
}
def canCustomize(self, name, jc):
if name == 'java.util.Collection':
return True
return jc.isSubclass('java.util.Collection')
def customize(self, name, jc, bases, members):
if name == 'java.util.Collection':
members.update(CollectionCustomizer._METHODS)
else:
# AddAll is handled by List
if (not jc.isSubclass("java.util.List")) and 'addAll' in members:
members['_addAll'] = members['addAll']
members['addAll'] = _colAddAll
if 'removeAll' in members:
members['_removeAll'] = members['removeAll']
members['removeAll'] = _colRemoveAll
if 'retainAll' in members:
members['_retainAll'] = members['retainAll']
members['retainAll'] = _colRetainAll
def _listGetItem(self, ndx):
if isinstance(ndx, slice):
start = ndx.start
stop = ndx.stop
if start < 0:
start = self.size() + start
if stop < 0:
stop = self.size() + stop
return self.subList(start, stop)
else:
if ndx < 0:
ndx = self.size() + ndx
return self.get(ndx)
def _listSetItem(self, ndx, v):
if isinstance(ndx, slice):
start = ndx.start
stop = ndx.stop
if start < 0:
start = self.size() + start
if stop < 0:
stop = self.size() + stop
for i in range(start, stop):
self.remove(start)
if isinstance(v, collections.Sequence):
ndx = start
for i in v:
self.add(ndx, i)
ndx += 1
else:
if ndx < 0:
ndx = self.size() + ndx
self.set(ndx, v)
def _listAddAll(self, v, v2=None):
if isPythonSequence(v):
r = False
if v2 is not None: # assume form (int, values)
for i in range(len(v2)):
r = r or self.add(v + i, v2[i])
else:
for i in v:
r = self.add(i) or r
return r
else:
return self._addAll(v)
class ListCustomizer(object):
_METHODS = {
'__setitem__': _listSetItem,
'__getitem__': _listGetItem,
}
def canCustomize(self, name, jc):
if name == 'java.util.List':
return True
return jc.isSubclass('java.util.List')
def customize(self, name, jc, bases, members):
if name == 'java.util.List':
members.update(ListCustomizer._METHODS)
else:
if 'addAll' in members:
members['_addAll'] = members['addAll']
members['addAll'] = _listAddAll
def isPythonMapping(v):
if isinstance(v, collections.Mapping):
if not hasattr(v.__class__, '__metaclass__') or \
v.__class__.__metaclass__ is _jclass._JavaClass:
return True
return False
def _mapLength(self):
return self.size()
def _mapIter(self):
return _WrappedIterator(self.keySet().iterator())
def _mapDelItem(self, i):
return self.remove(i)
def _mapGetItem(self, ndx):
return self.get(ndx)
def _mapSetItem(self, ndx, v):
self.put(ndx, v)
def _mapPutAll(self, v):
if isPythonMapping(v):
for i in v:
self.put(i, v[i])
else:
# do the regular method ...
self._putAll(v)
class MapCustomizer(object):
_METHODS = {
'__len__': _mapLength,
'__iter__': _mapIter,
'__delitem__': _mapDelItem,
'__getitem__': _mapGetItem,
'__setitem__': _mapSetItem,
}
def canCustomize(self, name, jc):
if name == 'java.util.Map':
return True
return jc.isSubclass('java.util.Map')
def customize(self, name, jc, bases, members):
if name == 'java.util.Map':
members.update(MapCustomizer._METHODS)
else:
if "putAll" in members:
members["_putAll"] = members["putAll"]
members["putAll"] = _mapPutAll
def _iterCustomNext(self):
if self.hasNext():
return self._next()
raise StopIteration
def _iterIteratorNext(self):
if self.hasNext():
return next(self)
raise StopIteration
def _iterIter(self):
return self
class IteratorCustomizer(object):
_METHODS = {
'__iter__': _iterIter,
'__next__': _iterCustomNext,
}
def canCustomize(self, name, jc):
if name == 'java.util.Iterator':
return True
return jc.isSubclass('java.util.Iterator')
def customize(self, name, jc, bases, members):
if name == 'java.util.Iterator':
members.update(IteratorCustomizer._METHODS)
elif jc.isSubclass('java.util.Iterator'):
__next__ = 'next' if 'next' in members else '__next__'
members['_next'] = members[__next__]
members[__next__] = _iterCustomNext
def _enumNext(self):
if self.hasMoreElements():
return self.nextElement()
raise StopIteration
def _enumIter(self):
return self
class EnumerationCustomizer(object):
_METHODS = {
'next': _enumNext,
'__next__': _enumNext,
'__iter__': _enumIter,
}
def canCustomize(self, name, jc):
return name == 'java.util.Enumeration'
def customize(self, name, jc, bases, members):
members.update(EnumerationCustomizer._METHODS)
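# Illustrative sketch (not part of the original module): what these
# customizers enable once _initialize() has registered them (the jpype
# package normally does this for you) and a JVM has been started with
# jpype.startJVM(). The class and the sample values are for demonstration
# only.
def _demo_collection_customizers():
    ArrayList = _jclass.JClass("java.util.ArrayList")
    jlist = ArrayList()
    jlist.addAll(["a", "b", "c"])  # _listAddAll accepts plain Python sequences
    assert len(jlist) == 3         # __len__ delegates to size()
    first = jlist[0]               # __getitem__ delegates to get()
    return first, [item for item in jlist]  # __iter__ wraps the Java iterator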
|
ktan2020/jpype
|
jpype/_jcollection.py
|
Python
|
apache-2.0
| 8,284
|
import django
if django.VERSION >= (3, 2):
# The declaration is only needed for older Django versions
pass
else:
default_app_config = (
"wagtail.contrib.simple_translation.apps.SimpleTranslationAppConfig"
)
|
zerolab/wagtail
|
wagtail/contrib/simple_translation/__init__.py
|
Python
|
bsd-3-clause
| 233
|
"""
sentry.interfaces.template
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
__all__ = ('Template',)
from sentry.interfaces.base import Interface, InterfaceValidationError
from sentry.interfaces.stacktrace import get_context
from sentry.utils.safe import trim
class Template(Interface):
"""
A rendered template (generally used like a single frame in a stacktrace).
The attributes ``filename``, ``context_line``, and ``lineno`` are required.
>>> {
>>> "abs_path": "/real/file/name.html"
>>> "filename": "file/name.html",
>>> "pre_context": [
>>> "line1",
>>> "line2"
>>> ],
>>> "context_line": "line3",
>>> "lineno": 3,
>>> "post_context": [
>>> "line4",
>>> "line5"
>>> ],
>>> }
.. note:: This interface can be passed as the 'template' key in addition
to the full interface path.
"""
score = 1100
@classmethod
def to_python(cls, data):
if not data.get('filename'):
raise InterfaceValidationError("Missing 'filename'")
if not data.get('context_line'):
raise InterfaceValidationError("Missing 'context_line'")
if not data.get('lineno'):
raise InterfaceValidationError("Missing 'lineno'")
kwargs = {
'abs_path': trim(data.get('abs_path', None), 256),
'filename': trim(data['filename'], 256),
'context_line': trim(data.get('context_line', None), 256),
'lineno': int(data['lineno']),
# TODO(dcramer): trim pre/post_context
'pre_context': data.get('pre_context'),
'post_context': data.get('post_context'),
}
return cls(**kwargs)
def get_alias(self):
return 'template'
def get_path(self):
return 'sentry.interfaces.Template'
def get_hash(self):
return [self.filename, self.context_line]
def to_string(self, event, is_public=False, **kwargs):
context = get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
)
result = [
'Stacktrace (most recent call last):', '',
self.get_traceback(event, context)
]
return '\n'.join(result)
def get_traceback(self, event, context):
result = [
event.message, '',
'File "%s", line %s' % (self.filename, self.lineno), '',
]
result.extend([n[1].strip('\n') for n in context])
return '\n'.join(result)
def get_api_context(self, is_public=False):
return {
'lineNo': self.lineno,
'filename': self.filename,
'context': get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
),
}
|
JackDanger/sentry
|
src/sentry/interfaces/template.py
|
Python
|
bsd-3-clause
| 3,261
|
##########################################################################
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios),
# its affiliates and/or its licensors.
#
# Copyright (c) 2010-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import hou
import IECore
import IECoreHoudini
import unittest
import os
class TestFromHoudiniPointsConverter( IECoreHoudini.TestCase ) :
def createBox( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
box = geo.createNode( "box" )
return box
def createTorus( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
torus = geo.createNode( "torus" )
return torus
def createPoints( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
box = geo.createNode( "box" )
facet = geo.createNode( "facet" )
facet.parm("postnml").set(True)
points = geo.createNode( "scatter" )
facet.setInput( 0, box )
points.setInput( 0, facet )
return points
# creates a converter
def testCreateConverter( self ) :
box = self.createBox()
converter = IECoreHoudini.FromHoudiniPointsConverter( box )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
return converter
# creates a converter
def testFactory( self ) :
box = self.createBox()
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( box )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPolygonsConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( box, resultType = IECore.TypeId.PointsPrimitive )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( box, resultType = IECore.TypeId.Parameter )
self.assertEqual( converter, None )
self.failUnless( IECore.TypeId.PointsPrimitive in IECoreHoudini.FromHoudiniGeometryConverter.supportedTypes() )
converter = IECoreHoudini.FromHoudiniGeometryConverter.createDummy( IECore.TypeId.PointsPrimitive )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.createDummy( [ IECore.TypeId.PointsPrimitive ] )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
# performs geometry conversion
def testDoConversion( self ) :
converter = self.testCreateConverter()
result = converter.convert()
self.assert_( result.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
def testConvertFromHOMGeo( self ) :
geo = self.createPoints().geometry()
converter = IECoreHoudini.FromHoudiniGeometryConverter.createFromGeo( geo )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
result = converter.convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
converter2 = IECoreHoudini.FromHoudiniGeometryConverter.createFromGeo( geo, IECore.TypeId.PointsPrimitive )
self.failUnless( converter2.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
# convert a mesh
def testConvertMesh( self ) :
torus = self.createTorus()
converter = IECoreHoudini.FromHoudiniPointsConverter( torus )
result = converter.convert()
self.assertEqual( result.typeId(), IECore.PointsPrimitive.staticTypeId() )
bbox = result.bound()
self.assertEqual( bbox.min.x, -1.5 )
self.assertEqual( bbox.max.x, 1.5 )
self.assertEqual( result.numPoints, 100 )
for i in range( result.numPoints ) :
self.assert_( result["P"].data[i].x >= bbox.min.x )
self.assert_( result["P"].data[i].x <= bbox.max.x )
# test prim/vertex attributes
def testConvertPrimVertAttributes( self ) :
torus = self.createTorus()
geo = torus.parent()
# add vertex normals
facet = geo.createNode( "facet", node_name = "add_point_normals" )
facet.parm("postnml").set(True)
facet.setInput( 0, torus )
# add a primitive colour attributes
primcol = geo.createNode( "primitive", node_name = "prim_colour" )
primcol.parm("doclr").set(1)
primcol.parm("diffr").setExpression("rand($PR)")
primcol.parm("diffg").setExpression("rand($PR+1)")
primcol.parm("diffb").setExpression("rand($PR+2)")
primcol.setInput( 0, facet )
# add a load of different vertex attributes
vert_f1 = geo.createNode( "attribcreate", node_name = "vert_f1", exact_type_name=True )
vert_f1.parm("name").set("vert_f1")
vert_f1.parm("class").set(3)
vert_f1.parm("value1").setExpression("$VTX*0.1")
vert_f1.setInput( 0, primcol )
vert_f2 = geo.createNode( "attribcreate", node_name = "vert_f2", exact_type_name=True )
vert_f2.parm("name").set("vert_f2")
vert_f2.parm("class").set(3)
vert_f2.parm("size").set(2)
vert_f2.parm("value1").setExpression("$VTX*0.1")
vert_f2.parm("value2").setExpression("$VTX*0.1")
vert_f2.setInput( 0, vert_f1 )
vert_f3 = geo.createNode( "attribcreate", node_name = "vert_f3", exact_type_name=True )
vert_f3.parm("name").set("vert_f3")
vert_f3.parm("class").set(3)
vert_f3.parm("size").set(3)
vert_f3.parm("value1").setExpression("$VTX*0.1")
vert_f3.parm("value2").setExpression("$VTX*0.1")
vert_f3.parm("value3").setExpression("$VTX*0.1")
vert_f3.setInput( 0, vert_f2 )
vert_i1 = geo.createNode( "attribcreate", node_name = "vert_i1", exact_type_name=True )
vert_i1.parm("name").set("vert_i1")
vert_i1.parm("class").set(3)
vert_i1.parm("type").set(1)
vert_i1.parm("value1").setExpression("$VTX*0.1")
vert_i1.setInput( 0, vert_f3 )
vert_i2 = geo.createNode( "attribcreate", node_name = "vert_i2", exact_type_name=True )
vert_i2.parm("name").set("vert_i2")
vert_i2.parm("class").set(3)
vert_i2.parm("type").set(1)
vert_i2.parm("size").set(2)
vert_i2.parm("value1").setExpression("$VTX*0.1")
vert_i2.parm("value2").setExpression("$VTX*0.1")
vert_i2.setInput( 0, vert_i1 )
vert_i3 = geo.createNode( "attribcreate", node_name = "vert_i3", exact_type_name=True )
vert_i3.parm("name").set("vert_i3")
vert_i3.parm("class").set(3)
vert_i3.parm("type").set(1)
vert_i3.parm("size").set(3)
vert_i3.parm("value1").setExpression("$VTX*0.1")
vert_i3.parm("value2").setExpression("$VTX*0.1")
vert_i3.parm("value3").setExpression("$VTX*0.1")
vert_i3.setInput( 0, vert_i2 )
vert_v3f = geo.createNode( "attribcreate", node_name = "vert_v3f", exact_type_name=True )
vert_v3f.parm("name").set("vert_v3f")
vert_v3f.parm("class").set(3)
vert_v3f.parm("type").set(2)
vert_v3f.parm("value1").setExpression("$VTX*0.1")
vert_v3f.parm("value2").setExpression("$VTX*0.1")
vert_v3f.parm("value3").setExpression("$VTX*0.1")
vert_v3f.setInput( 0, vert_i3 )
detail_i3 = geo.createNode( "attribcreate", node_name = "detail_i3", exact_type_name=True )
detail_i3.parm("name").set("detail_i3")
detail_i3.parm("class").set(0)
detail_i3.parm("type").set(1)
detail_i3.parm("size").set(3)
detail_i3.parm("value1").set(123)
detail_i3.parm("value2").set(456.789) # can we catch it out with a float?
detail_i3.parm("value3").set(789)
detail_i3.setInput( 0, vert_v3f )
out = geo.createNode( "null", node_name="OUT" )
out.setInput( 0, detail_i3 )
# convert it all
converter = IECoreHoudini.FromHoudiniPointsConverter( out )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
result = converter.convert()
self.assert_( result.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
bbox = result.bound()
self.assertEqual( bbox.min.x, -1.5 )
self.assertEqual( bbox.max.x, 1.5 )
self.assertEqual( result.numPoints, 100 )
for i in range( result.numPoints ) :
self.assert_( result["P"].data[i].x >= bbox.min.x )
self.assert_( result["P"].data[i].x <= bbox.max.x )
# test point attributes
self.assert_( "P" in result )
self.assertEqual( result['P'].data.typeId(), IECore.TypeId.V3fVectorData )
self.assertEqual( result['P'].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( result['P'].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assert_( "N" in result )
self.assertEqual( result['N'].data.typeId(), IECore.TypeId.V3fVectorData )
self.assertEqual( result['N'].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( result['N'].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( result["N"].data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
# test detail attributes
self.assert_( "detail_i3" in result )
self.assertEqual( result['detail_i3'].data.typeId(), IECore.TypeId.V3iData )
self.assertEqual( result['detail_i3'].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( result['detail_i3'].data.value.x, 123 )
self.assertEqual( result['detail_i3'].data.value.y, 456 )
self.assertEqual( result['detail_i3'].data.value.z, 789 )
# test primitive attributes
self.assert_( "Cd" not in result )
# test vertex attributes
attrs = [ "vert_f1", "vert_f2", "vert_f3", "vert_i1", "vert_i2", "vert_i3", "vert_v3f" ]
for a in attrs :
self.assert_( a not in result )
self.assert_( result.arePrimitiveVariablesValid() )
# test prim/vertex attributes on a single primitive (mesh)
def testConvertMeshPrimVertAttributes( self ) :
torus = self.createTorus()
torus.parm( "type" ).set( 1 )
geo = torus.parent()
# add vertex normals
facet = geo.createNode( "facet", node_name = "add_point_normals" )
facet.parm("postnml").set(True)
facet.setInput( 0, torus )
# add a primitive colour attributes
primcol = geo.createNode( "primitive", node_name = "prim_colour" )
primcol.parm("doclr").set(1)
primcol.parm("diffr").setExpression("rand($PR)")
primcol.parm("diffg").setExpression("rand($PR+1)")
primcol.parm("diffb").setExpression("rand($PR+2)")
primcol.setInput( 0, facet )
# add a load of different vertex attributes
vert_f1 = geo.createNode( "attribcreate", node_name = "vert_f1", exact_type_name=True )
vert_f1.parm("name").set("vert_f1")
vert_f1.parm("class").set(3)
vert_f1.parm("value1").setExpression("$VTX*0.1")
vert_f1.setInput( 0, primcol )
vert_f2 = geo.createNode( "attribcreate", node_name = "vert_f2", exact_type_name=True )
vert_f2.parm("name").set("vert_f2")
vert_f2.parm("class").set(3)
vert_f2.parm("size").set(2)
vert_f2.parm("value1").setExpression("$VTX*0.1")
vert_f2.parm("value2").setExpression("$VTX*0.1")
vert_f2.setInput( 0, vert_f1 )
vert_f3 = geo.createNode( "attribcreate", node_name = "vert_f3", exact_type_name=True )
vert_f3.parm("name").set("vert_f3")
vert_f3.parm("class").set(3)
vert_f3.parm("size").set(3)
vert_f3.parm("value1").setExpression("$VTX*0.1")
vert_f3.parm("value2").setExpression("$VTX*0.1")
vert_f3.parm("value3").setExpression("$VTX*0.1")
vert_f3.setInput( 0, vert_f2 )
vert_i1 = geo.createNode( "attribcreate", node_name = "vert_i1", exact_type_name=True )
vert_i1.parm("name").set("vert_i1")
vert_i1.parm("class").set(3)
vert_i1.parm("type").set(1)
vert_i1.parm("value1").setExpression("$VTX*0.1")
vert_i1.setInput( 0, vert_f3 )
vert_i2 = geo.createNode( "attribcreate", node_name = "vert_i2", exact_type_name=True )
vert_i2.parm("name").set("vert_i2")
vert_i2.parm("class").set(3)
vert_i2.parm("type").set(1)
vert_i2.parm("size").set(2)
vert_i2.parm("value1").setExpression("$VTX*0.1")
vert_i2.parm("value2").setExpression("$VTX*0.1")
vert_i2.setInput( 0, vert_i1 )
vert_i3 = geo.createNode( "attribcreate", node_name = "vert_i3", exact_type_name=True )
vert_i3.parm("name").set("vert_i3")
vert_i3.parm("class").set(3)
vert_i3.parm("type").set(1)
vert_i3.parm("size").set(3)
vert_i3.parm("value1").setExpression("$VTX*0.1")
vert_i3.parm("value2").setExpression("$VTX*0.1")
vert_i3.parm("value3").setExpression("$VTX*0.1")
vert_i3.setInput( 0, vert_i2 )
vert_v3f = geo.createNode( "attribcreate", node_name = "vert_v3f", exact_type_name=True )
vert_v3f.parm("name").set("vert_v3f")
vert_v3f.parm("class").set(3)
vert_v3f.parm("type").set(2)
vert_v3f.parm("value1").setExpression("$VTX*0.1")
vert_v3f.parm("value2").setExpression("$VTX*0.1")
vert_v3f.parm("value3").setExpression("$VTX*0.1")
vert_v3f.setInput( 0, vert_i3 )
vertString = geo.createNode( "attribcreate", node_name = "vertString", exact_type_name=True )
vertString.parm("name").set("vertString")
vertString.parm("class").set(3)
vertString.parm("type").set(3)
vertString.parm("string").set("string $VTX!")
vertString.setInput( 0, vert_v3f )
detail_i3 = geo.createNode( "attribcreate", node_name = "detail_i3", exact_type_name=True )
detail_i3.parm("name").set("detail_i3")
detail_i3.parm("class").set(0)
detail_i3.parm("type").set(1)
detail_i3.parm("size").set(3)
detail_i3.parm("value1").set(123)
detail_i3.parm("value2").set(456.789) # can we catch it out with a float?
detail_i3.parm("value3").set(789)
detail_i3.setInput( 0, vertString )
out = geo.createNode( "null", node_name="OUT" )
out.setInput( 0, detail_i3 )
# convert it all
converter = IECoreHoudini.FromHoudiniPointsConverter( out )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
result = converter.convert()
self.assert_( result.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
bbox = result.bound()
self.assertEqual( bbox.min.x, -1.5 )
self.assertEqual( bbox.max.x, 1.5 )
self.assertEqual( result.numPoints, 100 )
for i in range( result.numPoints ) :
self.assert_( result["P"].data[i].x >= bbox.min.x )
self.assert_( result["P"].data[i].x <= bbox.max.x )
# test point attributes
self.assert_( "P" in result )
self.assertEqual( result['P'].data.typeId(), IECore.TypeId.V3fVectorData )
self.assertEqual( result['P'].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( result['P'].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assert_( "N" in result )
self.assertEqual( result['N'].data.typeId(), IECore.TypeId.V3fVectorData )
self.assertEqual( result['N'].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( result['N'].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( result["N"].data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
# test detail attributes
self.assert_( "detail_i3" in result )
self.assertEqual( result['detail_i3'].data.typeId(), IECore.TypeId.V3iData )
self.assertEqual( result['detail_i3'].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( result['detail_i3'].data.value.x, 123 )
self.assertEqual( result['detail_i3'].data.value.y, 456 )
self.assertEqual( result['detail_i3'].data.value.z, 789 )
# test primitive attributes
self.assert_( "Cs" in result )
self.assertEqual( result["Cs"].data.typeId(), IECore.TypeId.Color3fVectorData )
self.assertEqual( result["Cs"].interpolation, IECore.PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( result["Cs"].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Uniform ) )
for i in range( 0, result.variableSize( IECore.PrimitiveVariable.Interpolation.Uniform ) ) :
for j in range( 0, 3 ) :
self.assert_( result["Cs"].data[i][j] >= 0.0 )
self.assert_( result["Cs"].data[i][j] <= 1.0 )
# test vertex attributes
attrs = [ "vert_f1", "vert_f2", "vert_f3", "vert_i1", "vert_i2", "vert_i3", "vert_v3f", "vertStringIndices" ]
for a in attrs :
self.assert_( a in result )
self.assertEqual( result[a].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( result[a].data.size(), result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( result["vert_f1"].data.typeId(), IECore.FloatVectorData.staticTypeId() )
self.assertEqual( result["vert_f2"].data.typeId(), IECore.V2fVectorData.staticTypeId() )
self.assertEqual( result["vert_f3"].data.typeId(), IECore.V3fVectorData.staticTypeId() )
for i in range( 0, result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) ) :
for j in range( 0, 3 ) :
self.assert_( result["vert_f3"].data[i][j] >= 0.0 )
self.assert_( result["vert_f3"].data[i][j] < 400.1 )
self.assertEqual( result["vert_i1"].data.typeId(), IECore.IntVectorData.staticTypeId() )
self.assertEqual( result["vert_i2"].data.typeId(), IECore.V2iVectorData.staticTypeId() )
self.assertEqual( result["vert_i3"].data.typeId(), IECore.V3iVectorData.staticTypeId() )
for i in range( 0, result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) ) :
for j in range( 0, 3 ) :
self.assert_( result["vert_i3"].data[i][j] < 10 )
self.assertEqual( result["vert_v3f"].data.typeId(), IECore.V3fVectorData.staticTypeId() )
self.assertEqual( result["vertString"].data.typeId(), IECore.TypeId.StringVectorData )
self.assertEqual( result["vertString"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( result["vertStringIndices"].data.typeId(), IECore.TypeId.IntVectorData )
for i in range( 0, result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ) ) :
self.assertEqual( result["vertString"].data[i], "string %d!" % i )
self.assertEqual( result["vertStringIndices"].data[i], i )
self.assert_( result.arePrimitiveVariablesValid() )
# convert some points
def testConvertPoints( self ) :
points = self.createPoints()
converter = IECoreHoudini.FromHoudiniPointsConverter( points )
result = converter.convert()
self.assertEqual( result.typeId(), IECore.PointsPrimitive.staticTypeId() )
self.assertEqual( points.parm('npts').eval(), result.numPoints )
self.assert_( "P" in result.keys() )
self.assert_( "N" in result.keys() )
self.assert_( result.arePrimitiveVariablesValid() )
# simple attribute conversion
def testSetupAttributes( self ) :
points = self.createPoints()
geo = points.parent()
attr = geo.createNode( "attribcreate", exact_type_name=True )
attr.setInput( 0, points )
attr.parm("name").set( "test_attribute" )
attr.parm("type").set(0) # float
attr.parm("size").set(1) # 1 element
attr.parm("value1").set(123.456)
attr.parm("value2").set(654.321)
converter = IECoreHoudini.FromHoudiniPointsConverter( attr )
result = converter.convert()
self.assert_( "test_attribute" in result.keys() )
self.assertEqual( result["test_attribute"].data.size(), points.parm('npts').eval() )
self.assert_( result.arePrimitiveVariablesValid() )
return attr
# testing point attributes and types
def testPointAttributes( self ) :
attr = self.testSetupAttributes()
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.FloatVectorData )
self.assert_( result["test_attribute"].data[0] > 123.0 )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # integer
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.IntVectorData )
self.assertEqual( result["test_attribute"].data[0], 123 )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(0) # float
attr.parm("size").set(2) # 2 elementS
attr.parm("value2").set(456.789)
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V2fVectorData )
self.assertEqual( result["test_attribute"].data[0], IECore.V2f( 123.456, 456.789 ) )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # int
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V2iVectorData )
self.assertEqual( result["test_attribute"].data[0], IECore.V2i( 123, 456 ) )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(0) # float
attr.parm("size").set(3) # 3 elements
attr.parm("value3").set(999.999)
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V3fVectorData )
self.assertEqual( result["test_attribute"].data[0],IECore.V3f( 123.456, 456.789, 999.999 ) )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # int
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V3iVectorData )
self.assertEqual( result["test_attribute"].data[0], IECore.V3i( 123, 456, 999 ) )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set( 3 ) # string
attr.parm( "string" ).set( "string $PT!" )
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.StringVectorData )
self.assertEqual( result["test_attribute"].data[10], "string 10!" )
self.assertEqual( result["test_attribute"].data.size(), 5000 )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( result["test_attributeIndices"].data.typeId(), IECore.TypeId.IntVectorData )
self.assertEqual( result["test_attributeIndices"].data[10], 10 )
self.assertEqual( result["test_attributeIndices"].data.size(), 5000 )
self.assertEqual( result["test_attributeIndices"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assert_( result.arePrimitiveVariablesValid() )
# testing detail attributes and types
def testDetailAttributes( self ) :
attr = self.testSetupAttributes()
attr.parm("class").set(0) # detail attribute
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
attr.parm("value1").set(123.456)
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.FloatData )
self.assert_( result["test_attribute"].data > IECore.FloatData( 123.0 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # integer
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.IntData )
self.assertEqual( result["test_attribute"].data, IECore.IntData( 123 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(0) # float
attr.parm("size").set(2) # 2 elementS
attr.parm("value2").set(456.789)
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V2fData )
self.assertEqual( result["test_attribute"].data.value, IECore.V2f( 123.456, 456.789 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # int
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V2iData )
self.assertEqual( result["test_attribute"].data.value, IECore.V2i( 123, 456 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(0) # float
attr.parm("size").set(3) # 3 elements
attr.parm("value3").set(999.999)
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V3fData )
self.assertEqual( result["test_attribute"].data.value, IECore.V3f( 123.456, 456.789, 999.999 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set(1) # int
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.V3iData )
self.assertEqual( result["test_attribute"].data.value, IECore.V3i( 123, 456, 999 ) )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
attr.parm("type").set( 3 ) # string
attr.parm( "string" ).set( "string!" )
result = IECoreHoudini.FromHoudiniPointsConverter( attr ).convert()
self.assertEqual( result["test_attribute"].data.typeId(), IECore.TypeId.StringData )
self.assertEqual( result["test_attribute"].data.value, "string!" )
self.assertEqual( result["test_attribute"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assert_( result.arePrimitiveVariablesValid() )
# testing that float[4] doesn't work!
def testFloat4attr( self ) : # we can't deal with float 4's right now
attr = self.testSetupAttributes()
attr.parm("name").set( "test_attribute" )
attr.parm("size").set(4) # 4 elements per point-attribute
converter = IECoreHoudini.FromHoudiniPointsConverter( attr )
result = converter.convert()
self.assert_( "test_attribute" not in result.keys() ) # invalid due to being float[4]
self.assert_( result.arePrimitiveVariablesValid() )
# testing conversion of animating geometry
def testAnimatingGeometry( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
torus = geo.createNode( "torus" )
facet = geo.createNode( "facet" )
facet.parm("postnml").set(True)
mountain = geo.createNode( "mountain" )
mountain.parm("offset1").setExpression( "$FF" )
points = geo.createNode( "scatter" )
facet.setInput( 0, torus )
mountain.setInput( 0, facet )
points.setInput( 0, mountain )
converter = IECoreHoudini.FromHoudiniPointsConverter( points )
hou.setFrame(1)
points_1 = converter.convert()
hou.setFrame(2)
converter = IECoreHoudini.FromHoudiniPointsConverter( points )
points_2 = converter.convert()
self.assertNotEqual( points_1["P"].data, points_2["P"].data )
# testing we can handle an object being deleted
def testObjectWasDeleted( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
torus = geo.createNode( "torus" )
converter = IECoreHoudini.FromHoudiniPointsConverter( torus )
g1 = converter.convert()
torus.destroy()
g2 = converter.convert()
self.assertEqual( g2, g1 )
self.assertRaises( RuntimeError, IECore.curry( IECoreHoudini.FromHoudiniPointsConverter, torus ) )
# testing we can handle an object being deleted
def testObjectWasDeletedFactory( self ) :
obj = hou.node("/obj")
geo = obj.createNode("geo", run_init_scripts=False)
torus = geo.createNode( "torus" )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( torus )
g1 = converter.convert()
torus.destroy()
g2 = converter.convert()
self.assertEqual( g2, g1 )
self.assertRaises( RuntimeError, IECore.curry( IECoreHoudini.FromHoudiniGeometryConverter.create, torus ) )
# testing converting a Houdini particle primitive with detail and point attribs
def testParticlePrimitive( self ) :
obj = hou.node("/obj")
geo = obj.createNode( "geo", run_init_scripts=False )
popnet = geo.createNode( "popnet" )
location = popnet.createNode( "location" )
detailAttr = popnet.createOutputNode( "attribcreate", exact_type_name=True )
detailAttr.parm("name").set( "float3detail" )
detailAttr.parm("class").set( 0 ) # detail
detailAttr.parm("type").set( 0 ) # float
detailAttr.parm("size").set( 3 ) # 3 elements
detailAttr.parm("value1").set( 1 )
detailAttr.parm("value2").set( 2 )
detailAttr.parm("value3").set( 3 )
pointAttr = detailAttr.createOutputNode( "attribcreate", exact_type_name=True )
pointAttr.parm("name").set( "float3point" )
pointAttr.parm("class").set( 2 ) # point
pointAttr.parm("type").set( 0 ) # float
pointAttr.parm("size").set( 3 ) # 3 elements
pointAttr.parm("value1").set( 1 )
pointAttr.parm("value2").set( 2 )
pointAttr.parm("value3").set( 3 )
hou.setFrame( 5 )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( pointAttr )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
points = converter.convert()
self.assertEqual( type(points), IECore.PointsPrimitive )
self.assertEqual( points.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ), 21 )
self.assertEqual( points["float3detail"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( type(points["float3detail"].data), IECore.V3fData )
self.assert_( points["float3detail"].data.value.equalWithRelError( IECore.V3f( 1, 2, 3 ), 1e-10 ) )
self.assertEqual( type(points["float3point"].data), IECore.V3fVectorData )
self.assertEqual( points["float3point"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
for p in points["float3point"].data :
self.assert_( p.equalWithRelError( IECore.V3f( 1, 2, 3 ), 1e-10 ) )
self.assert_( points.arePrimitiveVariablesValid() )
add = pointAttr.createOutputNode( "add" )
add.parm( "keep" ).set( 1 ) # deletes primitive and leaves points
converter = IECoreHoudini.FromHoudiniPointsConverter( add )
points2 = converter.convert()
del points['generator']
del points['generatorIndices']
del points['born']
del points['source']
self.assertEqual( points2, points )
def testMultipleParticlePrimitives( self ) :
obj = hou.node("/obj")
geo = obj.createNode( "geo", run_init_scripts=False )
popnet = geo.createNode( "popnet" )
fireworks = popnet.createNode( "fireworks" )
hou.setFrame( 15 )
converter = IECoreHoudini.FromHoudiniPointsConverter( popnet )
points = converter.convert()
self.assertEqual( type(points), IECore.PointsPrimitive )
self.assertEqual( points.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ), 24 )
self.assertEqual( points["accel"].interpolation, IECore.PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( type(points["accel"].data), IECore.V3fVectorData )
self.assertEqual( points["accel"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
self.assertEqual( points["nextid"].interpolation, IECore.PrimitiveVariable.Interpolation.Constant )
self.assertEqual( points["nextid"].data, IECore.IntData( 25 ) )
self.assertTrue( points.arePrimitiveVariablesValid() )
add = popnet.createOutputNode( "add" )
add.parm( "keep" ).set( 1 ) # deletes primitive and leaves points
converter = IECoreHoudini.FromHoudiniPointsConverter( add )
points2 = converter.convert()
# showing that prim attribs don't get converted because the interpolation size doesn't match
self.assertEqual( points2, points )
def testName( self ) :
points = self.createPoints()
particles = points.createOutputNode( "add" )
particles.parm( "addparticlesystem" ).set( True )
name = particles.createOutputNode( "name" )
name.parm( "name1" ).set( "points" )
box = points.parent().createNode( "box" )
name2 = box.createOutputNode( "name" )
name2.parm( "name1" ).set( "box" )
merge = name.createOutputNode( "merge" )
merge.setInput( 1, name2 )
converter = IECoreHoudini.FromHoudiniPointsConverter( merge )
result = converter.convert()
# names are not stored on the object at all
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertFalse( "name" in result )
self.assertFalse( "nameIndices" in result )
# both shapes were converted as one PointsPrimitive
self.assertEqual( result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ), 5008 )
self.assertEqual( result.variableSize( IECore.PrimitiveVariable.Interpolation.Uniform ), 1 )
self.assertTrue( result.arePrimitiveVariablesValid() )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge, "points" )
self.assertTrue( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
result = converter.convert()
# names are not stored on the object at all
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertFalse( "name" in result )
self.assertFalse( "nameIndices" in result )
# only the named points were converted
self.assertEqual( result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ), 5000 )
self.assertTrue( result.arePrimitiveVariablesValid() )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge, "box", IECore.TypeId.PointsPrimitive )
self.assertTrue( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
result = converter.convert()
# names are not stored on the object at all
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertFalse( "name" in result )
self.assertFalse( "nameIndices" in result )
# only the named points were converted
self.assertEqual( result.variableSize( IECore.PrimitiveVariable.Interpolation.Vertex ), 8 )
self.assertEqual( result.variableSize( IECore.PrimitiveVariable.Interpolation.Uniform ), 1 )
self.assertTrue( result.arePrimitiveVariablesValid() )
def testAttributeFilter( self ) :
points = self.createPoints()
particles = points.createOutputNode( "add" )
particles.parm( "addparticlesystem" ).set( True )
# add vertex normals
facet = particles.createOutputNode( "facet", node_name = "add_point_normals" )
facet.parm("postnml").set(True)
# add a primitive colour attributes
primcol = facet.createOutputNode( "primitive", node_name = "prim_colour" )
primcol.parm("doclr").set(1)
primcol.parm("diffr").setExpression("rand($PR)")
primcol.parm("diffg").setExpression("rand($PR+1)")
primcol.parm("diffb").setExpression("rand($PR+2)")
detail = primcol.createOutputNode( "attribcreate", node_name = "detail", exact_type_name=True )
detail.parm("name").set("detailAttr")
detail.parm("class").set(0)
detail.parm("type").set(1)
detail.parm("size").set(3)
detail.parm("value1").set(123)
detail.parm("value2").set(456.789) # can we catch it out with a float?
detail.parm("value3").set(789)
converter = IECoreHoudini.FromHoudiniPointsConverter( detail )
self.assertEqual( sorted(converter.convert().keys()), [ "Cs", "N", "P", "detailAttr", "varmap" ] )
converter.parameters()["attributeFilter"].setTypedValue( "P" )
self.assertEqual( sorted(converter.convert().keys()), [ "P" ] )
converter.parameters()["attributeFilter"].setTypedValue( "* ^N ^varmap" )
self.assertEqual( sorted(converter.convert().keys()), [ "Cs", "P", "detailAttr" ] )
# P must be converted
converter.parameters()["attributeFilter"].setTypedValue( "* ^P" )
self.assertTrue( "P" in converter.convert().keys() )
def testStandardAttributeConversion( self ) :
points = self.createPoints()
color = points.createOutputNode( "color" )
color.parm( "colortype" ).set( 2 )
rest = color.createOutputNode( "rest" )
scale = rest.createOutputNode( "attribcreate" )
scale.parm( "name1" ).set( "pscale" )
scale.parm( "value1v1" ).setExpression( "$PT" )
converter = IECoreHoudini.FromHoudiniPointsConverter( scale )
result = converter.convert()
self.assertEqual( result.keys(), [ "Cs", "N", "P", "Pref", "varmap", "width" ] )
self.assertTrue( result.arePrimitiveVariablesValid() )
self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( result["Pref"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( result["N"].data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
converter["convertStandardAttributes"].setTypedValue( False )
result = converter.convert()
self.assertEqual( result.keys(), [ "Cd", "N", "P", "pscale", "rest", "varmap" ] )
self.assertTrue( result.arePrimitiveVariablesValid() )
self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( result["rest"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( result["N"].data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
if __name__ == "__main__":
unittest.main()
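# A minimal standalone usage sketch of the converter exercised above
# (illustrative only; it assumes a live Houdini session with a points-producing
# SOP at /obj/geo1/scatter1 -- that node path is an assumption, not part of
# these tests):
#
#     import hou
#     import IECoreHoudini
#     sop = hou.node( "/obj/geo1/scatter1" )
#     converter = IECoreHoudini.FromHoudiniPointsConverter( sop )
#     converter.parameters()["attributeFilter"].setTypedValue( "P Cd" )
#     points = converter.convert()  # an IECore.PointsPrimitive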
|
DoubleNegativeVisualEffects/cortex
|
test/IECoreHoudini/FromHoudiniPointsConverter.py
|
Python
|
bsd-3-clause
| 39,834
|
# Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from . import Pmod_DevMode
from . import PMOD_SWCFG_DIOALL
from . import PMOD_DIO_BASEADDR
from . import PMOD_DIO_TRI_OFFSET
from . import PMOD_DIO_DATA_OFFSET
from . import PMOD_CFG_DIO_ALLOUTPUT
from . import PMOD_NUM_DIGITAL_PINS
__author__ = "Graham Schelle, Giuseppe Natale, Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "pynq_support@xilinx.com"
class Pmod_LED8(Pmod_DevMode):
"""This class controls a single LED on the LED8 Pmod.
The Pmod LED8 (PB 200-163) has eight high-brightness LEDs. Each LED can be
individually illuminated from a logic high signal.
Attributes
----------
microblaze : Pmod
Microblaze processor instance used by this module.
iop_switch_config : list
Microblaze processor IO switch configuration (8 integers).
index : int
Index of the pin on LED8, starting from 0.
"""
def __init__(self, mb_info, index):
"""Return a new instance of a LED object.
Parameters
----------
mb_info : dict
A dictionary storing Microblaze information, such as the
IP name and the reset name.
index: int
The index of the pin in a Pmod, starting from 0.
"""
if index not in range(PMOD_NUM_DIGITAL_PINS):
raise ValueError("Valid pin indexes are 0 - {}."
.format(PMOD_NUM_DIGITAL_PINS-1))
super().__init__(mb_info, PMOD_SWCFG_DIOALL)
self.index = index
self.start()
self.write_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_TRI_OFFSET,
PMOD_CFG_DIO_ALLOUTPUT)
def toggle(self):
"""Flip the bit of a single LED.
Note
----
The LED will be turned off if it is on. Similarly, it will be turned
on if it is off.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val ^ (0x1 << self.index)
self._set_leds_values(new_val)
def on(self):
"""Turn on a single LED.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val | (0x1 << self.index)
self._set_leds_values(new_val)
def off(self):
"""Turn off a single LED.
Returns
-------
None
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
new_val = curr_val & (0xff ^ (0x1 << self.index))
self._set_leds_values(new_val)
def write(self, value):
"""Set the LED state according to the input value
Note
----
This method does not take into account the current LED state.
Parameters
----------
value : int
Turn on the LED if value is 1; turn it off if value is 0.
Returns
-------
None
"""
if value not in (0, 1):
raise ValueError("LED8 can only write 0 or 1.")
if value:
self.on()
else:
self.off()
def read(self):
"""Retrieve the LED state.
Returns
-------
int
The data (0 or 1) read out from the selected pin.
"""
curr_val = self.read_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET)
return (curr_val >> self.index) & 0x1
def _set_leds_values(self, value):
"""Set the state for all the LEDs.
Note
----
Should not be used directly. User should rely on toggle(), on(),
off(), write(), and read() instead.
Parameters
----------
value : int
The state of all the LEDs encoded in one single value
Returns
-------
None
"""
self.write_cmd(PMOD_DIO_BASEADDR +
PMOD_DIO_DATA_OFFSET, value)
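# A minimal usage sketch (illustrative only, not part of this module): on a
# PYNQ board the `mb_info` argument is normally taken from a loaded overlay,
# e.g. `base.PMODA` below; the overlay name and interface are assumptions
# here, not defined in this file.
#
#     from pynq.overlays.base import BaseOverlay
#     from pynq.lib.pmod import Pmod_LED8
#
#     base = BaseOverlay("base.bit")
#     led0 = Pmod_LED8(base.PMODA, 0)
#     led0.on()            # drive the pin high, illuminating LED 0
#     led0.toggle()        # flip its state
#     print(led0.read())   # read back 0 or 1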
|
schelleg/PYNQ
|
pynq/lib/pmod/pmod_led8.py
|
Python
|
bsd-3-clause
| 5,931
|
"""
Demonstrates how the bz2 module may be used to create a compressed object
which represents a bitarray.
"""
import bz2
from bitarray import bitarray
def compress(ba):
"""
Given a bitarray, return an object which represents all information
within the bitarray in a compressed form.
The function `decompress` can be used to restore the bitarray from the
compressed object.
"""
assert isinstance(ba, bitarray)
return ba.length(), bz2.compress(ba.tobytes()), ba.endian()
def decompress(obj):
"""
Given an object (created by `compress`), return a copy of the
original bitarray.
"""
n, data, endian = obj
res = bitarray(endian=endian)
res.frombytes(bz2.decompress(data))
del res[n:]
return res
if __name__ == '__main__':
a = bitarray(12345)
a.setall(0)
a[::10] = True
c = compress(a)
print(c)
b = decompress(c)
assert a == b and a.endian() == b.endian()
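# An illustrative follow-up (not in the original demo): the middle element of
# the compressed tuple is the bz2 payload, which for the sparse, regular
# pattern above is far smaller than the 1544 raw bytes needed to store
# 12345 bits directly:
#
#     nbits, payload, endian = c
#     print(len(a.tobytes()), "raw bytes ->", len(payload), "compressed bytes")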
|
brianhelba/pylibtiff
|
libtiff/bitarray-a1646c0/examples/compress.py
|
Python
|
bsd-3-clause
| 953
|
''' Functions for arranging bokeh Layout objects.
'''
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from .core.enums import Location, SizingMode
from .models.tools import ToolbarBox
from .models.plots import Plot
from .models.layouts import LayoutDOM, Row, Column, Spacer, WidgetBox
from .models.widgets import Widget
from .util._plot_arg_helpers import _convert_responsive
#-----------------------------------------------------------------------------
# Common helper functions
#-----------------------------------------------------------------------------
def _handle_children(*args, **kwargs):
children = kwargs.get('children')
# Set-up Children from args or kwargs
if len(args) > 0 and children is not None:
raise ValueError("'children' keyword cannot be used with positional arguments")
if not children:
if len(args) == 1 and isinstance(args[0], list):
children = args[0]
elif len(args) == 1 and isinstance(args[0], GridSpec):
children = args[0]
else:
children = list(args)
return children
def _verify_sizing_mode(sizing_mode):
if sizing_mode not in SizingMode:
raise ValueError("Invalid value of sizing_mode: %s" % sizing_mode)
def row(*args, **kwargs):
""" Create a row of Bokeh Layout objects. Forces all objects to
have the same sizing_mode, which is required for complex layouts to work.
Args:
children (list of :class:`~bokeh.models.layouts.LayoutDOM` ): A list of instances for
the row. Can be any of the following - :class:`~bokeh.models.plots.Plot`,
:class:`~bokeh.models.widgets.widget.Widget`, :class:`~bokeh.models.layouts.WidgetBox`,
:class:`~bokeh.models.layouts.Row`,
:class:`~bokeh.models.layouts.Column`,
:class:`~bokeh.models.tools.ToolbarBox`,
:class:`~bokeh.models.layouts.Spacer`.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
responsive (``True``, ``False``): True sets ``sizing_mode`` to
``"width_ar"``. ``False`` sets sizing_mode to ``"fixed"``. Using
responsive will override sizing_mode.
Returns:
Row: A row of LayoutDOM objects all with the same sizing_mode.
Examples:
>>> row([plot_1, plot_2])
>>> row(children=[widget_box_1, plot_1], sizing_mode='stretch_both')
"""
responsive = kwargs.pop('responsive', None)
sizing_mode = kwargs.pop('sizing_mode', 'fixed')
children = kwargs.pop('children', None)
if responsive:
sizing_mode = _convert_responsive(responsive)
_verify_sizing_mode(sizing_mode)
children = _handle_children(*args, children=children)
row_children = []
for item in children:
if isinstance(item, LayoutDOM):
item.sizing_mode = sizing_mode
row_children.append(item)
else:
raise ValueError(
"""Only LayoutDOM items can be inserted into a row.
Tried to insert: %s of type %s""" % (item, type(item))
)
return Row(children=row_children, sizing_mode=sizing_mode, **kwargs)
def column(*args, **kwargs):
""" Create a column of Bokeh Layout objects. Forces all objects to
have the same sizing_mode, which is required for complex layouts to work.
Args:
children (list of :class:`~bokeh.models.layouts.LayoutDOM` ): A list of instances for
the column. Can be any of the following - :class:`~bokeh.models.plots.Plot`,
:class:`~bokeh.models.widgets.widget.Widget`, :class:`~bokeh.models.layouts.WidgetBox`,
:class:`~bokeh.models.layouts.Row`,
:class:`~bokeh.models.layouts.Column`,
:class:`~bokeh.models.tools.ToolbarBox`,
:class:`~bokeh.models.layouts.Spacer`.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
responsive (``True``, ``False``): True sets ``sizing_mode`` to
``"width_ar"``. ``False`` sets sizing_mode to ``"fixed"``. Using
responsive will override sizing_mode.
Returns:
Column: A column of LayoutDOM objects all with the same sizing_mode.
Examples:
>>> column([plot_1, plot_2])
>>> column(children=[widget_box_1, plot_1], sizing_mode='stretch_both')
"""
responsive = kwargs.pop('responsive', None)
sizing_mode = kwargs.pop('sizing_mode', 'fixed')
children = kwargs.pop('children', None)
if responsive:
sizing_mode = _convert_responsive(responsive)
_verify_sizing_mode(sizing_mode)
children = _handle_children(*args, children=children)
col_children = []
for item in children:
if isinstance(item, LayoutDOM):
item.sizing_mode = sizing_mode
col_children.append(item)
else:
raise ValueError(
"""Only LayoutDOM items can be inserted into a column.
Tried to insert: %s of type %s""" % (item, type(item))
)
return Column(children=col_children, sizing_mode=sizing_mode, **kwargs)
def widgetbox(*args, **kwargs):
""" Create a WidgetBox of Bokeh widgets. Forces all to
have the same sizing_mode, which is required for complex layouts to work.
Args:
children (list of :class:`~bokeh.models.widgets.widget.Widget` ): A list
of widgets for the WidgetBox.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
responsive (``True``, ``False``): True sets ``sizing_mode`` to
``"width_ar"``. ``False`` sets sizing_mode to ``"fixed"``. Using
responsive will override sizing_mode.
Returns:
WidgetBox: A WidgetBox of Widget instances all with the same sizing_mode.
Examples:
>>> widgetbox([button, select])
>>> widgetbox(children=[slider], sizing_mode='scale_width')
"""
responsive = kwargs.pop('responsive', None)
sizing_mode = kwargs.pop('sizing_mode', 'fixed')
children = kwargs.pop('children', None)
if responsive:
sizing_mode = _convert_responsive(responsive)
_verify_sizing_mode(sizing_mode)
children = _handle_children(*args, children=children)
widget_children = []
for item in children:
if isinstance(item, Widget):
item.sizing_mode = sizing_mode
widget_children.append(item)
else:
raise ValueError(
"""Only Widgets can be inserted into a WidgetBox.
Tried to insert: %s of type %s""" % (item, type(item))
)
return WidgetBox(children=widget_children, sizing_mode=sizing_mode, **kwargs)
def layout(*args, **kwargs):
""" Create a grid-based arrangement of Bokeh Layout objects. Forces all objects to
have the same sizing mode, which is required for complex layouts to work. Returns a nested set
of Rows and Columns.
Args:
children (list of lists of :class:`~bokeh.models.layouts.LayoutDOM` ): A list of lists of instances
for a grid layout. Can be any of the following - :class:`~bokeh.models.plots.Plot`,
:class:`~bokeh.models.widgets.widget.Widget`, :class:`~bokeh.models.layouts.WidgetBox`,
:class:`~bokeh.models.layouts.Row`,
:class:`~bokeh.models.layouts.Column`,
:class:`~bokeh.models.tools.ToolbarBox`,
:class:`~bokeh.models.layouts.Spacer`.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
responsive (``True``, ``False``): True sets ``sizing_mode`` to
``"width_ar"``. ``False`` sets sizing_mode to ``"fixed"``. Using
responsive will override sizing_mode.
Returns:
Column: A column of ``Row`` layouts of the children, all with the same sizing_mode.
Examples:
>>> layout([[plot_1, plot_2], [plot_3, plot_4]])
>>> layout(
children=[
[widget_box_1, plot_1],
[slider],
[widget_box_2, plot_2, plot_3]
],
sizing_mode='fixed',
)
"""
responsive = kwargs.pop('responsive', None)
sizing_mode = kwargs.pop('sizing_mode', 'fixed')
children = kwargs.pop('children', None)
if responsive:
sizing_mode = _convert_responsive(responsive)
_verify_sizing_mode(sizing_mode)
children = _handle_children(*args, children=children)
# Make the grid
rows = []
for r in children:
row_children = []
for item in r:
if isinstance(item, LayoutDOM):
item.sizing_mode = sizing_mode
row_children.append(item)
else:
raise ValueError(
"""Only LayoutDOM items can be inserted into a layout.
Tried to insert: %s of type %s""" % (item, type(item))
)
rows.append(row(children=row_children, sizing_mode=sizing_mode))
grid = column(children=rows, sizing_mode=sizing_mode)
return grid
def _chunks(l, ncols):
"""Yield successive n-sized chunks from list, l."""
assert isinstance(ncols, int), "ncols must be an integer"
for i in range(0, len(l), ncols):
yield l[i: i+ncols]
def gridplot(*args, **kwargs):
""" Create a grid of plots rendered on separate canvases. ``gridplot`` builds a single toolbar
for all the plots in the grid. ``gridplot`` is designed to lay out a set of plots. For general
grid layout, use the :func:`~bokeh.layouts.layout` function.
Args:
children (list of lists of :class:`~bokeh.models.plots.Plot` ): An
array of plots to display in a grid, given as a list of lists of Plot
objects. To leave a position in the grid empty, pass None for that
position in the children list. OR list of :class:`~bokeh.models.plots.Plot` if called with
ncols. OR an instance of GridSpec.
sizing_mode (``"fixed"``, ``"stretch_both"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"`` ): How
will the items in the layout resize to fill the available space.
Default is ``"fixed"``. For more information on the different
modes see :attr:`~bokeh.models.layouts.LayoutDOM.sizing_mode`
description on :class:`~bokeh.models.layouts.LayoutDOM`.
toolbar_location (``above``, ``below``, ``left``, ``right`` ): Where the
toolbar will be located, with respect to the grid. Default is
``above``. If set to None, no toolbar will be attached to the grid.
ncols ``Int`` (optional): Specify the number of columns you would like in your grid.
You must only pass an un-nested list of plots (as opposed to a list of lists of plots)
when using ncols.
responsive (``True``, ``False``): True sets ``sizing_mode`` to
``"width_ar"``. ``False`` sets sizing_mode to ``"fixed"``. Using
responsive will override sizing_mode.
plot_width (int, optional): The width you would like all your plots to be
plot_height (int, optional): The height you would like all your plots to be.
toolbar_options (dict, optional) : A dictionary of options that will be
used to construct the grid's toolbar (an instance of
:class:`~bokeh.models.tools.ToolbarBox`). If none is supplied,
ToolbarBox's defaults will be used.
merge_tools (``True``, ``False``): Combine tools from all child plots into
a single toolbar.
Returns:
Row or Column: A row or column containing the grid toolbar and the grid
of plots (depending on whether the toolbar is left/right or
above/below). The grid is always a Column of Rows of plots.
Examples:
>>> gridplot([[plot_1, plot_2], [plot_3, plot_4]])
>>> gridplot([plot_1, plot_2, plot_3, plot_4], ncols=2, plot_width=200, plot_height=100)
>>> gridplot(
children=[[plot_1, plot_2], [None, plot_3]],
toolbar_location='right',
sizing_mode='fixed',
toolbar_options=dict(logo='gray')
)
"""
toolbar_location = kwargs.get('toolbar_location', 'above')
sizing_mode = kwargs.get('sizing_mode', 'fixed')
children = kwargs.get('children')
responsive = kwargs.get('responsive')
toolbar_options = kwargs.get('toolbar_options')
plot_width = kwargs.get('plot_width')
plot_height = kwargs.get('plot_height')
ncols = kwargs.get('ncols')
merge_tools = kwargs.get('merge_tools', True)
# Integrity checks & set-up
if responsive:
sizing_mode = _convert_responsive(responsive)
_verify_sizing_mode(sizing_mode)
if toolbar_location:
if not hasattr(Location, toolbar_location):
raise ValueError("Invalid value of toolbar_location: %s" % toolbar_location)
children = _handle_children(*args, children=children)
if ncols:
if any(isinstance(child, list) for child in children):
raise ValueError("Cannot provide a nested list when using ncols")
children = list(_chunks(children, ncols))
# Additional children set-up for grid plot
if not children:
children = []
# Make the grid
tools = []
rows = []
for row in children:
row_tools = []
row_children = []
for item in row:
if merge_tools:
if item is not None:
for plot in item.select(dict(type=Plot)):
row_tools = row_tools + plot.toolbar.tools
plot.toolbar_location = None
if item is None:
for neighbor in row:
if isinstance(neighbor, Plot):
break
item = Spacer(width=neighbor.plot_width, height=neighbor.plot_height)
if isinstance(item, LayoutDOM):
item.sizing_mode = sizing_mode
if isinstance(item, Plot):
if plot_width:
item.plot_width = plot_width
if plot_height:
item.plot_height = plot_height
row_children.append(item)
else:
raise ValueError("Only LayoutDOM items can be inserted into Grid")
tools = tools + row_tools
rows.append(Row(children=row_children, sizing_mode=sizing_mode))
grid = Column(children=rows, sizing_mode=sizing_mode)
if not merge_tools:
return grid
# Make the toolbar
if toolbar_location:
if not toolbar_options:
toolbar_options = {}
if 'toolbar_location' not in toolbar_options:
toolbar_options['toolbar_location'] = toolbar_location
# Fixed sizing mode needs scale_width for the toolbar
# for layout to work correctly.
if sizing_mode == 'fixed':
toolbar_sizing_mode = 'scale_width'
else:
toolbar_sizing_mode = sizing_mode
toolbar = ToolbarBox(
tools=tools,
sizing_mode=toolbar_sizing_mode,
**toolbar_options
)
# Set up children
if toolbar_location == 'above':
return Column(children=[toolbar, grid], sizing_mode=sizing_mode)
elif toolbar_location == 'below':
return Column(children=[grid, toolbar], sizing_mode=sizing_mode)
elif toolbar_location == 'left':
return Row(children=[toolbar, grid], sizing_mode=sizing_mode)
elif toolbar_location == 'right':
return Row(children=[grid, toolbar], sizing_mode=sizing_mode)
else:
return grid
class GridSpec(object):
""" Simplifies grid layout specification. """
def __init__(self, nrows, ncols):
self.nrows = nrows
self.ncols = ncols
self._arrangement = {}
def __setitem__(self, key, obj):
k1, k2 = key
if isinstance(k1, slice):
row1, row2, _ = k1.indices(self.nrows)
else:
if k1 < 0:
k1 += self.nrows
if k1 >= self.nrows or k1 < 0:
raise IndexError("index out of range")
row1, row2 = k1, None
if isinstance(k2, slice):
col1, col2, _ = k2.indices(self.ncols)
else:
if k2 < 0:
k2 += self.ncols
if k2 >= self.ncols or k2 < 0:
raise IndexError("index out of range")
col1, col2 = k2, None
# gs[row, col] = obj
# gs[row1:row2, col] = [...]
# gs[row, col1:col2] = [...]
# gs[row1:row2, col1:col2] = [[...], ...]
def get_or_else(fn, default):
try:
return fn()
except IndexError:
return default
if row2 is None and col2 is None:
self._arrangement[row1, col1] = obj
elif row2 is None:
for col in range(col1, col2):
self._arrangement[row1, col] = get_or_else(lambda: obj[col-col1], None)
elif col2 is None:
for row in range(row1, row2):
self._arrangement[row, col1] = get_or_else(lambda: obj[row-row1], None)
else:
for row, col in zip(range(row1, row2), range(col1, col2)):
self._arrangement[row, col] = get_or_else(lambda: obj[row-row1][col-col1], None)
def __iter__(self):
array = [ [ None ]*self.ncols for _ in range(0, self.nrows) ]
for (row, col), obj in self._arrangement.items():
array[row][col] = obj
return iter(array)
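# A minimal GridSpec usage sketch (illustrative only; `p1`..`p4` are assumed
# to be bokeh Plot/figure instances created elsewhere, e.g. with
# bokeh.plotting.figure):
#
#     gs = GridSpec(2, 2)
#     gs[0, 0] = p1
#     gs[0, 1] = p2
#     gs[1, :] = [p3, p4]
#     grid = gridplot(gs, plot_width=250, plot_height=250)
#
# Iterating a GridSpec yields a row-major list of lists (with None for any
# unassigned cell), which is the same nested-children form that gridplot
# accepts directly.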
|
percyfal/bokeh
|
bokeh/layouts.py
|
Python
|
bsd-3-clause
| 19,180
|
import importlib
import inspect
import os
import re
import sys
import tempfile
from io import StringIO
from pathlib import Path
from django.conf.urls import url
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import DatabaseError, connection
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin, patch_logger
from django.urls import reverse
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.views.debug import (
CLEANSED_SUBSTITUTE, CallableSettingWrapper, ExceptionReporter,
cleanse_setting, technical_500_response,
)
from ..views import (
custom_exception_reporter_filter_view, index_page,
multivalue_dict_key_error, non_sensitive_view, paranoid_view,
sensitive_args_function_caller, sensitive_kwargs_function_caller,
sensitive_method_view, sensitive_view,
)
PY36 = sys.version_info >= (3, 6)
class User:
def __str__(self):
return 'jacob'
class WithoutEmptyPathUrls:
urlpatterns = [url(r'url/$', index_page, name='url')]
class CallableSettingWrapperTests(SimpleTestCase):
""" Unittests for CallableSettingWrapper
"""
def test_repr(self):
class WrappedCallable:
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class DebugViewTests(LoggingCaptureMixin, SimpleTestCase):
def test_files(self):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
# Ensure no 403.html template exists to test the default case.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
}])
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
# Set up a test 403.html template.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.locmem.Loader', {
'403.html': 'This is a test template for a 403 error ({{ exception }}).',
}),
],
},
}])
def test_403_template(self):
response = self.client.get('/raises403/')
self.assertContains(response, 'test template', status_code=403)
self.assertContains(response, '(Insufficient Permissions).', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_raised_404(self):
response = self.client.get('/views/raises404/')
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
def test_404_not_in_urls(self):
response = self.client.get('/not-in-urls')
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
@override_settings(ROOT_URLCONF=WithoutEmptyPathUrls)
def test_404_empty_path_not_in_urls(self):
response = self.client.get('/')
self.assertContains(response, "The empty path didn't match any of these.", status_code=404)
def test_technical_404(self):
response = self.client.get('/views/technical404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.technical404", status_code=404)
def test_classbased_technical_404(self):
response = self.client.get('/views/classbased404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.Http404View", status_code=404)
def test_non_l10ned_numeric_ids(self):
"""
Numeric IDs and the line numbers in fancy traceback context blocks shouldn't be localized.
"""
with self.settings(DEBUG=True, USE_L10N=True):
response = self.client.get('/raises500/')
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"'
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content)
self.assertIsNotNone(match)
id_repr = match.group('id')
self.assertFalse(
re.search(b'[^c0-9]', id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." % id_repr.decode()
)
def test_template_exceptions(self):
try:
self.client.get(reverse('template_exception'))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(
raising_loc.find("raise Exception('boom')"), -1,
"Failed to find 'raise Exception' in last frame of "
"traceback, instead found: %s" % raising_loc
)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [tempdir],
}]):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2)
# Assert as HTML.
self.assertContains(
response,
'<li><code>django.template.loaders.filesystem.Loader</code>: '
'%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'),
status_code=500,
html=True,
)
def test_no_template_source_loaders(self):
"""
Make sure if you don't specify a template, the debug view doesn't blow up.
"""
with self.assertRaises(TemplateDoesNotExist):
self.client.get('/render_no_template/')
@override_settings(ROOT_URLCONF='view_tests.default_urls')
def test_default_urlconf_template(self):
"""
Make sure that the default URLconf template is shown instead
of the technical 404 page, if the user has not altered their
URLconf yet.
"""
response = self.client.get('/')
self.assertContains(
response,
"<h2>Congratulations on your first Django-powered page.</h2>"
)
@override_settings(ROOT_URLCONF='view_tests.regression_21530_urls')
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get('/')
self.assertContains(
response,
"Page not found <span>(404)</span>",
status_code=404
)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
allow_database_queries = True
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug view.
"""
with connection.cursor() as cursor:
try:
cursor.execute('INVALID SQL')
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get('/'), *exc_info)
self.assertContains(response, 'OperationalError at /', status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF='view_tests.urls',
# No template directories are configured, so no templates will be found.
TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
}],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with patch_logger('django.security.SuspiciousOperation', 'error'):
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"})
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>jacob</p>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
self.assertIn('<p>No POST data</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_eol_support(self):
"""The ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = ['print %d' % i for i in range(1, 6)]
reporter = ExceptionReporter(None, None, None, None)
for newline in ['\n', '\r\n', '\r']:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, force_bytes(newline.join(LINES) + newline))
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:])
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_reporting_of_nested_exceptions(self):
request = self.rf.get('/test_view/')
try:
try:
raise AttributeError('Top level')
except AttributeError as explicit:
try:
raise ValueError('Second exception') from explicit
except ValueError:
raise IndexError('Final exception')
except Exception:
# Custom exception handler, just pass it into ExceptionReporter
exc_type, exc_value, tb = sys.exc_info()
explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:'
implicit_exc = 'During handling of the above exception ({0}), another exception occurred:'
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
# Both messages are twice on page -- one rendered as html,
# one as plain text (for pastebin)
self.assertEqual(2, html.count(explicit_exc.format("Top level")))
self.assertEqual(2, html.count(implicit_exc.format("Second exception")))
text = reporter.get_traceback_text()
self.assertIn(explicit_exc.format("Top level"), text)
self.assertIn(implicit_exc.format("Second exception"), text)
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_non_utf8_values_handling(self):
"Non-UTF-8 exceptions/values should not make the output generation choke."
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b'EXC\xe9EXC'
somevar = b'VAL\xe9VAL' # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('VAL\\xe9VAL', html)
self.assertIn('EXC\\xe9EXC', html)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput:
def __repr__(self):
raise MemoryError('OOM')
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(''))
try:
class LargeOutput:
def __repr__(self):
return repr('A' * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb
self.assertIn('<trimmed %d bytes string>' % (large + repr_of_str_adds,), html)
def test_unfrozen_importlib(self):
"""
importlib is not a frozen app, but its loader thinks it's frozen which
results in an ImportError. Refs #21443.
"""
try:
request = self.rf.get('/test_view/')
importlib.import_module('abc.def.invalid.name')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>%sError at /test_view/</h1>' % ('ModuleNotFound' if PY36 else 'Import'), html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get('/test_view/')
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
self.assertIn(
"BrokenEvaluation",
ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(),
"Evaluation exception reason not mentioned in traceback"
)
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
value = '<td>items</td><td class="code"><pre>'Oops'</pre></td>'
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre><InMemoryUploadedFile: '
'items (application/octet-stream)></pre></td>',
html
)
        # COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<td>items</td><td class="code"><pre>'Oops'</pre></td>', html)
def test_exception_fetching_user(self):
"""
The error page can be rendered if the current user can't be retrieved
(such as when the database is unavailable).
"""
class ExceptionUser:
def __str__(self):
raise Exception()
request = self.rf.get('/test_view/')
request.user = ExceptionUser()
try:
raise ValueError('Oops')
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>[unable to retrieve the current user]</p>', html)
text = reporter.get_traceback_text()
self.assertIn('USER: [unable to retrieve the current user]', text)
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('USER: jacob', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertNotIn('USER:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(DEBUG=True)
def test_template_exception(self):
request = self.rf.get('/test_view/')
try:
render(request, 'debug/template_error.html')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
templ_path = Path(Path(__file__).parent.parent, 'templates', 'debug', 'template_error.html')
self.assertIn(
'Template error:\n'
'In template %(path)s, error at line 2\n'
' \'cycle\' tag requires at least two arguments\n'
' 1 : Template with error:\n'
' 2 : {%% cycle %%} \n'
' 3 : ' % {'path': templ_path},
text
)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn('items = <InMemoryUploadedFile:', text)
        # COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin:
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value'}
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', 'admin@fattie-breakie.com')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', 'admin@fattie-breakie.com')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertNotIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', body_plain)
self.assertIn('hash-brown-value', body_plain)
self.assertIn('baked-beans-value', body_html)
self.assertIn('hash-brown-value', body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain)
self.assertNotIn('bacon-value', body_plain)
self.assertNotIn('sausage-value', body_html)
self.assertNotIn('bacon-value', body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', 'admin@fattie-breakie.com')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body = str(email.body)
self.assertNotIn('cooked_eggs', body)
self.assertNotIn('scrambled', body)
self.assertNotIn('sauce', body)
self.assertNotIn('worcestershire', body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF='view_tests.urls')
class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports (#14614).
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
        Everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
No POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Sensitive POST parameters cannot be seen in the
        error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
The sensitive_variables decorator works with object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view, check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as arguments to the decorated
function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False)
def test_sensitive_function_keyword_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as keyword arguments to the
decorated function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
Callable settings which forbid to set attributes should not break
the debug page (#23070).
"""
class CallableSettingWithSlots:
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get('/raises500/')
self.assertContains(response, 'FOOBAR', status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
'recursive': {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
class AjaxResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports.
Here we specifically test the plain text 500 debug-only error page served
    when the request is detected as having been sent by JS code. We don't check
for (non)existence of frames vars in the traceback information section of
the response content because we don't include them in these error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
def test_non_sensitive_request(self):
"""
        Request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
No POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
def test_ajax_response_encoding(self):
response = self.client.get('/raises500/', HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response['Content-Type'], 'text/plain; charset=utf-8')
class HelperFunctionTests(SimpleTestCase):
def test_cleanse_setting_basic(self):
self.assertEqual(cleanse_setting('TEST', 'TEST'), 'TEST')
self.assertEqual(cleanse_setting('PASSWORD', 'super_secret'), CLEANSED_SUBSTITUTE)
def test_cleanse_setting_ignore_case(self):
self.assertEqual(cleanse_setting('password', 'super_secret'), CLEANSED_SUBSTITUTE)
def test_cleanse_setting_recurses_in_dictionary(self):
initial = {'login': 'cooper', 'password': 'secret'}
expected = {'login': 'cooper', 'password': CLEANSED_SUBSTITUTE}
self.assertEqual(cleanse_setting('SETTING_NAME', initial), expected)
|
etos/django
|
tests/view_tests/tests/test_debug.py
|
Python
|
bsd-3-clause
| 47,063
|
import os, tempfile, zipfile, tarfile, logging
from django.core.servers.basehttp import FileWrapper
from django.http import HttpResponse
def get_zipfile(file_list):
"""
Create a ZIP file on disk and transmit it in chunks of 8KB,
without loading the whole file into memory.
"""
temp = tempfile.TemporaryFile()
archive = zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED)
for file in file_list:
file = file.encode("utf-8")
if os.path.exists(file):
archive.write(file, os.path.basename(file))
else:
logging.warn("zipfile could not find %s" % file)
archive.close()
wrapper = FileWrapper(temp)
response = HttpResponse(wrapper, content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename=commcarehq.zip'
response['Content-Length'] = temp.tell()
# this seek is required for 'response' to work
temp.seek(0)
return response
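# Illustrative usage only (the view name and file paths below are hypothetical):
# get_zipfile() is meant to be returned directly from a Django view, e.g.
#
#     def download_exports(request):
#         return get_zipfile(["/tmp/exports/forms.csv", "/tmp/exports/cases.csv"])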
def build_tarfile(file_list, output_file):
"""
Creates a tarfile on disk, given a list of input files
"""
tar = tarfile.open(name=output_file, mode="w:bz2")
if len (file_list) == 0:
logging.info("No submissions could be found.")
return HttpResponse("No submissions could be found.")
for file in file_list:
tar.add(file, os.path.basename(file) )
tar.close()
return tar
def get_tarfile(file_list, output_file):
"""
Creates a tarfile on disk, given a list of input files,
and returns it as an http response.
"""
tar = build_tarfile(file_list, output_file)
fin = open(output_file, 'rb')
wrapper = FileWrapper(fin)
response = HttpResponse(wrapper, content_type='application/tar')
response['Content-Disposition'] = 'attachment; filename=commcarehq.tar'
response['Content-Length'] = os.path.getsize(output_file)
# this seek is required for 'response' to work
return response
class Compressor(object):
""" Interface to create a compressed file on disk, given streams """
def open(self, output_file):
raise NotImplementedError()
def add_stream(self, stream, size=0, name=None ):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
class TarCompressor(Compressor):
""" Interface to create a tarfile on disk, given various input streams """
def __init__(self):
self._tar = None
def open(self, name=None, fileobj=None):
        if name is None and fileobj is None:
raise ValueError('Either name or fileobj must be supplied to TarCompressor')
self._tar = tarfile.open(name=name, fileobj=fileobj, mode="w:bz2")
def add_stream(self, stream, size=0, name=None):
tar_info = tarfile.TarInfo( name=name )
tar_info.size = size
self._tar.addfile(tar_info, fileobj=stream)
def add_file(self, file):
self._tar.add(file, os.path.basename(file))
def close(self):
self._tar.close()
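# Minimal sketch of the TarCompressor interface defined above; the archive path
# and payload are invented for illustration, and the guard keeps this example
# from running on import.
if __name__ == "__main__":
    from io import BytesIO
    payload = b"hello from an in-memory stream"
    compressor = TarCompressor()
    compressor.open(name="/tmp/example.tar.bz2")
    compressor.add_stream(BytesIO(payload), size=len(payload), name="hello.txt")
    compressor.close()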
|
commtrack/commtrack-core
|
apps/transformers/zip.py
|
Python
|
bsd-3-clause
| 3,024
|
# -*- coding: utf-8 -*-
"""
sphinx.builders.gettext
~~~~~~~~~~~~~~~~~~~~~~~
The MessageCatalogBuilder class.
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
from os import path, walk
from codecs import open
from time import time
from datetime import datetime, tzinfo, timedelta
from collections import defaultdict
from uuid import uuid4
from six import iteritems
from sphinx.builders import Builder
from sphinx.util import split_index_msg
from sphinx.util.nodes import extract_messages, traverse_translatable_index
from sphinx.util.osutil import safe_relpath, ensuredir, SEP
from sphinx.util.i18n import find_catalog
from sphinx.util.console import darkgreen, purple, bold
from sphinx.locale import pairindextypes
POHEADER = r"""
# SOME DESCRIPTIVE TITLE.
# Copyright (C) %(copyright)s
# This file is distributed under the same license as the %(project)s package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: %(project)s %(version)s\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: %(ctime)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"""[1:]
class Catalog(object):
"""Catalog of translatable messages."""
def __init__(self):
self.messages = [] # retain insertion order, a la OrderedDict
self.metadata = {} # msgid -> file, line, uid
def add(self, msg, origin):
if not hasattr(origin, 'uid'):
            # Nodes that are replicated, like todo, don't have a uid;
            # i18n is unnecessary for them anyway.
return
if msg not in self.metadata: # faster lookup in hash
self.messages.append(msg)
self.metadata[msg] = []
self.metadata[msg].append((origin.source, origin.line, origin.uid))
class MsgOrigin(object):
"""
Origin holder for Catalog message origin.
"""
def __init__(self, source, line):
self.source = source
self.line = line
self.uid = uuid4().hex
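# Quick illustration (not used by the builders below): Catalog stores each
# msgid once, in insertion order, while recording every origin of a repeated
# message. The .rst file names here are invented.
#
#     catalog = Catalog()
#     catalog.add("Hello world", MsgOrigin("index.rst", 3))
#     catalog.add("Hello world", MsgOrigin("other.rst", 10))
#     assert catalog.messages == ["Hello world"]        # stored once
#     assert len(catalog.metadata["Hello world"]) == 2  # both origins kept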
class I18nBuilder(Builder):
"""
General i18n builder.
"""
name = 'i18n'
versioning_method = 'text'
    versioning_compare = None  # set from the `gettext_uuid` config value
def __init__(self, app):
self.versioning_compare = app.env.config.gettext_uuid
super(I18nBuilder, self).__init__(app)
def init(self):
Builder.init(self)
self.catalogs = defaultdict(Catalog)
def get_target_uri(self, docname, typ=None):
return ''
def get_outdated_docs(self):
return self.env.found_docs
def prepare_writing(self, docnames):
return
def compile_catalogs(self, catalogs, message):
return
def write_doc(self, docname, doctree):
catalog = self.catalogs[find_catalog(docname,
self.config.gettext_compact)]
for node, msg in extract_messages(doctree):
catalog.add(msg, node)
if 'index' in self.env.config.gettext_additional_targets:
# Extract translatable messages from index entries.
for node, entries in traverse_translatable_index(doctree):
for typ, msg, tid, main in entries:
for m in split_index_msg(typ, msg):
if typ == 'pair' and m in pairindextypes.values():
                            # skip built-in pair index types that are already
                            # translated in 'sphinx.util.nodes.process_index_entry'
continue
catalog.add(m, node)
# determine tzoffset once to remain unaffected by DST change during build
timestamp = time()
tzdelta = datetime.fromtimestamp(timestamp) - \
datetime.utcfromtimestamp(timestamp)
class LocalTimeZone(tzinfo):
def __init__(self, *args, **kw):
super(LocalTimeZone, self).__init__(*args, **kw)
self.tzdelta = tzdelta
def utcoffset(self, dt):
return self.tzdelta
def dst(self, dt):
return timedelta(0)
ltz = LocalTimeZone()
class MessageCatalogBuilder(I18nBuilder):
"""
Builds gettext-style message catalogs (.pot files).
"""
name = 'gettext'
def init(self):
I18nBuilder.init(self)
self.create_template_bridge()
self.templates.init(self)
def _collect_templates(self):
template_files = set()
for template_path in self.config.templates_path:
tmpl_abs_path = path.join(self.app.srcdir, template_path)
for dirpath, dirs, files in walk(tmpl_abs_path):
for fn in files:
if fn.endswith('.html'):
filename = path.join(dirpath, fn)
filename = filename.replace(path.sep, SEP)
template_files.add(filename)
return template_files
def _extract_from_template(self):
files = self._collect_templates()
self.info(bold('building [%s]: ' % self.name), nonl=1)
self.info('targets for %d template files' % len(files))
extract_translations = self.templates.environment.extract_translations
for template in self.app.status_iterator(
files, 'reading templates... ', purple, len(files)):
with open(template, 'r', encoding='utf-8') as f:
context = f.read()
for line, meth, msg in extract_translations(context):
origin = MsgOrigin(template, line)
self.catalogs['sphinx'].add(msg, origin)
def build(self, docnames, summary=None, method='update'):
self._extract_from_template()
I18nBuilder.build(self, docnames, summary, method)
def finish(self):
I18nBuilder.finish(self)
data = dict(
version = self.config.version,
copyright = self.config.copyright,
project = self.config.project,
ctime = datetime.fromtimestamp(
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
)
for textdomain, catalog in self.app.status_iterator(
iteritems(self.catalogs), "writing message catalogs... ",
darkgreen, len(self.catalogs),
lambda textdomain__: textdomain__[0]):
# noop if config.gettext_compact is set
ensuredir(path.join(self.outdir, path.dirname(textdomain)))
pofn = path.join(self.outdir, textdomain + '.pot')
pofile = open(pofn, 'w', encoding='utf-8')
try:
pofile.write(POHEADER % data)
for message in catalog.messages:
positions = catalog.metadata[message]
if self.config.gettext_location:
# generate "#: file1:line1\n#: file2:line2 ..."
pofile.write("#: %s\n" % "\n#: ".join(
"%s:%s" % (safe_relpath(source, self.outdir), line)
for source, line, _ in positions))
if self.config.gettext_uuid:
# generate "# uuid1\n# uuid2\n ..."
pofile.write("# %s\n" % "\n# ".join(
uid for _, _, uid in positions))
# message contains *one* line of text ready for translation
message = message.replace('\\', r'\\'). \
replace('"', r'\"'). \
replace('\n', '\\n"\n"')
pofile.write('msgid "%s"\nmsgstr ""\n\n' % message)
finally:
pofile.close()
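# The builder above is what the "gettext" builder name selects, e.g.:
#
#     sphinx-build -b gettext sourcedir outdir
#
# (directory names are placeholders; one .pot file is written per text domain
# found by find_catalog()).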
|
WhySoGeeky/DroidPot
|
venv/lib/python2.7/site-packages/sphinx/builders/gettext.py
|
Python
|
mit
| 7,833
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging as loggers
import numpy as np
import theano.tensor as T
from theano.tensor.nnet import conv
from theano.tensor.signal import downsample
from deepy.utils import build_activation, UniformInitializer
from deepy.layers.layer import NeuralLayer
logging = loggers.getLogger(__name__)
class Convolution(NeuralLayer):
"""
Convolution layer with max-pooling.
"""
def __init__(self, filter_shape, pool_size=(2, 2),
reshape_input=False, border_mode="valid", flatten_output=False,
disable_pooling=False, activation='linear', init=None):
super(Convolution, self).__init__("convolution")
self.filter_shape = filter_shape
self.output_dim = filter_shape[0]
self.pool_size = pool_size
self.reshape_input = reshape_input
self.flatten_output = flatten_output
self.activation = activation
self.disable_pooling = disable_pooling
self.border_mode = border_mode
self.initializer = init if init else self._default_initializer()
def setup(self):
self._setup_params()
self._setup_functions()
def output(self, x):
if self.reshape_input:
img_width = T.cast(T.sqrt(x.shape[1]), "int32")
x = x.reshape((x.shape[0], 1, img_width, img_width), ndim=4)
conv_out = conv.conv2d(
input=x,
filters=self.W_conv,
filter_shape=self.filter_shape,
image_shape=None,
border_mode=self.border_mode
)
pooled_out = downsample.max_pool_2d(
input=conv_out,
ds=self.pool_size,
ignore_border=True
)
if self.disable_pooling:
pooled_out = conv_out
output = self._activation_func(pooled_out + self.B_conv.dimshuffle('x', 0, 'x', 'x'))
if self.flatten_output:
output = output.flatten(2)
return output
def _setup_functions(self):
self._activation_func = build_activation(self.activation)
def _setup_params(self):
self.W_conv = self.create_weight(suffix="conv", initializer=self.initializer, shape=self.filter_shape)
self.B_conv = self.create_bias(self.filter_shape[0], suffix="conv")
self.register_parameters(self.W_conv, self.B_conv)
def _default_initializer(self):
fan_in = np.prod(self.filter_shape[1:])
fan_out = (self.filter_shape[0] * np.prod(self.filter_shape[2:]) /
np.prod(self.pool_size))
weight_scale = np.sqrt(6. / (fan_in + fan_out))
return UniformInitializer(scale=weight_scale)
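# Usage sketch (the surrounding deepy network definition is not shown here):
# a 32-filter 5x5 convolution over single-channel input with 2x2 max-pooling,
# flattened so a dense layer can follow. All hyper-parameters are illustrative.
#
#     conv = Convolution(filter_shape=(32, 1, 5, 5), pool_size=(2, 2),
#                        reshape_input=True, activation='relu',
#                        flatten_output=True)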
|
ZhangAustin/deepy
|
deepy/layers/conv.py
|
Python
|
mit
| 2,682
|
"""
Test rpc_util functions
"""
__author__ = 'Dan Gunter <dkgunter@lbl.gov>'
__date__ = '8/28/15'
# Imports
# stdlib
import re
# third-party
from nose.tools import raises
# local
from doekbase.data_api import rpc_util
from thrift.Thrift import TType
class Metadata:
"""SAMPLE object for testing validation, etc.
Default metadata for an object.
Attributes:
- object_id
- object_name
- object_reference
- object_reference_versioned
- type_string
- save_date
- version
- saved_by
- workspace_id
- workspace_name
- object_checksum
- object_size
- object_metadata
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'object_id', None, None,), # 1
(2, TType.STRING, 'object_name', None, None,), # 2
(3, TType.STRING, 'object_reference', None, None,), # 3
(4, TType.STRING, 'object_reference_versioned', None, None,), # 4
(5, TType.STRING, 'type_string', None, None,), # 5
(6, TType.DOUBLE, 'save_timestamp', None, None,), # 6
(7, TType.STRING, 'version', None, None,), # 7
(8, TType.STRING, 'saved_by', None, None,), # 8
(9, TType.I64, 'workspace_id', None, None,), # 9
(10, TType.STRING, 'workspace_name', None, None,), # 10
(11, TType.STRING, 'object_checksum', None, None,), # 11
(12, TType.I64, 'object_size', None, None,), # 12
(13, TType.STRING, 'object_metadata', None, None,), # 13
)
def __init__(self, object_id=None, object_name=None, object_reference=None,
object_reference_versioned=None, type_string=None,
save_timestamp=None, version=None, saved_by=None,
workspace_id=None, workspace_name=None, object_checksum=None,
object_size=None, object_metadata=None, ):
self.object_id = object_id
self.object_name = object_name
self.object_reference = object_reference
self.object_reference_versioned = object_reference_versioned
self.type_string = type_string
self.save_timestamp = save_timestamp
self.version = version
self.saved_by = saved_by
self.workspace_id = workspace_id
self.workspace_name = workspace_name
self.object_checksum = object_checksum
self.object_size = object_size
self.object_metadata = object_metadata
@raises(rpc_util.InvalidField)
def test_thrift_validate_str_fail():
rpc_util.thrift_validate(Metadata(object_id=12))
@raises(rpc_util.InvalidField)
def test_thrift_validate_int_fail():
rpc_util.thrift_validate(Metadata(workspace_id='hello'))
@raises(rpc_util.InvalidField)
def test_thrift_validate_int_double_fail():
rpc_util.thrift_validate(Metadata(workspace_id=3.5))
@raises(rpc_util.InvalidField)
def test_thrift_validate_double_fail():
rpc_util.thrift_validate(Metadata(save_timestamp=['June']))
def test_thrift_validate_str():
rpc_util.thrift_validate(Metadata(object_id='12'))
rpc_util.thrift_validate(Metadata(object_id=u'12'))
def test_thrift_validate_int():
rpc_util.thrift_validate(Metadata(workspace_id=12))
def test_thrift_validate_int_double():
rpc_util.thrift_validate(Metadata(workspace_id=12.0)) # ok if int.
rpc_util.thrift_validate(Metadata(workspace_id=12))
def test_thrift_validate_double():
rpc_util.thrift_validate(Metadata(save_timestamp=123456))
rpc_util.thrift_validate(Metadata(save_timestamp=123456.7))
def test_thrift_errmsg():
val = 'really big'
try:
rpc_util.thrift_validate(Metadata(object_size=val))
except rpc_util.InvalidField as err:
msg = str(err)
#print("@@ {}".format(msg))
# make sure both type and value appear in error message
assert val in msg # note: assumes string value
assert 'I64' in msg
|
scanon/data_api2
|
lib/doekbase/data_api/tests/test_rpc_util.py
|
Python
|
mit
| 3,869
|
# coding=utf-8
from __future__ import print_function
"""
Place in ~/.octoprint/plugins & restart server to test:
* python_checker and python_updater mechanism
* demotion of pip and python setup.py clean output that
gets written to stderr but isn't as severe as that would
look
Plugin will always demand to update itself, multiple
consecutive runs are not a problem.
"""
import time
NAME = "Always Update"
OLD_VERSION = "1.0.0"
NEW_VERSION = "2.0.0"
class Foo(object):
def get_latest(self, target, check, full_data=None):
information = dict(local=dict(name=OLD_VERSION, value=OLD_VERSION),
remote=dict(name=NEW_VERSION, value=NEW_VERSION))
current = False
return information, current
def can_perform_update(self, target, check):
return True
def perform_update(self, target, check, target_version, log_cb=None):
if not callable(log_cb):
import sys
def log_cb(lines, prefix=None, stream=None, strip=True):
if stream == "stdout":
f = sys.stdout
elif stream == "stderr":
f = sys.stderr
else:
f = None
for line in lines:
print(line, file=f)
log_cb(["Updating Always Update..."])
time.sleep(1)
log_cb(["running clean",
"recursively removing *.pyc from 'src'"],
stream="stdout")
log_cb(["'build/lib' does not exist -- can't clean it",
"'build/bdist.win32' does not exist -- can't clean it",
"'build/scripts-2.7' does not exist -- can't clean it"],
stream="stderr")
log_cb(["removing 'Development\OctoPrint\OctoPrint\src\octoprint_setuptools\__init__.pyc'"],
stream="stdout")
time.sleep(1)
log_cb(["This should be red"],
stream="stderr")
log_cb(["You are using pip version 7.1.2, however version 9.0.1 is available.",
"You should consider upgrading via the 'python -m pip install --upgrade pip' command."],
stream="stderr")
time.sleep(3)
log_cb(["Done!"])
def get_update_information():
foo = Foo()
return dict(
always_update=dict(
displayName=NAME,
displayVersion=OLD_VERSION,
type="python_checker",
python_checker=foo,
python_updater=foo
)
)
__plugin_name__ = NAME
__plugin_hooks__ = {
"octoprint.plugin.softwareupdate.check_config": get_update_information,
}
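# For reference, Foo.get_latest() above returns an (information, current) pair
# built from the module-level version constants, i.e.:
#
#     ({'local': {'name': '1.0.0', 'value': '1.0.0'},
#       'remote': {'name': '2.0.0', 'value': '2.0.0'}}, False)
#
# so the software update plugin always sees an update from 1.0.0 to 2.0.0.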
|
JackGavin13/octoprint-test-not-finished
|
tests/manual_tests/always_update.py
|
Python
|
agpl-3.0
| 2,285
|
#***************************************************************************
#* *
#* Copyright (c) 2016 Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
from __future__ import print_function
import os,FreeCAD,Mesh
__title__="FreeCAD 3DS importer"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
DEBUG = True
## @package import3DS
# \ingroup ARCH
# \brief 3DS file format importer
#
# This module provides tools to import 3DS files.
def check3DS():
"checks if collada if available"
global dom3ds
dom3ds = None
try:
from Dice3DS import dom3ds
except ImportError:
FreeCAD.Console.PrintError("Dice3DS not found, 3DS support is disabled.\n")
return False
else:
return True
def open(filename):
"called when freecad wants to open a file"
if not check3DS():
return
docname = (os.path.splitext(os.path.basename(filename))[0]).encode("utf8")
doc = FreeCAD.newDocument(docname)
doc.Label = decode(docname)
FreeCAD.ActiveDocument = doc
read(filename)
return doc
def insert(filename,docname):
"called when freecad wants to import a file"
if not check3DS():
return
try:
doc = FreeCAD.getDocument(docname)
except NameError:
doc = FreeCAD.newDocument(docname)
FreeCAD.ActiveDocument = doc
read(filename)
return doc
def decode(name):
"decodes encoded strings"
try:
decodedName = (name.decode("utf8"))
except UnicodeDecodeError:
try:
decodedName = (name.decode("latin1"))
except UnicodeDecodeError:
FreeCAD.Console.PrintError(translate("Arch","Error: Couldn't determine character encoding"))
decodedName = name
return decodedName
def read(filename):
dom = dom3ds.read_3ds_file(filename,tight=False)
for j,d_nobj in enumerate(dom.mdata.objects):
if type(d_nobj.obj) != dom3ds.N_TRI_OBJECT:
continue
verts = []
if d_nobj.obj.points:
for d_point in d_nobj.obj.points.array:
verts.append([d_point[0],d_point[1],d_point[2]])
meshdata = []
for d_face in d_nobj.obj.faces.array:
meshdata.append([verts[int(d_face[i])] for i in xrange(3)])
m = [tuple(r) for r in d_nobj.obj.matrix.array]
m = m[0] + m[1] + m[2] + m[3]
placement = FreeCAD.Placement(FreeCAD.Matrix(*m))
mesh = Mesh.Mesh(meshdata)
obj = FreeCAD.ActiveDocument.addObject("Mesh::Feature","Mesh")
obj.Mesh = mesh
obj.Placement = placement
else:
print("Skipping object without vertices array: ",d_nobj.obj)
|
usakhelo/FreeCAD
|
src/Mod/Arch/import3DS.py
|
Python
|
lgpl-2.1
| 4,196
|
# -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
class Bala(Actor):
""" Representa una bala que va en línea recta. """
def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
        Builds the Bala (bullet).
        :param x: x position of the projectile.
        :param y: y position of the projectile.
        :param velocidad_maxima: maximum speed the projectile will reach.
        :param angulo_de_movimiento: angle along which the Actor will move.
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5
self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.cuando_se_elimina(self)
super(Bala, self).eliminar()
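# Usage sketch (assumes a running pilas instance created elsewhere, for example
# with pilasengine.iniciar()); the values below are illustrative.
#
#     bala = Bala(pilas, x=0, y=0, rotacion=0,
#                 velocidad_maxima=9, angulo_de_movimiento=90)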
|
apehua/pilas
|
pilasengine/actores/bala.py
|
Python
|
lgpl-3.0
| 1,431
|
# Copyright 2013 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from tempest.common import rest_client
from tempest.common import xml_utils
from tempest import config
from tempest import exceptions
CONF = config.CONF
class AggregatesClientXML(rest_client.RestClient):
TYPE = "xml"
def __init__(self, auth_provider):
super(AggregatesClientXML, self).__init__(auth_provider)
self.service = CONF.compute.catalog_type
def _format_aggregate(self, g):
agg = xml_utils.xml_to_json(g)
aggregate = {}
for key, value in agg.items():
if key == 'hosts':
aggregate['hosts'] = []
for k, v in value.items():
aggregate['hosts'].append(v)
elif key == 'availability_zone':
aggregate[key] = None if value == 'None' else value
else:
aggregate[key] = value
return aggregate
def _parse_array(self, node):
return [self._format_aggregate(x) for x in node]
def list_aggregates(self):
"""Get aggregate list."""
resp, body = self.get("os-aggregates")
aggregates = self._parse_array(etree.fromstring(body))
return resp, aggregates
def get_aggregate(self, aggregate_id):
"""Get details of the given aggregate."""
resp, body = self.get("os-aggregates/%s" % str(aggregate_id))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
def create_aggregate(self, name, availability_zone=None):
"""Creates a new aggregate."""
if availability_zone is not None:
post_body = xml_utils.Element("aggregate", name=name,
availability_zone=availability_zone)
else:
post_body = xml_utils.Element("aggregate", name=name)
resp, body = self.post('os-aggregates',
str(xml_utils.Document(post_body)))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
def update_aggregate(self, aggregate_id, name, availability_zone=None):
"""Update a aggregate."""
if availability_zone is not None:
put_body = xml_utils.Element("aggregate", name=name,
availability_zone=availability_zone)
else:
put_body = xml_utils.Element("aggregate", name=name)
resp, body = self.put('os-aggregates/%s' % str(aggregate_id),
str(xml_utils.Document(put_body)))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
def delete_aggregate(self, aggregate_id):
"""Deletes the given aggregate."""
return self.delete("os-aggregates/%s" % str(aggregate_id))
def is_resource_deleted(self, id):
try:
self.get_aggregate(id)
except exceptions.NotFound:
return True
return False
def add_host(self, aggregate_id, host):
"""Adds a host to the given aggregate."""
post_body = xml_utils.Element("add_host", host=host)
resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
str(xml_utils.Document(post_body)))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
def remove_host(self, aggregate_id, host):
"""Removes a host from the given aggregate."""
post_body = xml_utils.Element("remove_host", host=host)
resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
str(xml_utils.Document(post_body)))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
def set_metadata(self, aggregate_id, meta):
"""Replaces the aggregate's existing metadata with new metadata."""
post_body = xml_utils.Element("set_metadata")
metadata = xml_utils.Element("metadata")
post_body.append(metadata)
for k, v in meta.items():
meta = xml_utils.Element(k)
meta.append(xml_utils.Text(v))
metadata.append(meta)
resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
str(xml_utils.Document(post_body)))
aggregate = self._format_aggregate(etree.fromstring(body))
return resp, aggregate
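# Illustrative call sequence (auth_provider construction omitted; the aggregate
# name, zone, host, and metadata values are made up):
#
#     client = AggregatesClientXML(auth_provider)
#     resp, aggregate = client.create_aggregate('rack-1', availability_zone='az-1')
#     resp, aggregate = client.add_host(aggregate['id'], 'compute-node-01')
#     resp, aggregate = client.set_metadata(aggregate['id'], {'ssd': 'true'})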
|
queria/my-tempest
|
tempest/services/compute/xml/aggregates_client.py
|
Python
|
apache-2.0
| 5,059
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .stats import TotalStat
from .visitor import SuiteVisitor
class TotalStatistics(object):
"""Container for total statistics."""
def __init__(self):
#: Instance of :class:`~robot.model.stats.TotalStat` for critical tests.
self.critical = TotalStat('Critical Tests')
#: Instance of :class:`~robot.model.stats.TotalStat` for all the tests.
self.all = TotalStat('All Tests')
def visit(self, visitor):
visitor.visit_total_statistics(self)
def __iter__(self):
return iter([self.critical, self.all])
@property
def message(self):
"""String representation of the statistics.
For example::
2 critical tests, 1 passed, 1 failed
2 tests total, 1 passed, 1 failed
"""
ctotal, cend, cpass, cfail = self._get_counts(self.critical)
atotal, aend, apass, afail = self._get_counts(self.all)
return ('%d critical test%s, %d passed, %d failed\n'
'%d test%s total, %d passed, %d failed'
% (ctotal, cend, cpass, cfail, atotal, aend, apass, afail))
def _get_counts(self, stat):
ending = 's' if stat.total != 1 else ''
return stat.total, ending, stat.passed, stat.failed
class TotalStatisticsBuilder(SuiteVisitor):
def __init__(self, suite=None):
self.stats = TotalStatistics()
if suite:
suite.visit(self)
def add_test(self, test):
self.stats.all.add_test(test)
if test.critical:
self.stats.critical.add_test(test)
def visit_test(self, test):
self.add_test(test)
def visit_keyword(self, kw):
pass
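# Usage sketch: building totals from a parsed output.xml (the path is hypothetical).
#
#     from robot.api import ExecutionResult
#     result = ExecutionResult('output.xml')
#     stats = TotalStatisticsBuilder(result.suite).stats
#     print(stats.message)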
|
alexandrul-ci/robotframework
|
src/robot/model/totalstatistics.py
|
Python
|
apache-2.0
| 2,319
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from libcloud.utils.py3 import httplib
from io import BytesIO
from mock import Mock
from libcloud.utils.py3 import StringIO
from libcloud.utils.py3 import b
from libcloud.storage.base import StorageDriver
from libcloud.storage.base import DEFAULT_CONTENT_TYPE
from libcloud.test import unittest
from libcloud.test import MockHttp
class BaseMockRawResponse(MockHttp):
def _(self, method, url, body, headers):
body = 'ab'
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def root(self, method, url, body, headers):
body = 'ab'
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
class BaseStorageTests(unittest.TestCase):
def setUp(self):
self.send_called = 0
StorageDriver.connectionCls.conn_class = BaseMockRawResponse
self.driver1 = StorageDriver('username', 'key', host='localhost')
self.driver1.supports_chunked_encoding = True
self.driver2 = StorageDriver('username', 'key', host='localhost')
self.driver2.supports_chunked_encoding = False
self.driver1.strict_mode = False
self.driver1.strict_mode = False
def test__upload_object_iterator_must_have_next_method(self):
valid_iterators = [BytesIO(b('134')), StringIO('bar')]
invalid_iterators = ['foobar', '', False, True, 1, object()]
def upload_func(*args, **kwargs):
return True, 'barfoo', 100
kwargs = {'object_name': 'foo', 'content_type': 'foo/bar',
'upload_func': upload_func, 'upload_func_kwargs': {},
'request_path': '/', 'headers': {}}
for value in valid_iterators:
kwargs['stream'] = value
self.driver1._upload_object(**kwargs)
for value in invalid_iterators:
kwargs['stream'] = value
try:
self.driver1._upload_object(**kwargs)
except AttributeError:
pass
else:
self.fail('Exception was not thrown')
def test__get_hash_function(self):
self.driver1.hash_type = 'md5'
func = self.driver1._get_hash_function()
self.assertTrue(func)
self.driver1.hash_type = 'sha1'
func = self.driver1._get_hash_function()
self.assertTrue(func)
try:
self.driver1.hash_type = 'invalid-hash-function'
func = self.driver1._get_hash_function()
except RuntimeError:
pass
else:
self.fail('Invalid hash type but exception was not thrown')
def test_upload_no_content_type_supplied_or_detected(self):
iterator = StringIO()
upload_func = Mock()
upload_func.return_value = True, '', 0
# strict_mode is disabled, default content type should be used
self.driver1.connection = Mock()
self.driver1._upload_object(object_name='test',
content_type=None,
upload_func=upload_func,
upload_func_kwargs={},
request_path='/',
stream=iterator)
headers = self.driver1.connection.request.call_args[-1]['headers']
self.assertEqual(headers['Content-Type'], DEFAULT_CONTENT_TYPE)
# strict_mode is enabled, exception should be thrown
self.driver1.strict_mode = True
expected_msg = ('File content-type could not be guessed and no'
' content_type value is provided')
self.assertRaisesRegexp(AttributeError, expected_msg,
self.driver1._upload_object,
object_name='test',
content_type=None,
upload_func=upload_func,
upload_func_kwargs={},
request_path='/',
stream=iterator)
if __name__ == '__main__':
sys.exit(unittest.main())
|
t-tran/libcloud
|
libcloud/test/storage/test_base.py
|
Python
|
apache-2.0
| 4,866
|
"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cortex_m import CortexM, DHCSR, DBGKEY, C_DEBUGEN, C_MASKINTS, C_STEP, DEMCR, VC_CORERESET, NVIC_AIRCR, NVIC_AIRCR_VECTKEY, NVIC_AIRCR_SYSRESETREQ
from .memory_map import (FlashRegion, RamRegion, MemoryMap)
from pyOCD.target.target import TARGET_RUNNING, TARGET_HALTED
import logging
# NRF51 specific registers
RESET = 0x40000544
RESET_ENABLE = (1 << 0)
class NRF51(CortexM):
memoryMap = MemoryMap(
FlashRegion( start=0, length=0x40000, blocksize=0x400, isBootMemory=True),
RamRegion( start=0x20000000, length=0x4000)
)
def __init__(self, transport):
super(NRF51, self).__init__(transport, self.memoryMap)
def resetn(self):
"""
reset a core. After a call to this function, the core
is running
"""
#Regular reset will kick NRF out of DBG mode
logging.debug("target_nrf51.reset: enable reset pin")
self.writeMemory(RESET, RESET_ENABLE)
#reset
logging.debug("target_nrf51.reset: trigger nRST pin")
CortexM.reset(self)
|
ARMmbed/yotta_osx_installer
|
workspace/lib/python2.7/site-packages/pyOCD/target/target_nrf51.py
|
Python
|
apache-2.0
| 1,678
|
"""Tests for distutils.command.check."""
import os
import textwrap
import unittest
from test.support import run_unittest
from distutils.command.check import check, HAS_DOCUTILS
from distutils.tests import support
from distutils.errors import DistutilsSetupError
try:
import pygments
except ImportError:
pygments = None
HERE = os.path.dirname(__file__)
class CheckTestCase(support.LoggingSilencer,
support.TempdirManager,
unittest.TestCase):
def _run(self, metadata=None, cwd=None, **options):
if metadata is None:
metadata = {}
if cwd is not None:
old_dir = os.getcwd()
os.chdir(cwd)
pkg_info, dist = self.create_dist(**metadata)
cmd = check(dist)
cmd.initialize_options()
for name, value in options.items():
setattr(cmd, name, value)
cmd.ensure_finalized()
cmd.run()
if cwd is not None:
os.chdir(old_dir)
return cmd
def test_check_metadata(self):
# let's run the command with no metadata at all
# by default, check is checking the metadata
# should have some warnings
cmd = self._run()
self.assertEqual(cmd._warnings, 2)
# now let's add the required fields
# and run it again, to make sure we don't get
# any warning anymore
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': 'xxx',
'name': 'xxx', 'version': 'xxx'}
cmd = self._run(metadata)
self.assertEqual(cmd._warnings, 0)
# now with the strict mode, we should
# get an error if there are missing metadata
self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1})
# and of course, no error when all metadata are present
cmd = self._run(metadata, strict=1)
self.assertEqual(cmd._warnings, 0)
# now a test with non-ASCII characters
metadata = {'url': 'xxx', 'author': '\u00c9ric',
'author_email': 'xxx', 'name': 'xxx',
'version': 'xxx',
'description': 'Something about esszet \u00df',
'long_description': 'More things about esszet \u00df'}
cmd = self._run(metadata)
self.assertEqual(cmd._warnings, 0)
@unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
def test_check_document(self):
pkg_info, dist = self.create_dist()
cmd = check(dist)
# let's see if it detects broken rest
broken_rest = 'title\n===\n\ntest'
msgs = cmd._check_rst_data(broken_rest)
self.assertEqual(len(msgs), 1)
# and non-broken rest
rest = 'title\n=====\n\ntest'
msgs = cmd._check_rst_data(rest)
self.assertEqual(len(msgs), 0)
@unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
def test_check_restructuredtext(self):
# let's see if it detects broken rest in long_description
broken_rest = 'title\n===\n\ntest'
pkg_info, dist = self.create_dist(long_description=broken_rest)
cmd = check(dist)
cmd.check_restructuredtext()
self.assertEqual(cmd._warnings, 1)
# let's see if we have an error with strict=1
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': 'xxx',
'name': 'xxx', 'version': 'xxx',
'long_description': broken_rest}
self.assertRaises(DistutilsSetupError, self._run, metadata,
**{'strict': 1, 'restructuredtext': 1})
# and non-broken rest, including a non-ASCII character to test #12114
metadata['long_description'] = 'title\n=====\n\ntest \u00df'
cmd = self._run(metadata, strict=1, restructuredtext=1)
self.assertEqual(cmd._warnings, 0)
# check that includes work to test #31292
metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1)
self.assertEqual(cmd._warnings, 0)
@unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
def test_check_restructuredtext_with_syntax_highlight(self):
# Don't fail if there is a `code` or `code-block` directive
example_rst_docs = []
example_rst_docs.append(textwrap.dedent("""\
Here's some code:
.. code:: python
def foo():
pass
"""))
example_rst_docs.append(textwrap.dedent("""\
Here's some code:
.. code-block:: python
def foo():
pass
"""))
for rest_with_code in example_rst_docs:
pkg_info, dist = self.create_dist(long_description=rest_with_code)
cmd = check(dist)
cmd.check_restructuredtext()
msgs = cmd._check_rst_data(rest_with_code)
if pygments is not None:
self.assertEqual(len(msgs), 0)
else:
self.assertEqual(len(msgs), 1)
self.assertEqual(
str(msgs[0][1]),
'Cannot analyze code. Pygments package not found.'
)
def test_check_all(self):
metadata = {'url': 'xxx', 'author': 'xxx'}
self.assertRaises(DistutilsSetupError, self._run,
{}, **{'strict': 1,
'restructuredtext': 1})
def test_suite():
return unittest.makeSuite(CheckTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|
batermj/algorithm-challenger
|
code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/distutils/tests/test_check.py
|
Python
|
apache-2.0
| 5,711
|
"""Set of utility functions for working with OS commands.
Functions in this module return the command string. These commands are composed but not executed.
"""
import os
from subprocess import call
HADOOP_CONF_DIR = '/etc/hadoop/conf'
def encrypt(key_file):
"""
Encrypt the data from stdin and write output to stdout.
:param key_file: The key file used to encrypt the stream.
"""
if not os.path.isfile(key_file):
raise ValueError("Cannot find key_file: %" % key_file)
return "openssl aes-256-cbc -salt -pass file:%s" % key_file
def decrypt(key_file):
"""
Decrypt the data from stdin and write output to stdout.
:param key_file: The key file used to decrypt the stream.
"""
if not os.path.isfile(key_file):
raise ValueError("Cannot find key_file: %" % key_file)
return "openssl aes-256-cbc -d -pass file:%s" % key_file
def compress(extension):
"""
Compress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
        'gz' for gzip, 'bz' or 'bz2' for bzip, 'lzo' for lzo.
"""
if extension == "gz":
cmd = "pigz" if exists("pigz") else "gzip"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2"
elif extension == 'lzo':
cmd = "lzop"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
def decompress(extension):
"""
Decompress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
        'gz' for gzip, 'bz' or 'bz2' for bzip, 'lzo' for lzo.
"""
if extension == "gz":
cmd = "pigz -d" if exists("pigz") else "gzip -d"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2 -d"
elif extension == 'lzo':
cmd = "lzop -d"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
def hdfs_cat(uri, conf=HADOOP_CONF_DIR):
"""
Fetch the data from the specified uri and write output to stdout.
:param uri: The HDFS URI.
:param conf: The hadoop config directory.
"""
return "hadoop --config %s dfs -cat %s" % (conf, uri)
def pv(size):
"""
Monitor the progress of data through a pipe. If 'pv' is not available, simply 'cat' it.
:param size: The size of the data, to calculate percentage.
"""
if exists('pv'):
return "pv --wait --size %s" % size
else:
return "cat"
def untar(directory):
"""
Untar the data from stdin into the specified directory.
:param directory: The directory to write files to.
"""
return "tar -C %s -x" % directory
def tar(path):
"""
Tar the path and write output to stdout.
:param path: All contents under path are 'tar'ed.
"""
if not os.path.exists(path):
raise ValueError("Invalid argument: 'path' doesn't exist")
path = path.rstrip(os.sep)
parent, base = os.path.split(path)
return "tar -C %s %s" % (parent, base)
def exists(cmd):
"""Return true if 'cmd' exists in $PATH."""
with open(os.devnull, "w") as f:
return call(['which', cmd], stdout=f) == 0 # No stdout.
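# Illustrative sketch (not part of the original module): every helper above only
# returns a command string, so a caller typically joins the pieces into a single
# shell pipeline and hands that string to a shell. The URI and target directory
# below are made-up placeholders; a decrypt(key_file) stage could be spliced in
# as well, but decrypt()/encrypt() require the key file to exist locally.
def _example_restore_pipeline(uri="hdfs://namenode/backups/db.tar.gz",
                              directory="/var/lib/restore"):
    """Compose 'hdfs cat | decompress | untar' into one pipeline string."""
    stages = [hdfs_cat(uri), decompress("gz"), untar(directory)]
    return " | ".join(stages)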
|
apache/incubator-cotton
|
mysos/executor/shell_utils.py
|
Python
|
apache-2.0
| 3,152
|
"""login is not nullable
Revision ID: 105c1c44ff70
Revises: 2003c675a267
Create Date: 2013-12-09 10:52:50.646000
"""
# revision identifiers, used by Alembic.
revision = '105c1c44ff70'
down_revision = '2003c675a267'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('labmanager_users', 'login',
existing_type=mysql.VARCHAR(length=50),
nullable=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('labmanager_users', 'login',
existing_type=mysql.VARCHAR(length=50),
nullable=True)
### end Alembic commands ###
|
gateway4labs/labmanager
|
alembic/versions/105c1c44ff70_login_is_not_nullabl.py
|
Python
|
bsd-2-clause
| 832
|
"""
========================
Broadcasting over arrays
========================
The term broadcasting describes how numpy treats arrays with different
shapes during arithmetic operations. Subject to certain constraints,
the smaller array is "broadcast" across the larger array so that they
have compatible shapes. Broadcasting provides a means of vectorizing
array operations so that looping occurs in C instead of Python. It does
this without making needless copies of data and usually leads to
efficient algorithm implementations. There are, however, cases where
broadcasting is a bad idea because it leads to inefficient use of memory
that slows computation.
NumPy operations are usually done element-by-element, which requires two
arrays to have exactly the same shape::
>>> a = np.array([1.0, 2.0, 3.0])
>>> b = np.array([2.0, 2.0, 2.0])
>>> a * b
array([ 2., 4., 6.])
NumPy's broadcasting rule relaxes this constraint when the arrays'
shapes meet certain constraints. The simplest broadcasting example occurs
when an array and a scalar value are combined in an operation:
>>> a = np.array([1.0, 2.0, 3.0])
>>> b = 2.0
>>> a * b
array([ 2., 4., 6.])
The result is equivalent to the previous example where ``b`` was an array.
We can think of the scalar ``b`` being *stretched* during the arithmetic
operation into an array with the same shape as ``a``. The new elements in
``b`` are simply copies of the original scalar. The stretching analogy is
only conceptual. NumPy is smart enough to use the original scalar value
without actually making copies, so that broadcasting operations are as
memory and computationally efficient as possible.
The second example is more efficient than the first, since here broadcasting
moves less memory around during the multiplication (``b`` is a scalar,
not an array).
General Broadcasting Rules
==========================
When operating on two arrays, NumPy compares their shapes element-wise.
It starts with the trailing dimensions, and works its way forward. Two
dimensions are compatible when
1) they are equal, or
2) one of them is 1
If these conditions are not met, a
``ValueError: frames are not aligned`` exception is thrown, indicating that
the arrays have incompatible shapes. The size of the resulting array
is the maximum size along each dimension of the input arrays.
Arrays do not need to have the same *number* of dimensions. For example,
if you have a ``256x256x3`` array of RGB values, and you want to scale
each color in the image by a different value, you can multiply the image
by a one-dimensional array with 3 values. Lining up the sizes of the
trailing axes of these arrays according to the broadcast rules, shows that
they are compatible::
Image (3d array): 256 x 256 x 3
Scale (1d array): 3
Result (3d array): 256 x 256 x 3
When either of the dimensions compared is one, the larger of the two is
used. In other words, the smaller of two axes is stretched or "copied"
to match the other.
In the following example, both the ``A`` and ``B`` arrays have axes with
length one that are expanded to a larger size during the broadcast
operation::
A (4d array): 8 x 1 x 6 x 1
B (3d array): 7 x 1 x 5
Result (4d array): 8 x 7 x 6 x 5
Here are some more examples::
A (2d array): 5 x 4
B (1d array): 1
Result (2d array): 5 x 4
A (2d array): 5 x 4
B (1d array): 4
Result (2d array): 5 x 4
A (3d array): 15 x 3 x 5
B (3d array): 15 x 1 x 5
Result (3d array): 15 x 3 x 5
A (3d array): 15 x 3 x 5
B (2d array): 3 x 5
Result (3d array): 15 x 3 x 5
A (3d array): 15 x 3 x 5
B (2d array): 3 x 1
Result (3d array): 15 x 3 x 5
Here are examples of shapes that do not broadcast::
A (1d array): 3
B (1d array): 4 # trailing dimensions do not match
A (2d array): 2 x 1
B (3d array): 8 x 4 x 3 # second from last dimensions mismatch
An example of broadcasting in practice::
>>> x = np.arange(4)
>>> xx = x.reshape(4,1)
>>> y = np.ones(5)
>>> z = np.ones((3,4))
>>> x.shape
(4,)
>>> y.shape
(5,)
>>> x + y
<type 'exceptions.ValueError'>: shape mismatch: objects cannot be broadcast to a single shape
>>> xx.shape
(4, 1)
>>> y.shape
(5,)
>>> (xx + y).shape
(4, 5)
>>> xx + y
array([[ 1., 1., 1., 1., 1.],
[ 2., 2., 2., 2., 2.],
[ 3., 3., 3., 3., 3.],
[ 4., 4., 4., 4., 4.]])
>>> x.shape
(4,)
>>> z.shape
(3, 4)
>>> (x + z).shape
(3, 4)
>>> x + z
array([[ 1., 2., 3., 4.],
[ 1., 2., 3., 4.],
[ 1., 2., 3., 4.]])
Broadcasting provides a convenient way of taking the outer product (or
any other outer operation) of two arrays. The following example shows an
outer addition operation of two 1-d arrays::
>>> a = np.array([0.0, 10.0, 20.0, 30.0])
>>> b = np.array([1.0, 2.0, 3.0])
>>> a[:, np.newaxis] + b
array([[ 1., 2., 3.],
[ 11., 12., 13.],
[ 21., 22., 23.],
[ 31., 32., 33.]])
Here the ``newaxis`` index operator inserts a new axis into ``a``,
making it a two-dimensional ``4x1`` array. Combining the ``4x1`` array
with ``b``, which has shape ``(3,)``, yields a ``4x3`` array.
See `this article <http://www.scipy.org/EricsBroadcastingDoc>`_
for illustrations of broadcasting concepts.
"""
|
illume/numpy3k
|
numpy/doc/broadcasting.py
|
Python
|
bsd-3-clause
| 5,413
|
#!/usr/bin/env python
"""
Custom test runner
If args or options, we run the testsuite as quickly as possible.
If args but no options, we default to using the spec plugin and aborting on
first error/failure.
If options, we ignore defaults and pass options onto Nose.
Examples:
Run all tests (as fast as possible)
$ ./runtests.py
Run all unit tests (using spec output)
$ ./runtests.py tests/unit
Run all checkout unit tests (using spec output)
$ ./runtests.py tests/unit/checkout
Run all tests relating to shipping
$ ./runtests.py --attr=shipping
Re-run failing tests (needs to be run twice to first build the index)
$ ./runtests.py ... --failed
Drop into pdb when a test fails
$ ./runtests.py ... --pdb-failures
"""
import sys
import logging
import warnings
from tests.config import configure
from django.utils.six.moves import map
# No logging
logging.disable(logging.CRITICAL)
def run_tests(verbosity, *test_args):
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner(verbosity=verbosity)
if not test_args:
test_args = ['tests']
num_failures = test_runner.run_tests(test_args)
if num_failures:
sys.exit(num_failures)
if __name__ == '__main__':
args = sys.argv[1:]
verbosity = 1
if not args:
# If run with no args, try and run the testsuite as fast as possible.
# That means across all cores and with no high-falutin' plugins.
import multiprocessing
try:
num_cores = multiprocessing.cpu_count()
except NotImplementedError:
num_cores = 4 # Guess
args = ['--nocapture', '--stop', '--processes=%s' % num_cores]
else:
# Some args/options specified. Check to see if any nose options have
        # been specified. If they have, then don't set any defaults.
has_options = any(map(lambda x: x.startswith('--'), args))
if not has_options:
# Default options:
# --stop Abort on first error/failure
# --nocapture Don't capture STDOUT
args.extend(['--nocapture', '--stop'])
else:
# Remove options as nose will pick these up from sys.argv
for arg in args:
if arg.startswith('--verbosity'):
verbosity = int(arg[-1])
args = [arg for arg in args if not arg.startswith('-')]
configure()
with warnings.catch_warnings():
# The warnings module in default configuration will never cause tests
# to fail, as it never raises an exception. We alter that behaviour by
# turning DeprecationWarnings into exceptions, but exclude warnings
# triggered by third-party libs. Note: The context manager is not thread
# safe. Behaviour with multiple threads is undefined.
warnings.filterwarnings('error', category=DeprecationWarning)
warnings.filterwarnings('error', category=RuntimeWarning)
libs = r'(sorl\.thumbnail.*|bs4.*|webtest.*)'
warnings.filterwarnings(
'ignore', r'.*', DeprecationWarning, libs)
run_tests(verbosity, *args)
|
kapt/django-oscar
|
runtests.py
|
Python
|
bsd-3-clause
| 3,114
|
from setuptools import setup
import pybvc
setup(
name='pybvc',
version=pybvc.__version__,
description='A python library for programming your network via the Brocade Vyatta Controller (BVC)',
long_description=open('README.rst').read(),
author='Elbrys Networks',
author_email='jeb@elbrys.com',
url='https://github.com/brcdcomm/pybvc',
packages=['pybvc',
'pybvc.common',
'pybvc.controller',
'pybvc.netconfdev',
'pybvc.netconfdev.vrouter',
'pybvc.netconfdev.vdx',
'pybvc.openflowdev'
],
install_requires=['requests>=1.0.0',
'PyYAML',
'xmltodict'],
zip_safe=False,
include_package_data=True,
platforms='any',
license='BSD',
keywords='sdn nfv bvc brocade vyatta controller network vrouter',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: System Administrators',
'Topic :: System :: Networking',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
]
)
|
tnadeau/pybvc
|
setup.py
|
Python
|
bsd-3-clause
| 1,243
|
"""
Implementation of various trading strategies.
"""
from cointrol.core.models import (
Order, TradingSession,
RelativeStrategyProfile, FixedStrategyProfile
)
class TradeAction:
BUY, SELL = Order.BUY, Order.SELL
def __init__(self, action, price):
self.action = action
self.price = price
def __str__(self):
return '{action} at ${price}'.format(
action=Order.TYPES[self.action],
price=self.price
)
class BaseTradingStrategy:
def __init__(self,
session: TradingSession,
last_order: Order):
self.session = session
self.profile = session.profile
self.last_order = last_order
def get_trade_action(self) -> TradeAction:
if self.last_order.type == Order.SELL:
return TradeAction(action=Order.BUY,
price=self.get_buy_price())
else:
return TradeAction(action=Order.SELL,
price=self.get_sell_price())
def get_buy_price(self):
raise NotImplementedError
def get_sell_price(self):
raise NotImplementedError
class FixedStrategy(BaseTradingStrategy):
profile = None
""":type: FixedStrategyProfile"""
def get_buy_price(self):
return self.profile.buy
def get_sell_price(self):
return self.profile.sell
class RelativeStrategy(BaseTradingStrategy):
profile = None
""":type: RelativeStrategyProfile"""
def get_buy_price(self):
return self.last_order.price * (self.profile.buy / 100)
def get_sell_price(self):
return self.last_order.price * (self.profile.sell / 100)
# {Profile model class: implementation class}
MAPPING = {
FixedStrategyProfile: FixedStrategy,
RelativeStrategyProfile: RelativeStrategy,
}
def get_for_session(session, latest_order) -> BaseTradingStrategy:
implementation_class = MAPPING[type(session.profile)]
return implementation_class(session, latest_order)
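# Illustrative sketch (not part of the original module): the relative strategy
# treats profile.buy / profile.sell as percentages of the last order price, so
# with buy=98, sell=103 and a last order at $200 it would buy at $196 and sell
# at $206. The figures are made up for the example; real values come from the
# RelativeStrategyProfile model.
def _example_relative_prices(last_price=200.0, buy_pct=98.0, sell_pct=103.0):
    buy_price = last_price * (buy_pct / 100)    # mirrors RelativeStrategy.get_buy_price
    sell_price = last_price * (sell_pct / 100)  # mirrors RelativeStrategy.get_sell_price
    return buy_price, sell_price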
|
iamkingmaker/cointrol
|
cointrol/trader/strategies.py
|
Python
|
mit
| 2,030
|
from __future__ import absolute_import
import sys
import types
from contextlib import contextmanager
from kombu.utils.encoding import str_to_bytes
from celery import signature
from celery import states
from celery import group
from celery.backends.cache import CacheBackend, DummyClient
from celery.exceptions import ImproperlyConfigured
from celery.five import items, string, text_t
from celery.utils import uuid
from celery.tests.case import (
AppCase, Mock, mask_modules, patch, reset_modules,
)
PY3 = sys.version_info[0] == 3
class SomeClass(object):
def __init__(self, data):
self.data = data
class test_CacheBackend(AppCase):
def setup(self):
self.tb = CacheBackend(backend='memory://', app=self.app)
self.tid = uuid()
def test_no_backend(self):
self.app.conf.CELERY_CACHE_BACKEND = None
with self.assertRaises(ImproperlyConfigured):
CacheBackend(backend=None, app=self.app)
def test_mark_as_done(self):
self.assertEqual(self.tb.get_status(self.tid), states.PENDING)
self.assertIsNone(self.tb.get_result(self.tid))
self.tb.mark_as_done(self.tid, 42)
self.assertEqual(self.tb.get_status(self.tid), states.SUCCESS)
self.assertEqual(self.tb.get_result(self.tid), 42)
def test_is_pickled(self):
result = {'foo': 'baz', 'bar': SomeClass(12345)}
self.tb.mark_as_done(self.tid, result)
# is serialized properly.
rindb = self.tb.get_result(self.tid)
self.assertEqual(rindb.get('foo'), 'baz')
self.assertEqual(rindb.get('bar').data, 12345)
def test_mark_as_failure(self):
try:
raise KeyError('foo')
except KeyError as exception:
self.tb.mark_as_failure(self.tid, exception)
self.assertEqual(self.tb.get_status(self.tid), states.FAILURE)
self.assertIsInstance(self.tb.get_result(self.tid), KeyError)
def test_apply_chord(self):
tb = CacheBackend(backend='memory://', app=self.app)
gid, res = uuid(), [self.app.AsyncResult(uuid()) for _ in range(3)]
tb.apply_chord(group(app=self.app), (), gid, {}, result=res)
@patch('celery.result.GroupResult.restore')
def test_on_chord_part_return(self, restore):
tb = CacheBackend(backend='memory://', app=self.app)
deps = Mock()
deps.__len__ = Mock()
deps.__len__.return_value = 2
restore.return_value = deps
task = Mock()
task.name = 'foobarbaz'
self.app.tasks['foobarbaz'] = task
task.request.chord = signature(task)
gid, res = uuid(), [self.app.AsyncResult(uuid()) for _ in range(3)]
task.request.group = gid
tb.apply_chord(group(app=self.app), (), gid, {}, result=res)
self.assertFalse(deps.join_native.called)
tb.on_chord_part_return(task, 'SUCCESS', 10)
self.assertFalse(deps.join_native.called)
tb.on_chord_part_return(task, 'SUCCESS', 10)
deps.join_native.assert_called_with(propagate=True, timeout=3.0)
deps.delete.assert_called_with()
def test_mget(self):
self.tb.set('foo', 1)
self.tb.set('bar', 2)
self.assertDictEqual(self.tb.mget(['foo', 'bar']),
{'foo': 1, 'bar': 2})
def test_forget(self):
self.tb.mark_as_done(self.tid, {'foo': 'bar'})
x = self.app.AsyncResult(self.tid, backend=self.tb)
x.forget()
self.assertIsNone(x.result)
def test_process_cleanup(self):
self.tb.process_cleanup()
def test_expires_as_int(self):
tb = CacheBackend(backend='memory://', expires=10, app=self.app)
self.assertEqual(tb.expires, 10)
def test_unknown_backend_raises_ImproperlyConfigured(self):
with self.assertRaises(ImproperlyConfigured):
CacheBackend(backend='unknown://', app=self.app)
class MyMemcachedStringEncodingError(Exception):
pass
class MemcachedClient(DummyClient):
def set(self, key, value, *args, **kwargs):
if PY3:
key_t, must_be, not_be, cod = bytes, 'string', 'bytes', 'decode'
else:
key_t, must_be, not_be, cod = text_t, 'bytes', 'string', 'encode'
if isinstance(key, key_t):
raise MyMemcachedStringEncodingError(
'Keys must be {0}, not {1}. Convert your '
'strings using mystring.{2}(charset)!'.format(
must_be, not_be, cod))
return super(MemcachedClient, self).set(key, value, *args, **kwargs)
class MockCacheMixin(object):
@contextmanager
def mock_memcache(self):
memcache = types.ModuleType('memcache')
memcache.Client = MemcachedClient
memcache.Client.__module__ = memcache.__name__
prev, sys.modules['memcache'] = sys.modules.get('memcache'), memcache
try:
yield True
finally:
if prev is not None:
sys.modules['memcache'] = prev
@contextmanager
def mock_pylibmc(self):
pylibmc = types.ModuleType('pylibmc')
pylibmc.Client = MemcachedClient
pylibmc.Client.__module__ = pylibmc.__name__
prev = sys.modules.get('pylibmc')
sys.modules['pylibmc'] = pylibmc
try:
yield True
finally:
if prev is not None:
sys.modules['pylibmc'] = prev
class test_get_best_memcache(AppCase, MockCacheMixin):
def test_pylibmc(self):
with self.mock_pylibmc():
with reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
self.assertEqual(cache.get_best_memcache()[0].__module__,
'pylibmc')
def test_memcache(self):
with self.mock_memcache():
with reset_modules('celery.backends.cache'):
with mask_modules('pylibmc'):
from celery.backends import cache
cache._imp = [None]
self.assertEqual(cache.get_best_memcache()[0]().__module__,
'memcache')
def test_no_implementations(self):
with mask_modules('pylibmc', 'memcache'):
with reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
with self.assertRaises(ImproperlyConfigured):
cache.get_best_memcache()
def test_cached(self):
with self.mock_pylibmc():
with reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
cache.get_best_memcache()[0](behaviors={'foo': 'bar'})
self.assertTrue(cache._imp[0])
cache.get_best_memcache()[0]()
def test_backends(self):
from celery.backends.cache import backends
with self.mock_memcache():
for name, fun in items(backends):
self.assertTrue(fun())
class test_memcache_key(AppCase, MockCacheMixin):
def test_memcache_unicode_key(self):
with self.mock_memcache():
with reset_modules('celery.backends.cache'):
with mask_modules('pylibmc'):
from celery.backends import cache
cache._imp = [None]
task_id, result = string(uuid()), 42
b = cache.CacheBackend(backend='memcache', app=self.app)
b.store_result(task_id, result, status=states.SUCCESS)
self.assertEqual(b.get_result(task_id), result)
def test_memcache_bytes_key(self):
with self.mock_memcache():
with reset_modules('celery.backends.cache'):
with mask_modules('pylibmc'):
from celery.backends import cache
cache._imp = [None]
task_id, result = str_to_bytes(uuid()), 42
b = cache.CacheBackend(backend='memcache', app=self.app)
b.store_result(task_id, result, status=states.SUCCESS)
self.assertEqual(b.get_result(task_id), result)
def test_pylibmc_unicode_key(self):
with reset_modules('celery.backends.cache'):
with self.mock_pylibmc():
from celery.backends import cache
cache._imp = [None]
task_id, result = string(uuid()), 42
b = cache.CacheBackend(backend='memcache', app=self.app)
b.store_result(task_id, result, status=states.SUCCESS)
self.assertEqual(b.get_result(task_id), result)
def test_pylibmc_bytes_key(self):
with reset_modules('celery.backends.cache'):
with self.mock_pylibmc():
from celery.backends import cache
cache._imp = [None]
task_id, result = str_to_bytes(uuid()), 42
b = cache.CacheBackend(backend='memcache', app=self.app)
b.store_result(task_id, result, status=states.SUCCESS)
self.assertEqual(b.get_result(task_id), result)
|
sunze/py_flask
|
venv/lib/python3.4/site-packages/celery/tests/backends/test_cache.py
|
Python
|
mit
| 9,216
|
#!/usr/bin/env python
#
# This is run by Travis-CI before an upgrade to load some data into the
# database. After the upgrade is complete, the data is verified by
# upgrade-after.py to make sure that the upgrade of the database went smoothly.
#
import logging
import unittest
import sys
sys.path.insert(0, '..')
sys.path.insert(0, '../pynipap')
sys.path.insert(0, '../nipap')
sys.path.insert(0, '../nipap-cli')
from nipap.backend import Nipap
from nipap.authlib import SqliteAuth
from nipap.nipapconfig import NipapConfig
from pynipap import AuthOptions, VRF, Pool, Prefix, NipapNonExistentError, NipapDuplicateError, NipapValueError
import pynipap
pynipap.xmlrpc_uri = 'http://unittest:gottatest@127.0.0.1:1337'
o = AuthOptions({
'authoritative_source': 'nipap'
})
class TestHelper:
@classmethod
def clear_database(cls):
cfg = NipapConfig('/etc/nipap/nipap.conf')
n = Nipap()
# have to delete hosts before we can delete the rest
n._execute("DELETE FROM ip_net_plan WHERE masklen(prefix) = 32")
# the rest
n._execute("DELETE FROM ip_net_plan")
# delete all except for the default VRF with id 0
n._execute("DELETE FROM ip_net_vrf WHERE id > 0")
# set default info for VRF 0
n._execute("UPDATE ip_net_vrf SET name = 'default', description = 'The default VRF, typically the Internet.' WHERE id = 0")
n._execute("DELETE FROM ip_net_pool")
n._execute("DELETE FROM ip_net_asn")
def add_prefix(self, prefix, type, description, tags=None):
if tags is None:
tags = []
p = Prefix()
p.prefix = prefix
p.type = type
p.description = description
p.tags = tags
p.save()
return p
class TestLoad(unittest.TestCase):
""" Load some data into the database
"""
def test_load_data(self):
"""
"""
th = TestHelper()
p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'test')
p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test')
p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'test')
p4 = th.add_prefix('192.168.1.0/24', 'reservation', 'test')
p5 = th.add_prefix('192.168.2.0/24', 'reservation', 'test')
p6 = th.add_prefix('192.168.32.0/20', 'reservation', 'test')
p7 = th.add_prefix('192.168.32.0/24', 'reservation', 'test')
p8 = th.add_prefix('192.168.32.1/32', 'reservation', 'test')
ps1 = th.add_prefix('2001:db8:1::/48', 'reservation', 'test')
ps2 = th.add_prefix('2001:db8:1::/64', 'reservation', 'test')
ps3 = th.add_prefix('2001:db8:2::/48', 'reservation', 'test')
pool1 = Pool()
pool1.name = 'upgrade-test'
pool1.ipv4_default_prefix_length = 31
pool1.ipv6_default_prefix_length = 112
pool1.save()
p2.pool = pool1
p2.save()
ps1.pool = pool1
ps1.save()
pool2 = Pool()
pool2.name = 'upgrade-test2'
pool2.save()
vrf1 = VRF()
vrf1.name = 'foo'
vrf1.rt = '123:123'
vrf1.save()
if __name__ == '__main__':
# set up logging
log = logging.getLogger()
logging.basicConfig()
log.setLevel(logging.INFO)
if sys.version_info >= (2,7):
unittest.main(verbosity=2)
else:
unittest.main()
|
ettrig/NIPAP
|
tests/upgrade-before.py
|
Python
|
mit
| 3,381
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, print_function, unicode_literals, \
absolute_import
import os
import unittest
from pymatgen.io.lammps.sets import LammpsInputSet
__author__ = 'Kiran Mathew'
__email__ = 'kmathew@lbl.gov'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
"test_files", "lammps")
class TestLammpsInputSet(unittest.TestCase):
def setUp(self):
template_file = os.path.join(test_dir, "in.peptide.template")
data_file = os.path.join(test_dir, "data.peptide")
self.data_filename = "test_data.peptide"
self.input_filename = "test_input.peptide"
self.settings = {
"pair_style": "lj/charmm/coul/long 8.0 10.0 10.0",
"kspace_style": "pppm 0.0001",
"fix_1": "1 all nvt temp 275.0 275.0 100.0 tchain 1",
"fix_2": "2 all shake 0.0001 10 100 b 4 6 8 10 12 14 18 a 31"
}
self.lammps_input_set = LammpsInputSet.from_file(
"test", template_file, self.settings, lammps_data=data_file,
data_filename=self.data_filename)
def test_input(self):
self.assertEqual(self.lammps_input_set.lammps_input.settings["data_file"],
self.data_filename)
for k, v in self.settings.items():
self.assertEqual(self.lammps_input_set.lammps_input.settings[k], v)
def test_write_input_set(self):
self.lammps_input_set.write_input(self.input_filename)
self.assertTrue(os.path.exists(self.input_filename))
self.assertTrue(os.path.exists(self.data_filename))
os.remove(self.input_filename)
os.remove(self.data_filename)
# now change both input and data filenames
self.lammps_input_set.write_input("xxxx.input", "yyy.data")
self.assertTrue(os.path.exists("xxxx.input"))
self.assertTrue(os.path.exists("yyy.data"))
os.remove("xxxx.input")
os.remove("yyy.data")
if __name__ == "__main__":
unittest.main()
|
johnson1228/pymatgen
|
pymatgen/io/lammps/tests/test_sets.py
|
Python
|
mit
| 2,130
|
# coding: utf-8
from __future__ import absolute_import
import flask
import auth
import model
import util
from main import app
yahoo_config = dict(
access_token_url='https://api.login.yahoo.com/oauth/v2/get_token',
authorize_url='https://api.login.yahoo.com/oauth/v2/request_auth',
base_url='https://query.yahooapis.com/',
consumer_key=model.Config.get_master_db().yahoo_consumer_key,
consumer_secret=model.Config.get_master_db().yahoo_consumer_secret,
request_token_url='https://api.login.yahoo.com/oauth/v2/get_request_token',
)
yahoo = auth.create_oauth_app(yahoo_config, 'yahoo')
@app.route('/api/auth/callback/yahoo/')
def yahoo_authorized():
response = yahoo.authorized_response()
if response is None:
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (
response['oauth_token'],
response['oauth_token_secret'],
)
fields = 'guid, emails, familyName, givenName, nickname'
me = yahoo.get(
'/v1/yql',
data={
'format': 'json',
'q': 'select %s from social.profile where guid = me;' % fields,
'realm': 'yahooapis.com',
},
)
user_db = retrieve_user_from_yahoo(me.data['query']['results']['profile'])
return auth.signin_user_db(user_db)
@yahoo.tokengetter
def get_yahoo_oauth_token():
return flask.session.get('oauth_token')
@app.route('/signin/yahoo/')
def signin_yahoo():
return auth.signin_oauth(yahoo)
def retrieve_user_from_yahoo(response):
auth_id = 'yahoo_%s' % response['guid']
user_db = model.User.get_by('auth_ids', auth_id)
if user_db:
return user_db
names = [response.get('givenName', ''), response.get('familyName', '')]
emails = response.get('emails', {})
if not isinstance(emails, list):
emails = [emails]
emails = [e for e in emails if 'handle' in e]
emails.sort(key=lambda e: e.get('primary', False))
email = emails[0]['handle'] if emails else ''
return auth.create_user_db(
auth_id=auth_id,
name=' '.join(names).strip() or response['nickname'],
username=response['nickname'],
email=email,
verified=bool(email),
)
|
gmist/ctm-5studio
|
main/auth/yahoo.py
|
Python
|
mit
| 2,148
|
# Copyright 1999-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import formatter
import io
import sys
import time
import portage
from portage import os
from portage import _encodings
from portage import _unicode_encode
from portage.output import xtermTitle
from _emerge.getloadavg import getloadavg
if sys.hexversion >= 0x3000000:
basestring = str
class JobStatusDisplay(object):
_bound_properties = ("curval", "failed", "running")
# Don't update the display unless at least this much
# time has passed, in units of seconds.
_min_display_latency = 2
_default_term_codes = {
'cr' : '\r',
'el' : '\x1b[K',
'nel' : '\n',
}
_termcap_name_map = {
'carriage_return' : 'cr',
'clr_eol' : 'el',
'newline' : 'nel',
}
def __init__(self, quiet=False, xterm_titles=True):
object.__setattr__(self, "quiet", quiet)
object.__setattr__(self, "xterm_titles", xterm_titles)
object.__setattr__(self, "maxval", 0)
object.__setattr__(self, "merges", 0)
object.__setattr__(self, "_changed", False)
object.__setattr__(self, "_displayed", False)
object.__setattr__(self, "_last_display_time", 0)
self.reset()
isatty = os.environ.get('TERM') != 'dumb' and \
hasattr(self.out, 'isatty') and \
self.out.isatty()
object.__setattr__(self, "_isatty", isatty)
if not isatty or not self._init_term():
term_codes = {}
for k, capname in self._termcap_name_map.items():
term_codes[k] = self._default_term_codes[capname]
object.__setattr__(self, "_term_codes", term_codes)
encoding = sys.getdefaultencoding()
for k, v in self._term_codes.items():
if not isinstance(v, basestring):
self._term_codes[k] = v.decode(encoding, 'replace')
if self._isatty:
width = portage.output.get_term_size()[1]
else:
width = 80
self._set_width(width)
def _set_width(self, width):
if width == getattr(self, 'width', None):
return
if width <= 0 or width > 80:
width = 80
object.__setattr__(self, "width", width)
object.__setattr__(self, "_jobs_column_width", width - 32)
@property
def out(self):
"""Use a lazy reference to sys.stdout, in case the API consumer has
temporarily overridden stdout."""
return sys.stdout
def _write(self, s):
# avoid potential UnicodeEncodeError
s = _unicode_encode(s,
encoding=_encodings['stdio'], errors='backslashreplace')
out = self.out
if sys.hexversion >= 0x3000000:
out = out.buffer
out.write(s)
out.flush()
def _init_term(self):
"""
Initialize term control codes.
@rtype: bool
@return: True if term codes were successfully initialized,
False otherwise.
"""
term_type = os.environ.get("TERM", "").strip()
if not term_type:
return False
tigetstr = None
try:
import curses
try:
curses.setupterm(term_type, self.out.fileno())
tigetstr = curses.tigetstr
except curses.error:
pass
except ImportError:
pass
if tigetstr is None:
return False
term_codes = {}
for k, capname in self._termcap_name_map.items():
# Use _native_string for PyPy compat (bug #470258).
code = tigetstr(portage._native_string(capname))
if code is None:
code = self._default_term_codes[capname]
term_codes[k] = code
object.__setattr__(self, "_term_codes", term_codes)
return True
def _format_msg(self, msg):
return ">>> %s" % msg
def _erase(self):
self._write(
self._term_codes['carriage_return'] + \
self._term_codes['clr_eol'])
self._displayed = False
def _display(self, line):
self._write(line)
self._displayed = True
def _update(self, msg):
if not self._isatty:
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = True
return
if self._displayed:
self._erase()
self._display(self._format_msg(msg))
def displayMessage(self, msg):
was_displayed = self._displayed
if self._isatty and self._displayed:
self._erase()
self._write(self._format_msg(msg) + self._term_codes['newline'])
self._displayed = False
if was_displayed:
self._changed = True
self.display()
def reset(self):
self.maxval = 0
self.merges = 0
for name in self._bound_properties:
object.__setattr__(self, name, 0)
if self._displayed:
self._write(self._term_codes['newline'])
self._displayed = False
def __setattr__(self, name, value):
old_value = getattr(self, name)
if value == old_value:
return
object.__setattr__(self, name, value)
if name in self._bound_properties:
self._property_change(name, old_value, value)
def _property_change(self, name, old_value, new_value):
self._changed = True
self.display()
def _load_avg_str(self):
try:
avg = getloadavg()
except OSError:
return 'unknown'
max_avg = max(avg)
if max_avg < 10:
digits = 2
elif max_avg < 100:
digits = 1
else:
digits = 0
return ", ".join(("%%.%df" % digits ) % x for x in avg)
def display(self):
"""
Display status on stdout, but only if something has
changed since the last call. This always returns True,
for continuous scheduling via timeout_add.
"""
if self.quiet:
return True
current_time = time.time()
time_delta = current_time - self._last_display_time
if self._displayed and \
not self._changed:
if not self._isatty:
return True
if time_delta < self._min_display_latency:
return True
self._last_display_time = current_time
self._changed = False
self._display_status()
return True
def _display_status(self):
# Don't use len(self._completed_tasks) here since that also
# can include uninstall tasks.
curval_str = "%s" % (self.curval,)
maxval_str = "%s" % (self.maxval,)
running_str = "%s" % (self.running,)
failed_str = "%s" % (self.failed,)
load_avg_str = self._load_avg_str()
color_output = io.StringIO()
plain_output = io.StringIO()
style_file = portage.output.ConsoleStyleFile(color_output)
style_file.write_listener = plain_output
style_writer = portage.output.StyleWriter(file=style_file, maxcol=9999)
style_writer.style_listener = style_file.new_styles
f = formatter.AbstractFormatter(style_writer)
number_style = "INFORM"
f.add_literal_data("Jobs: ")
f.push_style(number_style)
f.add_literal_data(curval_str)
f.pop_style()
f.add_literal_data(" of ")
f.push_style(number_style)
f.add_literal_data(maxval_str)
f.pop_style()
f.add_literal_data(" complete")
if self.running:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(running_str)
f.pop_style()
f.add_literal_data(" running")
if self.failed:
f.add_literal_data(", ")
f.push_style(number_style)
f.add_literal_data(failed_str)
f.pop_style()
f.add_literal_data(" failed")
padding = self._jobs_column_width - len(plain_output.getvalue())
if padding > 0:
f.add_literal_data(padding * " ")
f.add_literal_data("Load avg: ")
f.add_literal_data(load_avg_str)
# Truncate to fit width, to avoid making the terminal scroll if the
# line overflows (happens when the load average is large).
plain_output = plain_output.getvalue()
if self._isatty and len(plain_output) > self.width:
# Use plain_output here since it's easier to truncate
# properly than the color output which contains console
# color codes.
self._update(plain_output[:self.width])
else:
self._update(color_output.getvalue())
if self.xterm_titles:
# If the HOSTNAME variable is exported, include it
# in the xterm title, just like emergelog() does.
# See bug #390699.
title_str = " ".join(plain_output.split())
hostname = os.environ.get("HOSTNAME")
if hostname is not None:
title_str = "%s: %s" % (hostname, title_str)
xtermTitle(title_str)
|
nullishzero/Portage
|
pym/_emerge/JobStatusDisplay.py
|
Python
|
gpl-2.0
| 7,763
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.builtins import range
import re
import sys
from twisted.enterprise import adbapi
from twisted.internet import defer
from twisted.python import log
from buildbot.process.buildstep import LogLineObserver
from buildbot.steps.shell import Test
class EqConnectionPool(adbapi.ConnectionPool):
"""This class works the same way as
twisted.enterprise.adbapi.ConnectionPool. But it adds the ability to
compare connection pools for equality (by comparing the arguments
passed to the constructor).
This is useful when passing the ConnectionPool to a BuildStep, as
otherwise Buildbot will consider the buildstep (and hence the
containing buildfactory) to have changed every time the configuration
is reloaded.
It also sets some defaults differently from adbapi.ConnectionPool that
are more suitable for use in MTR.
"""
def __init__(self, *args, **kwargs):
self._eqKey = (args, kwargs)
adbapi.ConnectionPool.__init__(self,
cp_reconnect=True, cp_min=1, cp_max=3,
*args, **kwargs)
def __eq__(self, other):
if isinstance(other, EqConnectionPool):
return self._eqKey == other._eqKey
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
class MtrTestFailData:
def __init__(self, testname, variant, result, info, text, callback):
self.testname = testname
self.variant = variant
self.result = result
self.info = info
self.text = text
self.callback = callback
def add(self, line):
self.text += line
def fireCallback(self):
return self.callback(self.testname, self.variant, self.result, self.info, self.text)
class MtrLogObserver(LogLineObserver):
"""
Class implementing a log observer (can be passed to
BuildStep.addLogObserver().
It parses the output of mysql-test-run.pl as used in MySQL,
MariaDB, Drizzle, etc.
    It counts the number of tests run and uses it to provide more accurate
completion estimates.
It parses out test failures from the output and summarizes the results on
the Waterfall page. It also passes the information to methods that can be
overridden in a subclass to do further processing on the information."""
_line_re = re.compile(
r"^([-._0-9a-zA-z]+)( '[-_ a-zA-Z]+')?\s+(w[0-9]+\s+)?\[ (fail|pass) \]\s*(.*)$")
_line_re2 = re.compile(
r"^[-._0-9a-zA-z]+( '[-_ a-zA-Z]+')?\s+(w[0-9]+\s+)?\[ [-a-z]+ \]")
_line_re3 = re.compile(
r"^\*\*\*Warnings generated in error logs during shutdown after running tests: (.*)")
_line_re4 = re.compile(r"^The servers were restarted [0-9]+ times$")
_line_re5 = re.compile(r"^Only\s+[0-9]+\s+of\s+[0-9]+\s+completed.$")
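    # Illustrative (assumed) samples of mysql-test-run.pl output matched by the
    # patterns above; they are not taken from a real log:
    #   _line_re  : "main.alias 'innodb' w2 [ pass ]   345"
    #   _line_re  : "main.subselect w1 [ fail ]  Found warnings in error log"
    #   _line_re3 : "***Warnings generated in error logs during shutdown after running tests: main.alias main.join"
    #   _line_re4 : "The servers were restarted 12 times"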
def __init__(self, textLimit=5, testNameLimit=16, testType=None):
self.textLimit = textLimit
self.testNameLimit = testNameLimit
self.testType = testType
self.numTests = 0
self.testFail = None
self.failList = []
self.warnList = []
LogLineObserver.__init__(self)
def setLog(self, loog):
LogLineObserver.setLog(self, loog)
d = loog.waitUntilFinished()
d.addCallback(lambda l: self.closeTestFail())
def outLineReceived(self, line):
stripLine = line.strip("\r\n")
m = self._line_re.search(stripLine)
if m:
testname, variant, worker, result, info = m.groups()
self.closeTestFail()
self.numTests += 1
self.step.setProgress('tests', self.numTests)
if result == "fail":
if variant is None:
variant = ""
else:
variant = variant[2:-1]
self.openTestFail(
testname, variant, result, info, stripLine + "\n")
else:
m = self._line_re3.search(stripLine)
# pylint: disable=too-many-boolean-expressions
if m:
stuff = m.group(1)
self.closeTestFail()
testList = stuff.split(" ")
self.doCollectWarningTests(testList)
elif (self._line_re2.search(stripLine) or
self._line_re4.search(stripLine) or
self._line_re5.search(stripLine) or
stripLine == "Test suite timeout! Terminating..." or
stripLine.startswith("mysql-test-run: *** ERROR: Not all tests completed") or
(stripLine.startswith("------------------------------------------------------------")
and self.testFail is not None)):
self.closeTestFail()
else:
self.addTestFailOutput(stripLine + "\n")
def openTestFail(self, testname, variant, result, info, line):
self.testFail = MtrTestFailData(
testname, variant, result, info, line, self.doCollectTestFail)
def addTestFailOutput(self, line):
if self.testFail is not None:
self.testFail.add(line)
def closeTestFail(self):
if self.testFail is not None:
self.testFail.fireCallback()
self.testFail = None
def addToText(self, src, dst):
lastOne = None
count = 0
for t in src:
if t != lastOne:
dst.append(t)
count += 1
if count >= self.textLimit:
break
def makeText(self, done):
if done:
text = ["test"]
else:
text = ["testing"]
if self.testType:
text.append(self.testType)
fails = sorted(self.failList[:])
self.addToText(fails, text)
warns = sorted(self.warnList[:])
self.addToText(warns, text)
return text
# Update waterfall status.
def updateText(self):
self.step.step_status.setText(self.makeText(False))
strip_re = re.compile(r"^[a-z]+\.")
def displayTestName(self, testname):
displayTestName = self.strip_re.sub("", testname)
if len(displayTestName) > self.testNameLimit:
displayTestName = displayTestName[
:(self.testNameLimit - 2)] + "..."
return displayTestName
def doCollectTestFail(self, testname, variant, result, info, text):
self.failList.append("F:" + self.displayTestName(testname))
self.updateText()
self.collectTestFail(testname, variant, result, info, text)
def doCollectWarningTests(self, testList):
for t in testList:
self.warnList.append("W:" + self.displayTestName(t))
self.updateText()
self.collectWarningTests(testList)
# These two methods are overridden to actually do something with the data.
def collectTestFail(self, testname, variant, result, info, text):
pass
def collectWarningTests(self, testList):
pass
class MTR(Test):
"""
Build step that runs mysql-test-run.pl, as used in MySQL, Drizzle,
MariaDB, etc.
It uses class MtrLogObserver to parse test results out from the
output of mysql-test-run.pl, providing better completion time
estimates and summarizing test failures on the waterfall page.
It also provides access to mysqld server error logs from the test
run to help debugging any problems.
Optionally, it can insert into a database data about the test run,
including details of any test failures.
Parameters:
textLimit
Maximum number of test failures to show on the waterfall page
(to not flood the page in case of a large number of test
        failures). Defaults to 5.
testNameLimit
Maximum length of test names to show unabbreviated in the
waterfall page, to avoid excessive column width. Defaults to 16.
parallel
Value of --parallel option used for mysql-test-run.pl (number
of processes used to run the test suite in parallel). Defaults
to 4. This is used to determine the number of server error log
files to download from the worker. Specifying a too high value
does not hurt (as nonexisting error logs will be ignored),
        however, if a --parallel value greater than the default is used,
        it needs to be specified here, or some server error logs will be
        missing.
dbpool
An instance of twisted.enterprise.adbapi.ConnectionPool, or None.
Defaults to None. If specified, results are inserted into the database
using the ConnectionPool.
The class process.mtrlogobserver.EqConnectionPool subclass of
ConnectionPool can be useful to pass as value for dbpool, to
avoid having config reloads think the Buildstep is changed
just because it gets a new ConnectionPool instance (even
though connection parameters are unchanged).
autoCreateTables
Boolean, defaults to False. If True (and dbpool is specified), the
necessary database tables will be created automatically if they do
not exist already. Alternatively, the tables can be created manually
from the SQL statements found in the mtrlogobserver.py source file.
test_type
test_info
Two descriptive strings that will be inserted in the database tables if
dbpool is specified. The test_type string, if specified, will also
appear on the waterfall page."""
renderables = ['mtr_subdir']
def __init__(self, dbpool=None, test_type=None, test_info="",
description=None, descriptionDone=None,
autoCreateTables=False, textLimit=5, testNameLimit=16,
parallel=4, logfiles=None, lazylogfiles=True,
warningPattern="MTR's internal check of the test case '.*' failed",
mtr_subdir="mysql-test", **kwargs):
if logfiles is None:
logfiles = {}
if description is None:
description = ["testing"]
if test_type:
description.append(test_type)
if descriptionDone is None:
descriptionDone = ["test"]
if test_type:
descriptionDone.append(test_type)
Test.__init__(self, logfiles=logfiles, lazylogfiles=lazylogfiles,
description=description, descriptionDone=descriptionDone,
warningPattern=warningPattern, **kwargs)
self.dbpool = dbpool
self.test_type = test_type
self.test_info = test_info
self.autoCreateTables = autoCreateTables
self.textLimit = textLimit
self.testNameLimit = testNameLimit
self.parallel = parallel
self.mtr_subdir = mtr_subdir
self.progressMetrics += ('tests',)
def start(self):
# Add mysql server logfiles.
for mtr in range(0, self.parallel + 1):
for mysqld in range(1, 4 + 1):
if mtr == 0:
logname = "mysqld.%d.err" % mysqld
filename = "var/log/mysqld.%d.err" % mysqld
else:
logname = "mysqld.%d.err.%d" % (mysqld, mtr)
filename = "var/%d/log/mysqld.%d.err" % (mtr, mysqld)
self.addLogFile(logname, self.mtr_subdir + "/" + filename)
self.myMtr = self.MyMtrLogObserver(textLimit=self.textLimit,
testNameLimit=self.testNameLimit,
testType=self.test_type)
self.addLogObserver("stdio", self.myMtr)
# Insert a row for this test run into the database and set up
# build properties, then start the command proper.
d = self.registerInDB()
d.addCallback(self.afterRegisterInDB)
d.addErrback(self.failed)
def getText(self, command, results):
return self.myMtr.makeText(True)
def runInteractionWithRetry(self, actionFn, *args, **kw):
"""
Run a database transaction with dbpool.runInteraction, but retry the
transaction in case of a temporary error (like connection lost).
This is needed to be robust against things like database connection
idle timeouts.
The passed callable that implements the transaction must be retryable,
ie. it must not have any destructive side effects in the case where
an exception is thrown and/or rollback occurs that would prevent it
from functioning correctly when called again."""
def runWithRetry(txn, *args, **kw):
retryCount = 0
while(True):
try:
return actionFn(txn, *args, **kw)
except txn.OperationalError:
retryCount += 1
if retryCount >= 5:
raise
excType, excValue, excTraceback = sys.exc_info()
log.msg("Database transaction failed (caught exception %s(%s)), retrying ..." % (
excType, excValue))
txn.close()
txn.reconnect()
txn.reopen()
return self.dbpool.runInteraction(runWithRetry, *args, **kw)
def runQueryWithRetry(self, *args, **kw):
"""
Run a database query, like with dbpool.runQuery, but retry the query in
case of a temporary error (like connection lost).
This is needed to be robust against things like database connection
idle timeouts."""
def runQuery(txn, *args, **kw):
txn.execute(*args, **kw)
return txn.fetchall()
return self.runInteractionWithRetry(runQuery, *args, **kw)
def registerInDB(self):
if self.dbpool:
return self.runInteractionWithRetry(self.doRegisterInDB)
else:
return defer.succeed(0)
# The real database work is done in a thread in a synchronous way.
def doRegisterInDB(self, txn):
# Auto create tables.
# This is off by default, as it gives warnings in log file
# about tables already existing (and I did not find the issue
# important enough to find a better fix).
if self.autoCreateTables:
txn.execute("""
CREATE TABLE IF NOT EXISTS test_run(
id INT PRIMARY KEY AUTO_INCREMENT,
branch VARCHAR(100),
revision VARCHAR(32) NOT NULL,
platform VARCHAR(100) NOT NULL,
dt TIMESTAMP NOT NULL,
bbnum INT NOT NULL,
typ VARCHAR(32) NOT NULL,
info VARCHAR(255),
KEY (branch, revision),
KEY (dt),
KEY (platform, bbnum)
) ENGINE=innodb
""")
txn.execute("""
CREATE TABLE IF NOT EXISTS test_failure(
test_run_id INT NOT NULL,
test_name VARCHAR(100) NOT NULL,
test_variant VARCHAR(16) NOT NULL,
info_text VARCHAR(255),
failure_text TEXT,
PRIMARY KEY (test_run_id, test_name, test_variant)
) ENGINE=innodb
""")
txn.execute("""
CREATE TABLE IF NOT EXISTS test_warnings(
test_run_id INT NOT NULL,
list_id INT NOT NULL,
list_idx INT NOT NULL,
test_name VARCHAR(100) NOT NULL,
PRIMARY KEY (test_run_id, list_id, list_idx)
) ENGINE=innodb
""")
revision = self.getProperty("got_revision")
if revision is None:
revision = self.getProperty("revision")
typ = "mtr"
if self.test_type:
typ = self.test_type
txn.execute("""
INSERT INTO test_run(branch, revision, platform, dt, bbnum, typ, info)
VALUES (%s, %s, %s, CURRENT_TIMESTAMP(), %s, %s, %s)
""", (self.getProperty("branch"), revision,
self.getProperty("buildername"), self.getProperty("buildnumber"),
typ, self.test_info))
return txn.lastrowid
def afterRegisterInDB(self, insert_id):
self.setProperty("mtr_id", insert_id)
self.setProperty("mtr_warn_id", 0)
Test.start(self)
def reportError(self, err):
log.msg("Error in async insert into database: %s" % err)
class MyMtrLogObserver(MtrLogObserver):
def collectTestFail(self, testname, variant, result, info, text):
# Insert asynchronously into database.
dbpool = self.step.dbpool
if dbpool is None:
return defer.succeed(None)
run_id = self.step.getProperty("mtr_id")
if variant is None:
variant = ""
d = self.step.runQueryWithRetry("""
INSERT INTO test_failure(test_run_id, test_name, test_variant, info_text, failure_text)
VALUES (%s, %s, %s, %s, %s)
""", (run_id, testname, variant, info, text))
d.addErrback(self.step.reportError)
return d
def collectWarningTests(self, testList):
# Insert asynchronously into database.
dbpool = self.step.dbpool
if dbpool is None:
return defer.succeed(None)
run_id = self.step.getProperty("mtr_id")
warn_id = self.step.getProperty("mtr_warn_id")
self.step.setProperty("mtr_warn_id", warn_id + 1)
q = ("INSERT INTO test_warnings(test_run_id, list_id, list_idx, test_name) " +
"VALUES " + ", ".join(map(lambda x: "(%s, %s, %s, %s)", testList)))
v = []
idx = 0
for t in testList:
v.extend([run_id, warn_id, idx, t])
idx = idx + 1
d = self.step.runQueryWithRetry(q, tuple(v))
d.addErrback(self.step.reportError)
return d
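# Illustrative sketch (not part of the original module): a hypothetical build
# factory wiring for the MTR step, using EqConnectionPool so that config
# reloads do not see the step as changed. Host, credentials and the command
# line are made-up placeholders.
#
#   from buildbot.process.factory import BuildFactory
#   from buildbot.steps.mtrlogobserver import MTR, EqConnectionPool
#
#   f = BuildFactory()
#   f.addStep(MTR(
#       workdir="build/mysql-test",
#       command=["perl", "mysql-test-run.pl", "--force", "--parallel=4"],
#       parallel=4,
#       test_type="nightly",
#       dbpool=EqConnectionPool("MySQLdb", "db.example.com", "buildbot",
#                               "secret", "test_results"),
#       autoCreateTables=True,
#   ))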
|
Lekensteyn/buildbot
|
master/buildbot/steps/mtrlogobserver.py
|
Python
|
gpl-2.0
| 18,336
|
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Julien Veyssier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
from weboob.capabilities.base import UserError
from weboob.capabilities.calendar import CapCalendarEvent, CATEGORIES, BaseCalendarEvent
from weboob.capabilities.video import CapVideo, BaseVideo
from weboob.capabilities.collection import CapCollection, CollectionNotFound, Collection
from weboob.capabilities.cinema import CapCinema, Person, Movie
from weboob.tools.backend import Module
from .browser import AllocineBrowser
__all__ = ['AllocineModule']
class AllocineModule(Module, CapCinema, CapVideo, CapCalendarEvent, CapCollection):
NAME = 'allocine'
MAINTAINER = u'Julien Veyssier'
EMAIL = 'julien.veyssier@aiur.fr'
VERSION = '1.1'
DESCRIPTION = u'AlloCiné French cinema database service'
LICENSE = 'AGPLv3+'
BROWSER = AllocineBrowser
ASSOCIATED_CATEGORIES = [CATEGORIES.CINE]
def get_movie(self, id):
return self.browser.get_movie(id)
def get_person(self, id):
return self.browser.get_person(id)
def iter_movies(self, pattern):
return self.browser.iter_movies(pattern.encode('utf-8'))
def iter_persons(self, pattern):
return self.browser.iter_persons(pattern.encode('utf-8'))
def iter_movie_persons(self, id, role=None):
return self.browser.iter_movie_persons(id, role)
def iter_person_movies(self, id, role=None):
return self.browser.iter_person_movies(id, role)
def iter_person_movies_ids(self, id):
return self.browser.iter_person_movies_ids(id)
def iter_movie_persons_ids(self, id):
return self.browser.iter_movie_persons_ids(id)
def get_person_biography(self, id):
return self.browser.get_person_biography(id)
def get_movie_releases(self, id, country=None):
return self.browser.get_movie_releases(id, country)
def fill_person(self, person, fields):
if 'real_name' in fields or 'birth_place' in fields\
or 'death_date' in fields or 'nationality' in fields\
or 'short_biography' in fields or 'roles' in fields\
or 'birth_date' in fields or 'thumbnail_url' in fields\
or 'biography' in fields\
or 'gender' in fields or fields is None:
per = self.get_person(person.id)
person.real_name = per.real_name
person.birth_date = per.birth_date
person.death_date = per.death_date
person.birth_place = per.birth_place
person.gender = per.gender
person.nationality = per.nationality
person.short_biography = per.short_biography
person.short_description = per.short_description
person.roles = per.roles
person.biography = per.biography
person.thumbnail_url = per.thumbnail_url
return person
def fill_movie(self, movie, fields):
if 'other_titles' in fields or 'release_date' in fields\
or 'duration' in fields or 'country' in fields\
or 'roles' in fields or 'note' in fields\
or 'thumbnail_url' in fields:
mov = self.get_movie(movie.id)
movie.other_titles = mov.other_titles
movie.release_date = mov.release_date
movie.duration = mov.duration
movie.pitch = mov.pitch
movie.country = mov.country
movie.note = mov.note
movie.roles = mov.roles
movie.genres = mov.genres
movie.short_description = mov.short_description
movie.thumbnail_url = mov.thumbnail_url
if 'all_release_dates' in fields:
movie.all_release_dates = self.get_movie_releases(movie.id)
return movie
def fill_video(self, video, fields):
if 'url' in fields:
with self.browser:
if not isinstance(video, BaseVideo):
                    video = self.get_video(video.id)
if hasattr(video, '_video_code'):
video.url = unicode(self.browser.get_video_url(video._video_code))
if 'thumbnail' in fields and video and video.thumbnail:
with self.browser:
video.thumbnail.data = self.browser.readurl(video.thumbnail.url)
return video
def get_video(self, _id):
with self.browser:
split_id = _id.split('#')
if split_id[-1] == 'movie':
return self.browser.get_movie_from_id(split_id[0])
return self.browser.get_video_from_id(split_id[0], split_id[-1])
def iter_resources(self, objs, split_path):
with self.browser:
if BaseVideo in objs:
collection = self.get_collection(objs, split_path)
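                # path_level 0 lists the top-level categories; level 1 lists the
                # videos of a category (or the emissions for 'acshow'); level 2
                # lists the videos of a single emission.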
if collection.path_level == 0:
yield Collection([u'comingsoon'], u'Films prochainement au cinéma')
yield Collection([u'nowshowing'], u'Films au cinéma')
yield Collection([u'acshow'], u'Émissions')
yield Collection([u'interview'], u'Interviews')
if collection.path_level == 1:
if collection.basename == u'acshow':
emissions = self.browser.get_emissions(collection.basename)
if emissions:
for emission in emissions:
yield emission
elif collection.basename == u'interview':
videos = self.browser.get_categories_videos(collection.basename)
if videos:
for video in videos:
yield video
else:
videos = self.browser.get_categories_movies(collection.basename)
if videos:
for video in videos:
yield video
if collection.path_level == 2:
videos = self.browser.get_categories_videos(':'.join(collection.split_path))
if videos:
for video in videos:
yield video
def validate_collection(self, objs, collection):
if collection.path_level == 0:
return
if collection.path_level == 1 and (collection.basename in
[u'comingsoon', u'nowshowing', u'acshow', u'interview']):
return
if collection.path_level == 2 and collection.parent_path == [u'acshow']:
return
raise CollectionNotFound(collection.split_path)
def search_events(self, query):
with self.browser:
if CATEGORIES.CINE in query.categories:
                if query.city and re.match(r'\d{5}', query.city):
events = list(self.browser.search_events(query))
events.sort(key=lambda x: x.start_date, reverse=False)
return events
raise UserError('You must enter a zip code in city field')
def get_event(self, id):
return self.browser.get_event(id)
def fill_event(self, event, fields):
if 'description' in fields:
movieCode = event.id.split('#')[2]
movie = self.get_movie(movieCode)
event.description = movie.pitch
return event
OBJECTS = {
Person: fill_person,
Movie: fill_movie,
BaseVideo: fill_video,
BaseCalendarEvent: fill_event
}
|
frankrousseau/weboob
|
modules/allocine/module.py
|
Python
|
agpl-3.0
| 8,218
|
"""Support for Z-Wave sensors."""
from homeassistant.components.sensor import DEVICE_CLASS_BATTERY, DOMAIN, SensorEntity
from homeassistant.const import DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, const
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Sensor from Config Entry."""
@callback
def async_add_sensor(sensor):
"""Add Z-Wave Sensor."""
async_add_entities([sensor])
async_dispatcher_connect(hass, "zwave_new_sensor", async_add_sensor)
def get_device(node, values, **kwargs):
"""Create Z-Wave entity device."""
# Generic Device mappings
if values.primary.command_class == const.COMMAND_CLASS_BATTERY:
return ZWaveBatterySensor(values)
if node.has_command_class(const.COMMAND_CLASS_SENSOR_MULTILEVEL):
return ZWaveMultilevelSensor(values)
if (
node.has_command_class(const.COMMAND_CLASS_METER)
and values.primary.type == const.TYPE_DECIMAL
):
return ZWaveMultilevelSensor(values)
if node.has_command_class(const.COMMAND_CLASS_ALARM) or node.has_command_class(
const.COMMAND_CLASS_SENSOR_ALARM
):
return ZWaveAlarmSensor(values)
return None
class ZWaveSensor(ZWaveDeviceEntity, SensorEntity):
"""Representation of a Z-Wave sensor."""
def __init__(self, values):
"""Initialize the sensor."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self.update_properties()
def update_properties(self):
"""Handle the data changes for node values."""
self._state = self.values.primary.data
self._units = self.values.primary.units
@property
def force_update(self):
"""Return force_update."""
return True
@property
def native_value(self):
"""Return the state of the sensor."""
return self._state
@property
def native_unit_of_measurement(self):
"""Return the unit of measurement the value is expressed in."""
return self._units
class ZWaveMultilevelSensor(ZWaveSensor):
"""Representation of a multi level sensor Z-Wave sensor."""
@property
def native_value(self):
"""Return the state of the sensor."""
if self._units in ("C", "F"):
return round(self._state, 1)
if isinstance(self._state, float):
return round(self._state, 2)
return self._state
@property
def device_class(self):
"""Return the class of this device."""
if self._units in ["C", "F"]:
return DEVICE_CLASS_TEMPERATURE
return None
@property
def native_unit_of_measurement(self):
"""Return the unit the value is expressed in."""
if self._units == "C":
return TEMP_CELSIUS
if self._units == "F":
return TEMP_FAHRENHEIT
return self._units
class ZWaveAlarmSensor(ZWaveSensor):
"""Representation of a Z-Wave sensor that sends Alarm alerts.
Examples include certain Multisensors that have motion and vibration
capabilities. Z-Wave defines various alarm types such as Smoke, Flood,
Burglar, CarbonMonoxide, etc.
This wraps these alarms and allows you to use them to trigger things, etc.
COMMAND_CLASS_ALARM is what we get here.
"""
class ZWaveBatterySensor(ZWaveSensor):
"""Representation of Z-Wave device battery level."""
@property
def device_class(self):
"""Return the class of this device."""
return DEVICE_CLASS_BATTERY
|
jawilson/home-assistant
|
homeassistant/components/zwave/sensor.py
|
Python
|
apache-2.0
| 3,679
|
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
from pysandesh_example.gen_py.vn.ttypes import VirtualNetwork, VNInfo, VirtualNetworkResp, \
VirtualNetworkAll, VNStats, VirtualNetworkAllResp
def VirtualNetwork_handle_request(self, sandesh):
vn_stats = VNStats(in_pkts=10, out_pkts=20, in_bytes=1024, out_bytes=2048)
vms = ['vm1', 'vm2', 'vm3']
if not self.name:
vn_name = 'vn1'
else:
vn_name = self.name
if not self.id:
vn_id = 100
else:
vn_id = self.id
vn_info = VNInfo(vn_name, vn_id, vms, vn_stats)
vn_resp = VirtualNetworkResp(vn_info)
vn_resp.response(self._context)
#end VirtualNetwork_handle_request
def VirtualNetworkAll_handle_request(self, sandesh):
    range_min = 1
    range_max = 10
    for i in range(range_min, range_max):
vn_stats = VNStats(in_pkts=i*10, out_pkts=i*20,
in_bytes=i*64, out_bytes=i*128)
vms = []
for j in range(1, 4):
vms.append('vm'+str(j*i))
vn_info = VNInfo('VN'+str(i), i, vms, vn_stats)
vn_resp = VirtualNetworkAllResp(vn_info)
if i != (range_max-1):
vn_resp.response(self._context, True)
else:
vn_resp.response(self._context, False)
#end VirtualNetworkAll_handle_request
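# Attach the handlers above to the generated Sandesh request classes by
# monkey patching their handle_request methods.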
def bind_handle_request_impl():
VirtualNetwork.handle_request = VirtualNetwork_handle_request
VirtualNetworkAll.handle_request = VirtualNetworkAll_handle_request
|
vpramo/contrail-sandesh
|
library/python/pysandesh/example/pysandesh_example/sandesh_req_impl.py
|
Python
|
apache-2.0
| 1,469
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-07-17 21:06:43
import time
import mysql.connector
from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB
from pyspider.database.basedb import BaseDB
from .mysqlbase import MySQLMixin
class ProjectDB(MySQLMixin, BaseProjectDB, BaseDB):
__tablename__ = 'projectdb'
def __init__(self, host='localhost', port=3306, database='projectdb',
user='root', passwd=None):
self.database_name = database
self.conn = mysql.connector.connect(user=user, password=passwd,
host=host, port=port, autocommit=True)
if database not in [x[0] for x in self._execute('show databases')]:
self._execute('CREATE DATABASE %s' % self.escape(database))
self.conn.database = database
self._execute('''CREATE TABLE IF NOT EXISTS %s (
`name` varchar(64) PRIMARY KEY,
`group` varchar(64),
`status` varchar(16),
`script` TEXT,
`comments` varchar(1024),
`rate` float(11, 4),
`burst` float(11, 4),
`updatetime` double(16, 4)
) ENGINE=MyISAM CHARSET=utf8''' % self.escape(self.__tablename__))
def insert(self, name, obj={}):
obj = dict(obj)
obj['name'] = name
obj['updatetime'] = time.time()
return self._insert(**obj)
def update(self, name, obj={}, **kwargs):
obj = dict(obj)
obj.update(kwargs)
obj['updatetime'] = time.time()
ret = self._update(where="`name` = %s" % self.placeholder, where_values=(name, ), **obj)
return ret.rowcount
def get_all(self, fields=None):
return self._select2dic(what=fields)
def get(self, name, fields=None):
where = "`name` = %s" % self.placeholder
for each in self._select2dic(what=fields, where=where, where_values=(name, )):
return each
return None
def drop(self, name):
where = "`name` = %s" % self.placeholder
return self._delete(where=where, where_values=(name, ))
def check_update(self, timestamp, fields=None):
where = "`updatetime` >= %f" % timestamp
return self._select2dic(what=fields, where=where)
|
shanezhiu/pyspider
|
pyspider/database/mysql/projectdb.py
|
Python
|
apache-2.0
| 2,401
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `LinearOperator` and sub-classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import six
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.platform import test
class OperatorBuildInfo(object):
"""Object encoding expected shape for a test.
Encodes the expected shape of a matrix for a test. Also
allows additional metadata for the test harness.
"""
def __init__(self, shape, **kwargs):
self.shape = shape
self.__dict__.update(kwargs)
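    # For example, OperatorBuildInfo((2, 3, 3), use_placeholder=True) yields an
    # object with .shape == (2, 3, 3) and .use_placeholder == True (the keyword
    # name here is only an illustration).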
@six.add_metaclass(abc.ABCMeta) # pylint: disable=no-init
class LinearOperatorDerivedClassTest(test.TestCase):
"""Tests for derived classes.
Subclasses should implement every abstractmethod, and this will enable all
test methods to work.
"""
# Absolute/relative tolerance for tests.
_atol = {
dtypes.float16: 1e-3,
dtypes.float32: 1e-6,
dtypes.float64: 1e-12,
dtypes.complex64: 1e-6,
dtypes.complex128: 1e-12
}
_rtol = {
dtypes.float16: 1e-3,
dtypes.float32: 1e-6,
dtypes.float64: 1e-12,
dtypes.complex64: 1e-6,
dtypes.complex128: 1e-12
}
def assertAC(self, x, y):
"""Derived classes can set _atol, _rtol to get different tolerance."""
dtype = dtypes.as_dtype(x.dtype)
atol = self._atol[dtype]
rtol = self._rtol[dtype]
self.assertAllClose(x, y, atol=atol, rtol=rtol)
@property
def _adjoint_options(self):
return [False, True]
@property
def _adjoint_arg_options(self):
return [False, True]
@property
def _dtypes_to_test(self):
# TODO(langmore) Test tf.float16 once tf.matrix_solve works in 16bit.
return [dtypes.float32, dtypes.float64, dtypes.complex64, dtypes.complex128]
@property
def _use_placeholder_options(self):
return [False, True]
@abc.abstractproperty
def _operator_build_infos(self):
"""Returns list of OperatorBuildInfo, encapsulating the shape to test."""
raise NotImplementedError("operator_build_infos has not been implemented.")
@abc.abstractmethod
def _operator_and_mat_and_feed_dict(self, build_info, dtype, use_placeholder):
"""Build a batch matrix and an Operator that should have similar behavior.
Every operator acts like a (batch) matrix. This method returns both
together, and is used by tests.
Args:
build_info: `OperatorBuildInfo`, encoding shape information about the
operator.
dtype: Numpy dtype. Data type of returned array/operator.
use_placeholder: Python bool. If True, initialize the operator with a
placeholder of undefined shape and correct dtype.
Returns:
operator: `LinearOperator` subclass instance.
mat: `Tensor` representing operator.
feed_dict: Dictionary.
        If use_placeholder is True, this must contain everything needed to be fed
        to sess.run calls at runtime to make the operator work.
"""
# Create a matrix as a numpy array with desired shape/dtype.
# Create a LinearOperator that should have the same behavior as the matrix.
raise NotImplementedError("Not implemented yet.")
@abc.abstractmethod
def _make_rhs(self, operator, adjoint, with_batch=True):
"""Make a rhs appropriate for calling operator.solve(rhs).
Args:
operator: A `LinearOperator`
adjoint: Python `bool`. If `True`, we are making a 'rhs' value for the
adjoint operator.
with_batch: Python `bool`. If `True`, create `rhs` with the same batch
shape as operator, and otherwise create a matrix without any batch
shape.
Returns:
A `Tensor`
"""
raise NotImplementedError("_make_rhs is not defined.")
@abc.abstractmethod
def _make_x(self, operator, adjoint, with_batch=True):
"""Make an 'x' appropriate for calling operator.matmul(x).
Args:
operator: A `LinearOperator`
adjoint: Python `bool`. If `True`, we are making an 'x' value for the
adjoint operator.
with_batch: Python `bool`. If `True`, create `x` with the same batch shape
as operator, and otherwise create a matrix without any batch shape.
Returns:
A `Tensor`
"""
raise NotImplementedError("_make_x is not defined.")
@property
def _tests_to_skip(self):
"""List of test names to skip."""
# Subclasses should over-ride if they want to skip some tests.
# To skip "test_foo", add "foo" to this list.
return []
def _skip_if_tests_to_skip_contains(self, test_name):
"""If self._tests_to_skip contains test_name, raise SkipTest exception.
See tests below for usage.
Args:
test_name: String name corresponding to a test.
Raises:
SkipTest Exception, if test_name is in self._tests_to_skip.
"""
if test_name in self._tests_to_skip:
self.skipTest(
"{} skipped because it was added to self._tests_to_skip.".format(
test_name))
def test_to_dense(self):
self._skip_if_tests_to_skip_contains("to_dense")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_dense = operator.to_dense()
if not use_placeholder:
self.assertAllEqual(build_info.shape, op_dense.get_shape())
op_dense_v, mat_v = sess.run([op_dense, mat], feed_dict=feed_dict)
self.assertAC(op_dense_v, mat_v)
def test_det(self):
self._skip_if_tests_to_skip_contains("det")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_det = operator.determinant()
if not use_placeholder:
self.assertAllEqual(build_info.shape[:-2], op_det.get_shape())
op_det_v, mat_det_v = sess.run(
[op_det, linalg_ops.matrix_determinant(mat)],
feed_dict=feed_dict)
self.assertAC(op_det_v, mat_det_v)
def test_log_abs_det(self):
self._skip_if_tests_to_skip_contains("log_abs_det")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_log_abs_det = operator.log_abs_determinant()
_, mat_log_abs_det = linalg.slogdet(mat)
if not use_placeholder:
self.assertAllEqual(
build_info.shape[:-2], op_log_abs_det.get_shape())
op_log_abs_det_v, mat_log_abs_det_v = sess.run(
[op_log_abs_det, mat_log_abs_det], feed_dict=feed_dict)
self.assertAC(op_log_abs_det_v, mat_log_abs_det_v)
def _test_matmul(self, with_batch):
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
# If batch dimensions are omitted, but there are
# no batch dimensions for the linear operator, then
# skip the test case. This is already checked with
# with_batch=True.
if not with_batch and len(build_info.shape) <= 2:
continue
for dtype in self._dtypes_to_test:
for adjoint in self._adjoint_options:
for adjoint_arg in self._adjoint_arg_options:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
x = self._make_x(
operator, adjoint=adjoint, with_batch=with_batch)
# If adjoint_arg, compute A X^H^H = A X.
if adjoint_arg:
op_matmul = operator.matmul(
linalg.adjoint(x),
adjoint=adjoint,
adjoint_arg=adjoint_arg)
else:
op_matmul = operator.matmul(x, adjoint=adjoint)
mat_matmul = linear_operator_util.matmul_with_broadcast(
mat, x, adjoint_a=adjoint)
if not use_placeholder:
self.assertAllEqual(op_matmul.get_shape(),
mat_matmul.get_shape())
op_matmul_v, mat_matmul_v = sess.run(
[op_matmul, mat_matmul], feed_dict=feed_dict)
self.assertAC(op_matmul_v, mat_matmul_v)
def test_matmul(self):
self._skip_if_tests_to_skip_contains("matmul")
self._test_matmul(with_batch=True)
def test_matmul_with_broadcast(self):
self._skip_if_tests_to_skip_contains("matmul_with_broadcast")
self._test_matmul(with_batch=False)
def _test_solve(self, with_batch):
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
# If batch dimensions are omitted, but there are
# no batch dimensions for the linear operator, then
# skip the test case. This is already checked with
# with_batch=True.
if not with_batch and len(build_info.shape) <= 2:
continue
for dtype in self._dtypes_to_test:
for adjoint in self._adjoint_options:
for adjoint_arg in self._adjoint_arg_options:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
rhs = self._make_rhs(
operator, adjoint=adjoint, with_batch=with_batch)
# If adjoint_arg, solve A X = (rhs^H)^H = rhs.
if adjoint_arg:
op_solve = operator.solve(
linalg.adjoint(rhs),
adjoint=adjoint,
adjoint_arg=adjoint_arg)
else:
op_solve = operator.solve(
rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
mat_solve = linear_operator_util.matrix_solve_with_broadcast(
mat, rhs, adjoint=adjoint)
if not use_placeholder:
self.assertAllEqual(op_solve.get_shape(),
mat_solve.get_shape())
op_solve_v, mat_solve_v = sess.run(
[op_solve, mat_solve], feed_dict=feed_dict)
self.assertAC(op_solve_v, mat_solve_v)
def test_solve(self):
self._skip_if_tests_to_skip_contains("solve")
self._test_solve(with_batch=True)
def test_solve_with_broadcast(self):
self._skip_if_tests_to_skip_contains("solve_with_broadcast")
self._test_solve(with_batch=False)
def test_trace(self):
self._skip_if_tests_to_skip_contains("trace")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_trace = operator.trace()
mat_trace = math_ops.trace(mat)
if not use_placeholder:
self.assertAllEqual(op_trace.get_shape(), mat_trace.get_shape())
op_trace_v, mat_trace_v = sess.run(
[op_trace, mat_trace], feed_dict=feed_dict)
self.assertAC(op_trace_v, mat_trace_v)
def test_add_to_tensor(self):
self._skip_if_tests_to_skip_contains("add_to_tensor")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_plus_2mat = operator.add_to_tensor(2 * mat)
if not use_placeholder:
self.assertAllEqual(build_info.shape, op_plus_2mat.get_shape())
op_plus_2mat_v, mat_v = sess.run(
[op_plus_2mat, mat], feed_dict=feed_dict)
self.assertAC(op_plus_2mat_v, 3 * mat_v)
def test_diag_part(self):
self._skip_if_tests_to_skip_contains("diag_part")
for use_placeholder in self._use_placeholder_options:
for build_info in self._operator_build_infos:
for dtype in self._dtypes_to_test:
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat, feed_dict = self._operator_and_mat_and_feed_dict(
build_info, dtype, use_placeholder=use_placeholder)
op_diag_part = operator.diag_part()
mat_diag_part = array_ops.matrix_diag_part(mat)
if not use_placeholder:
self.assertAllEqual(mat_diag_part.get_shape(),
op_diag_part.get_shape())
op_diag_part_, mat_diag_part_ = sess.run(
[op_diag_part, mat_diag_part], feed_dict=feed_dict)
self.assertAC(op_diag_part_, mat_diag_part_)
@six.add_metaclass(abc.ABCMeta)
class SquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
"""Base test class appropriate for square operators.
Sub-classes must still define all abstractmethods from
LinearOperatorDerivedClassTest that are not defined here.
"""
@property
def _operator_build_infos(self):
build_info = OperatorBuildInfo
# non-batch operators (n, n) and batch operators.
return [
build_info((0, 0)),
build_info((1, 1)),
build_info((1, 3, 3)),
build_info((3, 4, 4)),
build_info((2, 1, 4, 4))]
def _make_rhs(self, operator, adjoint, with_batch=True):
# This operator is square, so rhs and x will have same shape.
# adjoint value makes no difference because the operator shape doesn't
# change since it is square, but be pedantic.
return self._make_x(operator, adjoint=not adjoint, with_batch=with_batch)
def _make_x(self, operator, adjoint, with_batch=True):
# Value of adjoint makes no difference because the operator is square.
# Return the number of systems to solve, R, equal to 1 or 2.
r = self._get_num_systems(operator)
# If operator.shape = [B1,...,Bb, N, N] this returns a random matrix of
# shape [B1,...,Bb, N, R], R = 1 or 2.
if operator.shape.is_fully_defined():
batch_shape = operator.batch_shape.as_list()
n = operator.domain_dimension.value
if with_batch:
x_shape = batch_shape + [n, r]
else:
x_shape = [n, r]
else:
batch_shape = operator.batch_shape_tensor()
n = operator.domain_dimension_tensor()
if with_batch:
x_shape = array_ops.concat((batch_shape, [n, r]), 0)
else:
x_shape = [n, r]
return random_normal(x_shape, dtype=operator.dtype)
def _get_num_systems(self, operator):
"""Get some number, either 1 or 2, depending on operator."""
if operator.tensor_rank is None or operator.tensor_rank % 2:
return 1
else:
return 2
@six.add_metaclass(abc.ABCMeta)
class NonSquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
"""Base test class appropriate for generic rectangular operators.
Square shapes are never tested by this class, so if you want to test your
operator with a square shape, create two test classes, the other subclassing
SquareLinearOperatorFullMatrixTest.
Sub-classes must still define all abstractmethods from
LinearOperatorDerivedClassTest that are not defined here.
"""
@property
def _tests_to_skip(self):
"""List of test names to skip."""
return ["solve", "solve_with_broadcast", "det", "log_abs_det"]
@property
def _operator_build_infos(self):
build_info = OperatorBuildInfo
    # non-batch operators (m, n) and batch operators.
return [
build_info((2, 1)),
build_info((1, 2)),
build_info((1, 3, 2)),
build_info((3, 3, 4)),
build_info((2, 1, 2, 4))]
def _make_rhs(self, operator, adjoint, with_batch=True):
# TODO(langmore) Add once we're testing solve_ls.
raise NotImplementedError(
"_make_rhs not implemented because we don't test solve")
def _make_x(self, operator, adjoint, with_batch=True):
# Return the number of systems for the argument 'x' for .matmul(x)
r = self._get_num_systems(operator)
# If operator.shape = [B1,...,Bb, M, N] this returns a random matrix of
# shape [B1,...,Bb, N, R], R = 1 or 2.
if operator.shape.is_fully_defined():
batch_shape = operator.batch_shape.as_list()
if adjoint:
n = operator.range_dimension.value
else:
n = operator.domain_dimension.value
if with_batch:
x_shape = batch_shape + [n, r]
else:
x_shape = [n, r]
else:
batch_shape = operator.batch_shape_tensor()
if adjoint:
n = operator.range_dimension_tensor()
else:
n = operator.domain_dimension_tensor()
if with_batch:
x_shape = array_ops.concat((batch_shape, [n, r]), 0)
else:
x_shape = [n, r]
return random_normal(x_shape, dtype=operator.dtype)
def _get_num_systems(self, operator):
"""Get some number, either 1 or 2, depending on operator."""
if operator.tensor_rank is None or operator.tensor_rank % 2:
return 1
else:
return 2
def random_positive_definite_matrix(shape, dtype, force_well_conditioned=False):
"""[batch] positive definite matrix.
Args:
shape: `TensorShape` or Python list. Shape of the returned matrix.
dtype: `TensorFlow` `dtype` or Python dtype.
force_well_conditioned: Python bool. If `True`, returned matrix has
eigenvalues with modulus in `(1, 4)`. Otherwise, eigenvalues are
chi-squared random variables.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
if not tensor_util.is_tensor(shape):
shape = tensor_shape.TensorShape(shape)
# Matrix must be square.
shape[-1].assert_is_compatible_with(shape[-2])
with ops.name_scope("random_positive_definite_matrix"):
tril = random_tril_matrix(
shape, dtype, force_well_conditioned=force_well_conditioned)
return math_ops.matmul(tril, tril, adjoint_b=True)
def random_tril_matrix(shape,
dtype,
force_well_conditioned=False,
remove_upper=True):
"""[batch] lower triangular matrix.
Args:
shape: `TensorShape` or Python `list`. Shape of the returned matrix.
dtype: `TensorFlow` `dtype` or Python dtype
force_well_conditioned: Python `bool`. If `True`, returned matrix will have
eigenvalues with modulus in `(1, 2)`. Otherwise, eigenvalues are unit
normal random variables.
remove_upper: Python `bool`.
If `True`, zero out the strictly upper triangle.
If `False`, the lower triangle of returned matrix will have desired
properties, but will not have the strictly upper triangle zero'd out.
Returns:
`Tensor` with desired shape and dtype.
"""
with ops.name_scope("random_tril_matrix"):
# Totally random matrix. Has no nice properties.
tril = random_normal(shape, dtype=dtype)
if remove_upper:
tril = array_ops.matrix_band_part(tril, -1, 0)
# Create a diagonal with entries having modulus in [1, 2].
if force_well_conditioned:
maxval = ops.convert_to_tensor(np.sqrt(2.), dtype=dtype.real_dtype)
diag = random_sign_uniform(
shape[:-1], dtype=dtype, minval=1., maxval=maxval)
tril = array_ops.matrix_set_diag(tril, diag)
return tril
def random_normal(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, seed=None):
"""Tensor with (possibly complex) Gaussian entries.
Samples are distributed like
```
N(mean, stddev^2), if dtype is real,
X + iY, where X, Y ~ N(mean, stddev^2) if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
mean: `Tensor` giving mean of normal to sample from.
stddev: `Tensor` giving stdev of normal to sample from.
dtype: `TensorFlow` `dtype` or numpy dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_normal"):
samples = random_ops.random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
if dtype.is_complex:
if seed is not None:
seed += 1234
more_samples = random_ops.random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
samples = math_ops.complex(samples, more_samples)
return samples
def random_uniform(shape,
minval=None,
maxval=None,
dtype=dtypes.float32,
seed=None):
"""Tensor with (possibly complex) Uniform entries.
Samples are distributed like
```
Uniform[minval, maxval], if dtype is real,
X + iY, where X, Y ~ Uniform[minval, maxval], if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
minval: `0-D` `Tensor` giving the minimum values.
maxval: `0-D` `Tensor` giving the maximum values.
dtype: `TensorFlow` `dtype` or Python dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_uniform"):
samples = random_ops.random_uniform(
shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed)
if dtype.is_complex:
if seed is not None:
seed += 12345
more_samples = random_ops.random_uniform(
shape,
dtype=dtype.real_dtype,
minval=minval,
maxval=maxval,
seed=seed)
samples = math_ops.complex(samples, more_samples)
return samples
def random_sign_uniform(shape,
minval=None,
maxval=None,
dtype=dtypes.float32,
seed=None):
"""Tensor with (possibly complex) random entries from a "sign Uniform".
Letting `Z` be a random variable equal to `-1` and `1` with equal probability,
Samples from this `Op` are distributed like
```
Z * X, where X ~ Uniform[minval, maxval], if dtype is real,
Z * (X + iY), where X, Y ~ Uniform[minval, maxval], if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
minval: `0-D` `Tensor` giving the minimum values.
maxval: `0-D` `Tensor` giving the maximum values.
dtype: `TensorFlow` `dtype` or Python dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_sign_uniform"):
unsigned_samples = random_uniform(
shape, minval=minval, maxval=maxval, dtype=dtype, seed=seed)
if seed is not None:
seed += 12
signs = math_ops.sign(
random_ops.random_uniform(shape, minval=-1., maxval=1., seed=seed))
return unsigned_samples * math_ops.cast(signs, unsigned_samples.dtype)
def random_normal_correlated_columns(shape,
mean=0.0,
stddev=1.0,
dtype=dtypes.float32,
eps=1e-4,
seed=None):
"""Batch matrix with (possibly complex) Gaussian entries and correlated cols.
Returns random batch matrix `A` with specified element-wise `mean`, `stddev`,
living close to an embedded hyperplane.
Suppose `shape[-2:] = (M, N)`.
If `M < N`, `A` is a random `M x N` [batch] matrix with iid Gaussian entries.
  If `M >= N`, then the columns of `A` will be made almost dependent as follows:
```
L = random normal N x N-1 matrix, mean = 0, stddev = 1 / sqrt(N - 1)
B = random normal M x N-1 matrix, mean = 0, stddev = stddev.
G = (L B^H)^H, a random normal M x N matrix, living on N-1 dim hyperplane
E = a random normal M x N matrix, mean = 0, stddev = eps
mu = a constant M x N matrix, equal to the argument "mean"
A = G + E + mu
```
Args:
shape: Python list of integers.
Shape of the returned tensor. Must be at least length two.
mean: `Tensor` giving mean of normal to sample from.
stddev: `Tensor` giving stdev of normal to sample from.
dtype: `TensorFlow` `dtype` or numpy dtype
eps: Distance each column is perturbed from the low-dimensional subspace.
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
Raises:
ValueError: If `shape` is not at least length 2.
"""
dtype = dtypes.as_dtype(dtype)
if len(shape) < 2:
raise ValueError(
"Argument shape must be at least length 2. Found: %s" % shape)
# Shape is the final shape, e.g. [..., M, N]
shape = list(shape)
batch_shape = shape[:-2]
m, n = shape[-2:]
# If there is only one column, "they" are by definition correlated.
if n < 2 or n < m:
return random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
# Shape of the matrix with only n - 1 columns that we will embed in higher
# dimensional space.
smaller_shape = batch_shape + [m, n - 1]
# Shape of the embedding matrix, mapping batch matrices
# from [..., N-1, M] to [..., N, M]
embedding_mat_shape = batch_shape + [n, n - 1]
# This stddev for the embedding_mat ensures final result has correct stddev.
stddev_mat = 1 / np.sqrt(n - 1)
with ops.name_scope("random_normal_correlated_columns"):
smaller_mat = random_normal(
smaller_shape, mean=0.0, stddev=stddev_mat, dtype=dtype, seed=seed)
if seed is not None:
seed += 1287
embedding_mat = random_normal(embedding_mat_shape, dtype=dtype, seed=seed)
embedded_t = math_ops.matmul(embedding_mat, smaller_mat, transpose_b=True)
embedded = array_ops.matrix_transpose(embedded_t)
mean_mat = array_ops.ones_like(embedded) * mean
return embedded + random_normal(shape, stddev=eps, dtype=dtype) + mean_mat
|
nburn42/tensorflow
|
tensorflow/python/ops/linalg/linear_operator_test_util.py
|
Python
|
apache-2.0
| 28,767
|
"""Define constants for the SimpliSafe component."""
from datetime import timedelta
DOMAIN = "simplisafe"
DATA_CLIENT = "client"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=30)
TOPIC_UPDATE = "update"
|
fbradyirl/home-assistant
|
homeassistant/components/simplisafe/const.py
|
Python
|
apache-2.0
| 203
|
# $Id: stp.py 23 2006-11-08 15:45:33Z dugsong $
# -*- coding: utf-8 -*-
"""Spanning Tree Protocol."""
import dpkt
class STP(dpkt.Packet):
__hdr__ = (
('proto_id', 'H', 0),
('v', 'B', 0),
('type', 'B', 0),
('flags', 'B', 0),
('root_id', '8s', ''),
('root_path', 'I', 0),
('bridge_id', '8s', ''),
('port_id', 'H', 0),
('_age', 'H', 0),
('_max_age', 'H', 0),
('_hello', 'H', 0),
('_fd', 'H', 0)
)
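    # The BPDU timer fields (message age, max age, hello time, forward delay)
    # are carried as 16-bit values in units of 1/256 second; the properties
    # below expose the whole-second part by shifting out the low-order byte.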
@property
def age(self):
return self._age >> 8
@age.setter
def age(self, age):
self._age = age << 8
@property
def max_age(self):
return self._max_age >> 8
@max_age.setter
def max_age(self, max_age):
self._max_age = max_age << 8
@property
def hello(self):
return self._hello >> 8
@hello.setter
def hello(self, hello):
self._hello = hello << 8
@property
def fd(self):
return self._fd >> 8
@fd.setter
def fd(self, fd):
self._fd = fd << 8
def test_stp():
buf = '\x00\x00\x02\x02\x3e\x80\x00\x08\x00\x27\xad\xa3\x41\x00\x00\x00\x00\x80\x00\x08\x00\x27\xad\xa3\x41\x80\x01\x00\x00\x14\x00\x02\x00\x0f\x00\x00\x00\x00\x00\x02\x00\x14\x00'
stp = STP(buf)
assert stp.proto_id == 0
assert stp.port_id == 0x8001
assert stp.age == 0
assert stp.max_age == 20
assert stp.hello == 2
assert stp.fd == 15
assert str(stp) == buf
stp.fd = 100
assert stp.pack_hdr()[-2:] == '\x64\x00' # 100 << 8
if __name__ == '__main__':
# Runs all the test associated with this class/file
test_stp()
print 'Tests Successful...'
|
lkash/test
|
dpkt/stp.py
|
Python
|
bsd-3-clause
| 1,695
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
# Test import of new-style email module names.
# This should work on Python 2.5+
from email import utils
from email.header import Header
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
|
bl4ckdu5t/registron
|
tests/basic/test_email.py
|
Python
|
mit
| 681
|
# Natural Language Toolkit: Decision Tree Classifiers
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
A classifier model that decides which label to assign to a token on
the basis of a tree structure, where branches correspond to conditions
on feature values, and leaves correspond to label assignments.
"""
from __future__ import print_function, unicode_literals, division
from collections import defaultdict
from nltk.probability import FreqDist, MLEProbDist, entropy
from nltk.classify.api import ClassifierI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class DecisionTreeClassifier(ClassifierI):
def __init__(self, label, feature_name=None, decisions=None, default=None):
"""
:param label: The most likely label for tokens that reach
this node in the decision tree. If this decision tree
has no children, then this label will be assigned to
any token that reaches this decision tree.
:param feature_name: The name of the feature that this
decision tree selects for.
:param decisions: A dictionary mapping from feature values
for the feature identified by ``feature_name`` to
child decision trees.
:param default: The child that will be used if the value of
feature ``feature_name`` does not match any of the keys in
``decisions``. This is used when constructing binary
decision trees.
"""
self._label = label
self._fname = feature_name
self._decisions = decisions
self._default = default
def labels(self):
labels = [self._label]
if self._decisions is not None:
for dt in self._decisions.values():
labels.extend(dt.labels())
if self._default is not None:
labels.extend(self._default.labels())
return list(set(labels))
def classify(self, featureset):
# Decision leaf:
if self._fname is None:
return self._label
# Decision tree:
fval = featureset.get(self._fname)
if fval in self._decisions:
return self._decisions[fval].classify(featureset)
elif self._default is not None:
return self._default.classify(featureset)
else:
return self._label
def error(self, labeled_featuresets):
errors = 0
for featureset, label in labeled_featuresets:
if self.classify(featureset) != label:
errors += 1
return errors/len(labeled_featuresets)
def pretty_format(self, width=70, prefix='', depth=4):
"""
Return a string containing a pretty-printed version of this
decision tree. Each line in this string corresponds to a
single decision tree node or leaf, and indentation is used to
display the structure of the decision tree.
"""
# [xx] display default!!
if self._fname is None:
n = width-len(prefix)-15
return '{0}{1} {2}\n'.format(prefix, '.'*n, self._label)
s = ''
for i, (fval, result) in enumerate(sorted(self._decisions.items())):
hdr = '{0}{1}={2}? '.format(prefix, self._fname, fval)
n = width-15-len(hdr)
s += '{0}{1} {2}\n'.format(hdr, '.'*(n), result._label)
if result._fname is not None and depth>1:
s += result.pretty_format(width, prefix+' ', depth-1)
if self._default is not None:
n = width-len(prefix)-21
s += '{0}else: {1} {2}\n'.format(prefix, '.'*n, self._default._label)
if self._default._fname is not None and depth>1:
s += self._default.pretty_format(width, prefix+' ', depth-1)
return s
def pseudocode(self, prefix='', depth=4):
"""
Return a string representation of this decision tree that
expresses the decisions it makes as a nested set of pseudocode
if statements.
"""
if self._fname is None:
return "{0}return {1!r}\n".format(prefix, self._label)
s = ''
for (fval, result) in sorted(self._decisions.items()):
s += '{0}if {1} == {2!r}: '.format(prefix, self._fname, fval)
if result._fname is not None and depth>1:
s += '\n'+result.pseudocode(prefix+' ', depth-1)
else:
s += 'return {0!r}\n'.format(result._label)
if self._default is not None:
if len(self._decisions) == 1:
s += '{0}if {1} != {2!r}: '.format(prefix, self._fname,
list(self._decisions.keys())[0])
else:
s += '{0}else: '.format(prefix)
if self._default._fname is not None and depth>1:
s += '\n'+self._default.pseudocode(prefix+' ', depth-1)
else:
s += 'return {0!r}\n'.format(self._default._label)
return s
def __str__(self):
return self.pretty_format()
@staticmethod
def train(labeled_featuresets, entropy_cutoff=0.05, depth_cutoff=100,
support_cutoff=10, binary=False, feature_values=None,
verbose=False):
"""
:param binary: If true, then treat all feature/value pairs as
individual binary features, rather than using a single n-way
branch for each feature.
"""
# Collect a list of all feature names.
feature_names = set()
for featureset, label in labeled_featuresets:
for fname in featureset:
feature_names.add(fname)
# Collect a list of the values each feature can take.
if feature_values is None and binary:
feature_values = defaultdict(set)
for featureset, label in labeled_featuresets:
for fname, fval in featureset.items():
feature_values[fname].add(fval)
# Start with a stump.
if not binary:
tree = DecisionTreeClassifier.best_stump(
feature_names, labeled_featuresets, verbose)
else:
tree = DecisionTreeClassifier.best_binary_stump(
feature_names, labeled_featuresets, feature_values, verbose)
# Refine the stump.
tree.refine(labeled_featuresets, entropy_cutoff, depth_cutoff-1,
support_cutoff, binary, feature_values, verbose)
# Return it
return tree
@staticmethod
def leaf(labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
return DecisionTreeClassifier(label)
@staticmethod
def stump(feature_name, labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
# Find the best label for each value.
freqs = defaultdict(FreqDist) # freq(label|value)
for featureset, label in labeled_featuresets:
feature_value = featureset.get(feature_name)
freqs[feature_value][label] += 1
decisions = dict((val, DecisionTreeClassifier(freqs[val].max()))
for val in freqs)
return DecisionTreeClassifier(label, feature_name, decisions)
def refine(self, labeled_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary=False, feature_values=None,
verbose=False):
if len(labeled_featuresets) <= support_cutoff: return
if self._fname is None: return
if depth_cutoff <= 0: return
for fval in self._decisions:
fval_featuresets = [(featureset, label) for (featureset, label)
in labeled_featuresets
if featureset.get(self._fname) == fval]
label_freqs = FreqDist(label for (featureset, label)
in fval_featuresets)
if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
self._decisions[fval] = DecisionTreeClassifier.train(
fval_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary, feature_values, verbose)
if self._default is not None:
default_featuresets = [(featureset, label) for (featureset, label)
in labeled_featuresets
if featureset.get(self._fname) not in
self._decisions]
label_freqs = FreqDist(label for (featureset, label)
in default_featuresets)
if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
self._default = DecisionTreeClassifier.train(
default_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary, feature_values, verbose)
@staticmethod
def best_stump(feature_names, labeled_featuresets, verbose=False):
best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
best_error = best_stump.error(labeled_featuresets)
for fname in feature_names:
stump = DecisionTreeClassifier.stump(fname, labeled_featuresets)
stump_error = stump.error(labeled_featuresets)
if stump_error < best_error:
best_error = stump_error
best_stump = stump
if verbose:
print(('best stump for {:6d} toks uses {:20} err={:6.4f}'.format \
(len(labeled_featuresets), best_stump._fname, best_error)))
return best_stump
@staticmethod
def binary_stump(feature_name, feature_value, labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
# Find the best label for each value.
pos_fdist = FreqDist()
neg_fdist = FreqDist()
for featureset, label in labeled_featuresets:
if featureset.get(feature_name) == feature_value:
pos_fdist[label] += 1
else:
neg_fdist[label] += 1
decisions = {}
default = label
# But hopefully we have observations!
if pos_fdist.N() > 0:
decisions = {feature_value: DecisionTreeClassifier(pos_fdist.max())}
if neg_fdist.N() > 0:
default = DecisionTreeClassifier(neg_fdist.max())
return DecisionTreeClassifier(label, feature_name, decisions, default)
@staticmethod
def best_binary_stump(feature_names, labeled_featuresets, feature_values,
verbose=False):
best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
best_error = best_stump.error(labeled_featuresets)
for fname in feature_names:
for fval in feature_values[fname]:
stump = DecisionTreeClassifier.binary_stump(
fname, fval, labeled_featuresets)
stump_error = stump.error(labeled_featuresets)
if stump_error < best_error:
best_error = stump_error
best_stump = stump
if best_stump._decisions:
descr = '{0}={1}'.format(best_stump._fname,
list(best_stump._decisions.keys())[0])
else:
descr = '(default)'
if verbose:
print(('best stump for {:6d} toks uses {:20} err={:6.4f}'.format \
(len(labeled_featuresets), descr, best_error)))
return best_stump
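# A minimal usage sketch with made-up data (illustrative only):
#
#     train = [({'size': 'big'}, 'yes'), ({'size': 'small'}, 'no')]
#     tree = DecisionTreeClassifier.train(train)
#     tree.classify({'size': 'big'})   # -> 'yes'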
##//////////////////////////////////////////////////////
## Demo
##//////////////////////////////////////////////////////
def f(x):
return DecisionTreeClassifier.train(x, binary=True, verbose=True)
def demo():
from nltk.classify.util import names_demo, binary_names_demo_features
classifier = names_demo(f, #DecisionTreeClassifier.train,
binary_names_demo_features)
    print(classifier.pretty_format(depth=7))
print(classifier.pseudocode(depth=7))
if __name__ == '__main__':
demo()
|
sdoran35/hate-to-hugs
|
venv/lib/python3.6/site-packages/nltk/classify/decisiontree.py
|
Python
|
mit
| 12,314
|
#!/usr/bin/env python
import glob
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness
from openmc.statepoint import StatePoint
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* files have been created."""
statepoint = glob.glob(os.path.join(os.getcwd(), 'statepoint.*'))
assert len(statepoint) == 5, '5 statepoint files must exist.'
assert statepoint[0].endswith('h5'), \
'Statepoint file is not a HDF5 file.'
def _get_results(self):
"""Digest info in the statepoint and return as a string."""
# Read the statepoint file.
statepoint = glob.glob(os.path.join(os.getcwd(), self._sp_name))[0]
sp = StatePoint(statepoint)
# Get the eigenvalue information.
outstr = TestHarness._get_results(self)
# Add the source information.
xyz = sp.source[0]['xyz']
outstr += ' '.join(['{0:12.6E}'.format(x) for x in xyz])
outstr += "\n"
return outstr
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.08.*')
harness.main()
|
kellyrowland/openmc
|
tests/test_sourcepoint_interval/test_sourcepoint_interval.py
|
Python
|
mit
| 1,176
|
import logging
from django.core.management.base import BaseCommand
from readthedocs.projects import tasks
from readthedocs.api.client import api
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Build documentation using the API and not hitting a database.
Usage::
./manage.py update_api <slug>
"""
def add_arguments(self, parser):
parser.add_argument('--docker', action='store_true', default=False)
parser.add_argument('projects', nargs='+', type=str)
def handle(self, *args, **options):
docker = options.get('docker', False)
for slug in options['projects']:
project_data = api.project(slug).get()
p = tasks.make_api_project(project_data)
log.info("Building %s" % p)
tasks.update_docs.run(pk=p.pk, docker=docker)
|
SteveViss/readthedocs.org
|
readthedocs/core/management/commands/update_api.py
|
Python
|
mit
| 851
|
import unittest
from chainer import cuda
from chainer import initializers
from chainer import testing
from chainer.testing import attr
import numpy
@testing.parameterize(*testing.product({
'target': [
initializers.Uniform,
initializers.LeCunUniform,
initializers.HeUniform,
initializers.GlorotUniform,
],
'shape': [(2, 3), (2, 3, 4)],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestUniform(unittest.TestCase):
scale = 0.1
def check_initializer(self, w):
initializer = self.target(scale=self.scale)
initializer(w)
self.assertTupleEqual(w.shape, self.shape)
self.assertEqual(w.dtype, self.dtype)
def test_initializer_cpu(self):
w = numpy.empty(self.shape, dtype=self.dtype)
self.check_initializer(w)
@attr.gpu
def test_initializer_gpu(self):
w = cuda.cupy.empty(self.shape, dtype=self.dtype)
self.check_initializer(w)
def check_shaped_initializer(self, xp):
initializer = self.target(scale=self.scale, dtype=self.dtype)
w = initializers.generate_array(initializer, self.shape, xp)
self.assertIs(cuda.get_array_module(w), xp)
self.assertTupleEqual(w.shape, self.shape)
self.assertEqual(w.dtype, self.dtype)
def test_shaped_initializer_cpu(self):
self.check_shaped_initializer(numpy)
@attr.gpu
def test_shaped_initializer_gpu(self):
self.check_shaped_initializer(cuda.cupy)
testing.run_module(__name__, __file__)
|
kiyukuta/chainer
|
tests/chainer_tests/initializer_tests/test_uniform.py
|
Python
|
mit
| 1,548
|
from test import support
from test.support import bigaddrspacetest, MAX_Py_ssize_t
import unittest
import operator
import sys
class StrTest(unittest.TestCase):
@bigaddrspacetest
def test_concat(self):
s1 = 'x' * MAX_Py_ssize_t
self.assertRaises(OverflowError, operator.add, s1, '?')
@bigaddrspacetest
def test_optimized_concat(self):
x = 'x' * MAX_Py_ssize_t
try:
x = x + '?' # this statement uses a fast path in ceval.c
except OverflowError:
pass
else:
self.fail("should have raised OverflowError")
try:
x += '?' # this statement uses a fast path in ceval.c
except OverflowError:
pass
else:
self.fail("should have raised OverflowError")
self.assertEquals(len(x), MAX_Py_ssize_t)
### the following test is pending a patch
# (http://mail.python.org/pipermail/python-dev/2006-July/067774.html)
#@bigaddrspacetest
#def test_repeat(self):
# self.assertRaises(OverflowError, operator.mul, 'x', MAX_Py_ssize_t + 1)
def test_main():
support.run_unittest(StrTest)
if __name__ == '__main__':
if len(sys.argv) > 1:
support.set_memlimit(sys.argv[1])
test_main()
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.1/Lib/test/test_bigaddrspace.py
|
Python
|
mit
| 1,284
|
from searchv2.tests.test_builders import *
from searchv2.tests.test_models import *
from searchv2.tests.test_utils import *
from searchv2.tests.test_views import *
|
pydanny/djangopackages
|
searchv2/tests/__init__.py
|
Python
|
mit
| 163
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from __future__ import print_function
import fixpath
import colorama
from colorama import Fore, Back, Style
from random import randint, choice
from string import printable
# Fore, Back and Style are convenience classes for the constant ANSI strings that set
# the foreground, background and style. They don't have any magic of their own.
FORES = [ Fore.BLACK, Fore.RED, Fore.GREEN, Fore.YELLOW, Fore.BLUE, Fore.MAGENTA, Fore.CYAN, Fore.WHITE ]
BACKS = [ Back.BLACK, Back.RED, Back.GREEN, Back.YELLOW, Back.BLUE, Back.MAGENTA, Back.CYAN, Back.WHITE ]
STYLES = [ Style.DIM, Style.NORMAL, Style.BRIGHT ]
# This assumes your terminal is 80x24. Ansi minimum coordinate is (1,1).
MINY, MAXY = 1, 24
MINX, MAXX = 1, 80
# set of printable ASCII characters, including a space.
CHARS = ' ' + printable.strip()
PASSES = 1000
def main():
colorama.init()
# gratuitous use of lambda.
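    # '\x1b[<row>;<col>H' is the ANSI cursor-position (CUP) escape sequence.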
pos = lambda y, x: '\x1b[%d;%dH' % (y, x)
# draw a white border.
print(Back.WHITE, end='')
print('%s%s' % (pos(MINY, MINX), ' '*MAXX), end='')
for y in range(MINY, 1+MAXY):
print('%s %s ' % (pos(y, MINX), pos(y, MAXX)), end='')
print('%s%s' % (pos(MAXY, MINX), ' '*MAXX), end='')
# draw some blinky lights for a while.
for i in range(PASSES):
print('%s%s%s%s%s' % (pos(randint(1+MINY,MAXY-1), randint(1+MINX,MAXX-1)), choice(FORES), choice(BACKS), choice(STYLES), choice(CHARS)), end='')
# put cursor to top, left, and set color to white-on-black with normal brightness.
print('%s%s%s%s' % (pos(MINY, MINX), Fore.WHITE, Back.BLACK, Style.NORMAL), end='')
if __name__ == '__main__':
main()
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/third_party/colorama/src/demos/demo06.py
|
Python
|
mit
| 1,711
|
import json
import unittest
import mock
from django.http import HttpResponseBadRequest
from base import (assert_auth_CREATE, assert_auth_READ, assert_auth_UPDATE, assert_auth_DELETE,
assert_auth_EXECUTE)
from pulp.server.exceptions import InvalidValue, MissingResource, MissingValue, OperationPostponed
from pulp.server.managers.consumer.group import query
from pulp.server.webservices.views import util
from pulp.server.webservices.views.consumer_groups import (serialize,
ConsumerGroupAssociateActionView,
ConsumerGroupBindingView,
ConsumerGroupBindingsView,
ConsumerGroupContentActionView,
ConsumerGroupResourceView,
ConsumerGroupSearchView,
ConsumerGroupUnassociateActionView,
ConsumerGroupView,)
class TestconsumerGroupView(unittest.TestCase):
"""
Test consumer groups view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_all_consumer_groups(self, mock_collection, mock_resp):
"""
Test the consumer groups retrieval.
"""
consumer_mock = mock.MagicMock()
resp = [{'id': 'foo', 'display_name': 'bar'}]
consumer_mock.find.return_value = resp
mock_collection.return_value = consumer_mock
request = mock.MagicMock()
consumer_group = ConsumerGroupView()
response = consumer_group.get(request)
expected_cont = [{'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}]
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.generate_redirect_response')
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_create_consumer_group(self, mock_factory, mock_resp, mock_redirect):
"""
Test consumer group creation.
"""
resp = {'id': 'foo', 'display_name': 'bar'}
expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar'})
mock_factory.consumer_group_manager.return_value.create_consumer_group.return_value = resp
consumer_group = ConsumerGroupView()
response = consumer_group.post(request)
mock_resp.assert_called_once_with(expected_cont)
mock_redirect.assert_called_once_with(mock_resp.return_value, expected_cont['_href'])
self.assertTrue(response is mock_redirect.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_invalid_param(self):
"""
Test consumer group creation with invalid parameters.
"""
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar', 'invalid_param': 'some'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except InvalidValue, response:
pass
else:
raise AssertionError("Invalidvalue should be raised with invalid options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['invalid_param'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_missing_param(self):
"""
Test consumer group creation with missing required group id.
"""
request = mock.MagicMock()
request.body = json.dumps({'display_name': 'bar'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except MissingValue, response:
pass
else:
raise AssertionError("MissingValue should be raised with missing options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['id'])
class TestConsumerGroupResourceView(unittest.TestCase):
"""
Test consumer groups resource view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_DELETE())
@mock.patch('pulp.server.webservices.views.consumer_groups.generate_json_response')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_delete_consumer_group_resource(self, mock_factory, mock_resp):
"""
Test consumer group delete resource.
"""
mock_group_manager = mock.MagicMock()
mock_factory.consumer_group_manager.return_value = mock_group_manager
mock_group_manager.delete_consumer_group.return_value = None
request = mock.MagicMock()
consumer_group_resource = ConsumerGroupResourceView()
response = consumer_group_resource.delete(request, 'test-group')
mock_group_manager.delete_consumer_group.assert_called_once_with('test-group')
mock_resp.assert_called_once_with(None)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_consumer_group_resource(self, mock_collection, mock_resp):
"""
Test single consumer group retrieval.
"""
consumer_mock = mock.MagicMock()
consumer_mock.find_one.return_value = {'id': 'foo'}
mock_collection.return_value = consumer_mock
request = mock.MagicMock()
consumer_group = ConsumerGroupResourceView()
response = consumer_group.get(request, 'foo')
expected_cont = {'id': 'foo', '_href': '/v2/consumer_groups/foo/'}
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_invalid_consumer_group_resource(self, mock_collection):
"""
Test nonexistent consumer group retrieval.
"""
mock_collection.return_value.find_one.return_value = None
request = mock.MagicMock()
consumer_group = ConsumerGroupResourceView()
try:
response = consumer_group.get(request, 'nonexistent_id')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent_group")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'consumer_group': 'nonexistent_id'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_UPDATE())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_update_consumer_group(self, mock_factory, mock_resp):
"""
Test consumer group update.
"""
resp = {'id': 'foo', 'display_name': 'bar'}
expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}
request = mock.MagicMock()
request.body = json.dumps({'display_name': 'bar'})
mock_factory.consumer_group_manager.return_value.update_consumer_group.return_value = resp
consumer_group = ConsumerGroupResourceView()
response = consumer_group.put(request, 'foo')
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupSearchView(unittest.TestCase):
"""
Tests for ConsumerGroupSearchView.
"""
def test_class_attributes(self):
"""
Ensure that class attributes are set correctly.
"""
consumer_group_search = ConsumerGroupSearchView()
self.assertTrue(isinstance(consumer_group_search.manager, query.ConsumerGroupQueryManager))
self.assertEqual(consumer_group_search.response_builder,
util.generate_json_response_with_pulp_encoder)
self.assertEqual(consumer_group_search.serializer, serialize)
class TestConsumerGroupAssociateActionView(unittest.TestCase):
"""
Tests consumer group membership.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_EXECUTE())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_cons_group_association_view(self, mock_factory, mock_resp):
"""
Test consumer group associate a consumer.
"""
grp = {'id': 'my-group', 'consumer_ids': ['c1']}
mock_factory.consumer_group_manager.return_value.associate.return_value = 'ok'
mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp
request = mock.MagicMock()
request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}})
consumer_group_associate = ConsumerGroupAssociateActionView()
response = consumer_group_associate.post(request, 'my-group')
mock_resp.assert_called_once_with(['c1'])
self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupUnassociateActionView(unittest.TestCase):
"""
Tests consumer group membership.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_EXECUTE())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_cons_group_unassociation_view(self, mock_factory, mock_resp):
"""
Test consumer group unassociate a consumer.
"""
grp = {'id': 'my-group', 'consumer_ids': []}
mock_factory.consumer_group_manager.return_value.unassociate.return_value = 'ok'
mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp
request = mock.MagicMock()
request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}})
consumer_group_unassociate = ConsumerGroupUnassociateActionView()
response = consumer_group_unassociate.post(request, 'my-group')
mock_resp.assert_called_once_with([])
self.assertTrue(response is mock_resp.return_value)
class TestConsumerGroupBindingsView(unittest.TestCase):
"""
    Tests for the consumer group bindings view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
def test_verify_group_resources_repo(self, mock_repo_qs, mock_dist_qs, mock_factory):
"""
Test verify group resources with repo missing.
"""
mock_factory.consumer_group_query_manager.return_value.get_group.return_value = 'test-group'
mock_repo_qs().first.return_value = None
mock_dist_qs.get_or_404.side_effect = MissingResource
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['repo_id', 'distributor_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
def test_verify_group_resources_distributor(self, mock_repo_qs, mock_dist_qs, mock_f):
"""
Test verify group resources with distributor missing.
"""
mock_f.consumer_group_query_manager.return_value.get_group.return_value = 'test'
mock_repo_qs.first.return_value = 'xxx'
mock_dist_qs.get_or_404.side_effect = MissingResource
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['distributor_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_verify_group_resources_group(self, mock_f, mock_dist_qs, mock_repo_qs):
"""
Test verify group resources with group id missing.
"""
mock_f.consumer_group_query_manager.return_value.get_group.side_effect = MissingResource
mock_repo_qs.first.return_value = 'xxx'
mock_dist_qs.get_or_404.return_value = 'yyy'
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent resources")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'group_id': 'test-group'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.bind')
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding(self, mock_resources, mock_bind):
"""
Test bind consumer group to a repo.
"""
mock_resources.return_value = {}
request = mock.MagicMock()
request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'})
bind_view = ConsumerGroupBindingsView()
self.assertRaises(OperationPostponed, bind_view.post, request, 'test-group')
bind_args_tuple = ('test-group', 'xxx', 'yyy', True, None, {})
mock_bind.apply_async.assert_called_once_with(bind_args_tuple)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_missing_group_id(self, mock_resources):
"""
Test bind consumer group to a repo when group id missing.
"""
mock_resources.return_value = {'group_id': 'nonexistent_id'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'nonexistent_id')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent_group")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'group_id': 'nonexistent_id'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_missing_repo_id(self, mock_resources):
"""
Test bind consumer group to a repo when repo id is missing.
"""
mock_resources.return_value = {'repo_id': 'nonexistent_id'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("InvalidValue should be raised with nonexistent_repo")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['repo_id'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_create_binding_with_invalid_param(self, mock_resources):
"""
        Test bind consumer group to a repo with invalid parameters.
"""
mock_resources.return_value = {'invalid_param': 'foo'}
request = mock.MagicMock()
request.body = json.dumps({})
bind_view = ConsumerGroupBindingsView()
try:
response = bind_view.post(request, 'test-group')
except InvalidValue, response:
pass
else:
raise AssertionError("Invalidvalue should be raised with invalid options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['invalid_param'])
class TestConsumerGroupBindingView(unittest.TestCase):
"""
    Tests for a specific consumer group binding.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_DELETE())
@mock.patch('pulp.server.webservices.views.consumer_groups.unbind')
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_delete_binding(self, mock_resources, mock_unbind):
"""
Test consumer group binding removal.
"""
mock_resources.return_value = {}
request = mock.MagicMock()
unbind_view = ConsumerGroupBindingView()
self.assertRaises(OperationPostponed, unbind_view.delete, request,
"consumer_group_id", "repo_id", "distributor_id")
unbind_args_tuple = ("consumer_group_id", "repo_id", "distributor_id", {})
mock_unbind.apply_async.assert_called_once_with(unbind_args_tuple)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_DELETE())
@mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources')
def test_delete_non_existent_binding(self, mock_resources):
"""
Test consumer group nonexistent binding removal.
"""
mock_resources.return_value = {'repo_id': 'no_such_repo'}
request = mock.MagicMock()
unbind_view = ConsumerGroupBindingView()
try:
response = unbind_view.delete(request, 'test-group', 'no_such_repo', 'dist_id')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with missing options")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'repo_id': 'no_such_repo'})
class TestConsumerGroupContentActionView(unittest.TestCase):
"""
Test Consumer group content manipulation.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_consumer_group_bad_request_content(self):
"""
Test consumer group invalid content action.
"""
request = mock.MagicMock()
request.body = json.dumps('')
consumer_group_content = ConsumerGroupContentActionView()
response = consumer_group_content.post(request, 'my-group', 'no_such_action')
self.assertTrue(isinstance(response, HttpResponseBadRequest))
self.assertEqual(response.status_code, 400)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_consumer_group_content_install(self, mock_factory):
"""
Test consumer group content installation.
"""
mock_factory.consumer_group_manager.return_value.install_content.return_value = 'ok'
request = mock.MagicMock()
request.body = json.dumps({"units": [], "options": {}})
consumer_group_content = ConsumerGroupContentActionView()
self.assertRaises(OperationPostponed, consumer_group_content.post, request,
'my-group', 'install')
mock_factory.consumer_group_manager().install_content.assert_called_once_with(
'my-group', [], {})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_consumer_group_content_update(self, mock_factory):
"""
Test consumer group content update.
"""
mock_factory.consumer_group_manager.return_value.update_content.return_value = 'ok'
request = mock.MagicMock()
request.body = json.dumps({"units": [], "options": {}})
consumer_group_content = ConsumerGroupContentActionView()
self.assertRaises(OperationPostponed, consumer_group_content.post, request,
'my-group', 'update')
mock_factory.consumer_group_manager().update_content.assert_called_once_with(
'my-group', [], {})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_consumer_group_content_uninstall(self, mock_factory):
"""
Test consumer group content uninstall.
"""
mock_factory.consumer_group_manager.return_value.uninstall_content.return_value = 'ok'
request = mock.MagicMock()
request.body = json.dumps({"units": [], "options": {}})
consumer_group_content = ConsumerGroupContentActionView()
self.assertRaises(OperationPostponed, consumer_group_content.post, request,
'my-group', 'uninstall')
mock_factory.consumer_group_manager().uninstall_content.assert_called_once_with(
'my-group', [], {})
|
ulif/pulp
|
server/test/unit/server/webservices/views/test_consumer_groups.py
|
Python
|
gpl-2.0
| 24,673
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fnmatch
import os
import re
import itertools
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.loader import inventory_loader
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
IGNORED_ALWAYS = [br"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently '''
# FIXME: this goes away if we apply patterns incrementally or by groups
pattern_regular = []
pattern_intersection = []
pattern_exclude = []
for p in patterns:
if p.startswith("!"):
pattern_exclude.append(p)
elif p.startswith("&"):
pattern_intersection.append(p)
elif p:
pattern_regular.append(p)
    # if no regular pattern was given (only exclude and/or intersection
    # patterns), imply 'all' so the selection still works
if pattern_regular == []:
pattern_regular = ['all']
# when applying the host selectors, run those without the "&" or "!"
# first, then the &s, then the !s.
return pattern_regular + pattern_intersection + pattern_exclude
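# Illustrative sketch, not part of the original module: a couple of expected
# outputs of order_patterns(); the group names below are assumptions.
def _example_order_patterns():
    # plain selections come first, then intersections ('&'), then exclusions ('!')
    assert order_patterns(['!db', 'web*', '&staging']) == ['web*', '&staging', '!db']
    # with only exclusions and/or intersections, 'all' is implied as the base selection
    assert order_patterns(['&staging', '!db']) == ['all', '&staging', '!db']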
def split_host_pattern(pattern):
"""
Takes a string containing host patterns separated by commas (or a list
thereof) and returns a list of single patterns (which may not contain
commas). Whitespace is ignored.
Also accepts ':' as a separator for backwards compatibility, but it is
not recommended due to the conflict with IPv6 addresses and host ranges.
Example: 'a,b[1], c[2:3] , d' -> ['a', 'b[1]', 'c[2:3]', 'd']
"""
if isinstance(pattern, list):
return list(itertools.chain(*map(split_host_pattern, pattern)))
elif not isinstance(pattern, string_types):
pattern = to_native(pattern)
# If it's got commas in it, we'll treat it as a straightforward
# comma-separated list of patterns.
if ',' in pattern:
patterns = pattern.split(',')
# If it doesn't, it could still be a single pattern. This accounts for
# non-separator uses of colons: IPv6 addresses and [x:y] host ranges.
else:
try:
(base, port) = parse_address(pattern, allow_ranges=True)
patterns = [pattern]
except Exception:
# The only other case we accept is a ':'-separated list of patterns.
# This mishandles IPv6 addresses, and is retained only for backwards
# compatibility.
patterns = re.findall(
r'''(?: # We want to match something comprising:
[^\s:\[\]] # (anything other than whitespace or ':[]'
| # ...or...
\[[^\]]*\] # a single complete bracketed expression)
)+ # occurring once or more
''', pattern, re.X
)
return [p.strip() for p in patterns]
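# Illustrative sketch, not part of the original module: a few expected splits
# from split_host_pattern(); the host names are assumptions.
def _example_split_host_pattern():
    # comma-separated patterns are split and whitespace-stripped (docstring example)
    assert split_host_pattern('a,b[1], c[2:3] , d') == ['a', 'b[1]', 'c[2:3]', 'd']
    # a list of patterns is flattened recursively
    assert split_host_pattern(['a,b', 'c']) == ['a', 'b', 'c']
    # a single pattern without separators comes back unchanged
    assert split_host_pattern('web[1:3]') == ['web[1:3]']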
class InventoryManager(object):
''' Creates and manages inventory '''
def __init__(self, loader, sources=None):
# base objects
self._loader = loader
self._inventory = InventoryData()
        # a list of host names used to restrict the current queries
self._restriction = None
self._subset = None
# caches
self._hosts_patterns_cache = {} # resolved full patterns
self._pattern_cache = {} # resolved individual patterns
self._inventory_plugins = [] # for generating inventory
# the inventory dirs, files, script paths or lists of hosts
if sources is None:
self._sources = []
elif isinstance(sources, string_types):
self._sources = [sources]
else:
self._sources = sources
# get to work!
self.parse_sources(cache=True)
@property
def localhost(self):
return self._inventory.localhost
@property
def groups(self):
return self._inventory.groups
@property
def hosts(self):
return self._inventory.hosts
def get_vars(self, *args, **kwargs):
return self._inventory.get_vars(args, kwargs)
def add_host(self, host, group=None, port=None):
return self._inventory.add_host(host, group, port)
def add_group(self, group):
return self._inventory.add_group(group)
def get_groups_dict(self):
return self._inventory.get_groups_dict()
def reconcile_inventory(self):
self.clear_caches()
return self._inventory.reconcile_inventory()
def get_host(self, hostname):
return self._inventory.get_host(hostname)
def _setup_inventory_plugins(self):
''' sets up loaded inventory plugins for usage '''
display.vvvv('setting up inventory plugins')
for name in C.INVENTORY_ENABLED:
plugin = inventory_loader.get(name)
if plugin:
plugin.set_options()
self._inventory_plugins.append(plugin)
else:
display.warning('Failed to load inventory plugin, skipping %s' % name)
if not self._inventory_plugins:
raise AnsibleError("No inventory plugins available to generate inventory, make sure you have at least one whitelisted.")
def parse_sources(self, cache=False):
''' iterate over inventory sources and parse each one to populate it'''
self._setup_inventory_plugins()
parsed = False
# allow for multiple inventory parsing
for source in self._sources:
if source:
if ',' not in source:
source = unfrackpath(source, follow=False)
parse = self.parse_source(source, cache=cache)
if parse and not parsed:
parsed = True
if parsed:
# do post processing
self._inventory.reconcile_inventory()
else:
display.warning("No inventory was parsed, only implicit localhost is available")
self._inventory_plugins = []
def parse_source(self, source, cache=False):
''' Generate or update inventory for the source provided '''
parsed = False
display.debug(u'Examining possible inventory source: %s' % source)
b_source = to_bytes(source)
# process directories as a collection of inventories
if os.path.isdir(b_source):
display.debug(u'Searching for inventory files in directory: %s' % source)
for i in sorted(os.listdir(b_source)):
display.debug(u'Considering %s' % i)
# Skip hidden files and stuff we explicitly ignore
if IGNORED.search(i):
continue
# recursively deal with directory entries
fullpath = os.path.join(b_source, i)
parsed_this_one = self.parse_source(to_native(fullpath), cache=cache)
display.debug(u'parsed %s as %s' % (fullpath, parsed_this_one))
if not parsed:
parsed = parsed_this_one
else:
# left with strings or files, let plugins figure it out
            # set so new hosts can use it for the inventory_file/inventory_dir vars
self._inventory.current_source = source
# get inventory plugins if needed, there should always be at least one generator
if not self._inventory_plugins:
self._setup_inventory_plugins()
# try source with each plugin
failures = []
for plugin in self._inventory_plugins:
plugin_name = to_native(getattr(plugin, '_load_name', getattr(plugin, '_original_path', '')))
display.debug(u'Attempting to use plugin %s (%s)' % (plugin_name, plugin._original_path))
# initialize
if plugin.verify_file(source):
try:
                        # if the plugin fails part way through we don't want a partial inventory
plugin.parse(self._inventory, self._loader, source, cache=cache)
parsed = True
display.vvv('Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
break
except AnsibleParserError as e:
display.debug('%s was not parsable by %s' % (to_text(source), plugin_name))
failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
except Exception as e:
display.debug('%s failed to parse %s' % (plugin_name, to_text(source)))
failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
else:
display.debug('%s did not meet %s requirements' % (to_text(source), plugin_name))
else:
if not parsed and failures:
                    # only show errors if no plugin managed to parse the source.
if C.INVENTORY_UNPARSED_IS_FAILED:
msg = "Could not parse inventory source %s with available plugins:\n" % source
for fail in failures:
msg += 'Plugin %s failed: %s\n' % (fail['plugin'], to_native(fail['exc']))
if display.verbosity >= 3:
msg += "%s\n" % fail['exc'].tb
raise AnsibleParserError(msg)
else:
for fail in failures:
display.warning(u'\n* Failed to parse %s with %s plugin: %s' % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
if hasattr(fail['exc'], 'tb'):
display.vvv(to_text(fail['exc'].tb))
if not parsed:
display.warning("Unable to parse %s as an inventory source" % to_text(source))
# clear up, jic
self._inventory.current_source = None
return parsed
def clear_caches(self):
''' clear all caches '''
self._hosts_patterns_cache = {}
self._pattern_cache = {}
# FIXME: flush inventory cache
def refresh_inventory(self):
''' recalculate inventory '''
self.clear_caches()
self._inventory = InventoryData()
self.parse_sources(cache=False)
def _match_list(self, items, pattern_str):
# compile patterns
try:
if not pattern_str.startswith('~'):
pattern = re.compile(fnmatch.translate(pattern_str))
else:
pattern = re.compile(pattern_str[1:])
except Exception:
raise AnsibleError('Invalid host list pattern: %s' % pattern_str)
# apply patterns
results = []
for item in items:
if pattern.match(item):
results.append(item)
return results
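    # Illustrative sketch, not part of the original class: how _match_list treats
    # a leading '~' as a regex and everything else as an fnmatch-style glob; the
    # sample names are assumptions and this method is never called by the module.
    def _example_match_list(self):
        names = ['web1', 'web2', 'db1']
        # glob match
        assert self._match_list(names, 'web*') == ['web1', 'web2']
        # regex match (the leading '~' is stripped before compiling)
        assert self._match_list(names, '~(web|db)[0-9]+') == names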
def get_hosts(self, pattern="all", ignore_limits=False, ignore_restrictions=False, order=None):
"""
Takes a pattern or list of patterns and returns a list of matching
inventory host names, taking into account any active restrictions
or applied subsets
"""
# Check if pattern already computed
if isinstance(pattern, list):
pattern_hash = u":".join(pattern)
else:
pattern_hash = pattern
if not ignore_limits and self._subset:
pattern_hash += ":%s" % to_native(self._subset)
if not ignore_restrictions and self._restriction:
pattern_hash += ":%s" % to_native(self._restriction)
if pattern_hash not in self._hosts_patterns_cache:
patterns = split_host_pattern(pattern)
hosts = self._evaluate_patterns(patterns)
# mainly useful for hostvars[host] access
if not ignore_limits and self._subset:
# exclude hosts not in a subset, if defined
subset = self._evaluate_patterns(self._subset)
hosts = [h for h in hosts if h in subset]
if not ignore_restrictions and self._restriction:
# exclude hosts mentioned in any restriction (ex: failed hosts)
hosts = [h for h in hosts if h.name in self._restriction]
seen = set()
self._hosts_patterns_cache[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)]
# sort hosts list if needed (should only happen when called from strategy)
if order in ['sorted', 'reverse_sorted']:
from operator import attrgetter
hosts = sorted(self._hosts_patterns_cache[pattern_hash][:], key=attrgetter('name'), reverse=(order == 'reverse_sorted'))
elif order == 'reverse_inventory':
hosts = sorted(self._hosts_patterns_cache[pattern_hash][:], reverse=True)
else:
hosts = self._hosts_patterns_cache[pattern_hash][:]
if order == 'shuffle':
from random import shuffle
shuffle(hosts)
elif order not in [None, 'inventory']:
            raise AnsibleOptionsError("Invalid 'order' specified for inventory hosts: %s" % order)
return hosts
def _evaluate_patterns(self, patterns):
"""
Takes a list of patterns and returns a list of matching host names,
taking into account any negative and intersection patterns.
"""
patterns = order_patterns(patterns)
hosts = []
for p in patterns:
# avoid resolving a pattern that is a plain host
if p in self._inventory.hosts:
hosts.append(self._inventory.get_host(p))
else:
that = self._match_one_pattern(p)
if p.startswith("!"):
hosts = [h for h in hosts if h not in frozenset(that)]
elif p.startswith("&"):
hosts = [h for h in hosts if h in frozenset(that)]
else:
hosts.extend([h for h in that if h.name not in frozenset([y.name for y in hosts])])
return hosts
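    # Illustrative sketch, not part of the original class: builds a throwaway
    # inventory to show how ordered patterns combine; the group and host names
    # are assumptions and this method is never called by the module itself.
    def _example_evaluate_patterns(self):
        self.add_group('webservers')
        self.add_group('staging')
        self.add_host('web1', group='webservers')
        self.add_host('web2', group='webservers')
        self.add_host('web1', group='staging')
        # intersection: start from 'webservers', keep only hosts also in 'staging'
        hosts = self._evaluate_patterns(['webservers', '&staging'])
        assert [h.name for h in hosts] == ['web1']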
def _match_one_pattern(self, pattern):
"""
Takes a single pattern and returns a list of matching host names.
Ignores intersection (&) and exclusion (!) specifiers.
The pattern may be:
1. A regex starting with ~, e.g. '~[abc]*'
2. A shell glob pattern with ?/*/[chars]/[!chars], e.g. 'foo*'
3. An ordinary word that matches itself only, e.g. 'foo'
The pattern is matched using the following rules:
1. If it's 'all', it matches all hosts in all groups.
2. Otherwise, for each known group name:
(a) if it matches the group name, the results include all hosts
in the group or any of its children.
(b) otherwise, if it matches any hosts in the group, the results
include the matching hosts.
This means that 'foo*' may match one or more groups (thus including all
hosts therein) but also hosts in other groups.
The built-in groups 'all' and 'ungrouped' are special. No pattern can
match these group names (though 'all' behaves as though it matches, as
described above). The word 'ungrouped' can match a host of that name,
and patterns like 'ungr*' and 'al*' can match either hosts or groups
other than all and ungrouped.
If the pattern matches one or more group names according to these rules,
it may have an optional range suffix to select a subset of the results.
This is allowed only if the pattern is not a regex, i.e. '~foo[1]' does
not work (the [1] is interpreted as part of the regex), but 'foo*[1]'
would work if 'foo*' matched the name of one or more groups.
Duplicate matches are always eliminated from the results.
"""
if pattern.startswith("&") or pattern.startswith("!"):
pattern = pattern[1:]
if pattern not in self._pattern_cache:
(expr, slice) = self._split_subscript(pattern)
hosts = self._enumerate_matches(expr)
try:
hosts = self._apply_subscript(hosts, slice)
except IndexError:
raise AnsibleError("No hosts matched the subscripted pattern '%s'" % pattern)
self._pattern_cache[pattern] = hosts
return self._pattern_cache[pattern]
def _split_subscript(self, pattern):
"""
Takes a pattern, checks if it has a subscript, and returns the pattern
without the subscript and a (start,end) tuple representing the given
subscript (or None if there is no subscript).
Validates that the subscript is in the right syntax, but doesn't make
sure the actual indices make sense in context.
"""
# Do not parse regexes for enumeration info
if pattern.startswith('~'):
return (pattern, None)
# We want a pattern followed by an integer or range subscript.
# (We can't be more restrictive about the expression because the
# fnmatch semantics permit [\[:\]] to occur.)
pattern_with_subscript = re.compile(
r'''^
(.+) # A pattern expression ending with...
\[(?: # A [subscript] expression comprising:
(-?[0-9]+)| # A single positive or negative number
([0-9]+)([:-]) # Or an x:y or x: range.
([0-9]*)
)\]
$
''', re.X
)
subscript = None
m = pattern_with_subscript.match(pattern)
if m:
(pattern, idx, start, sep, end) = m.groups()
if idx:
subscript = (int(idx), None)
else:
if not end:
end = -1
subscript = (int(start), int(end))
if sep == '-':
display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed")
return (pattern, subscript)
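    # Illustrative sketch, not part of the original class: how _split_subscript
    # separates an optional [index] or [start:end] suffix; the patterns are assumptions.
    def _example_split_subscript(self):
        # no subscript at all
        assert self._split_subscript('webservers') == ('webservers', None)
        # a single (possibly negative) index
        assert self._split_subscript('webservers[0]') == ('webservers', (0, None))
        assert self._split_subscript('webservers[-1]') == ('webservers', (-1, None))
        # an inclusive start:end range; an open end is recorded as -1
        assert self._split_subscript('webservers[1:3]') == ('webservers', (1, 3))
        assert self._split_subscript('webservers[2:]') == ('webservers', (2, -1))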
def _apply_subscript(self, hosts, subscript):
"""
Takes a list of hosts and a (start,end) tuple and returns the subset of
hosts based on the subscript (which may be None to return all hosts).
"""
if not hosts or not subscript:
return hosts
(start, end) = subscript
if end:
if end == -1:
end = len(hosts) - 1
return hosts[start:end + 1]
else:
return [hosts[start]]
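    # Illustrative sketch, not part of the original class: how a (start, end)
    # tuple from _split_subscript slices an already-matched host list; the
    # host values are assumptions.
    def _example_apply_subscript(self):
        hosts = ['web1', 'web2', 'web3', 'web4']
        # no subscript returns the list unchanged
        assert self._apply_subscript(hosts, None) == hosts
        # a single index picks exactly one host
        assert self._apply_subscript(hosts, (0, None)) == ['web1']
        # start:end is inclusive of both ends
        assert self._apply_subscript(hosts, (1, 2)) == ['web2', 'web3']
        # an end of -1 (an open-ended range) runs through the last host
        assert self._apply_subscript(hosts, (2, -1)) == ['web3', 'web4']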
def _enumerate_matches(self, pattern):
"""
Returns a list of host names matching the given pattern according to the
rules explained above in _match_one_pattern.
"""
results = []
# check if pattern matches group
matching_groups = self._match_list(self._inventory.groups, pattern)
if matching_groups:
for groupname in matching_groups:
results.extend(self._inventory.groups[groupname].get_hosts())
# check hosts if no groups matched or it is a regex/glob pattern
if not matching_groups or pattern.startswith('~') or any(special in pattern for special in ('.', '?', '*', '[')):
# pattern might match host
matching_hosts = self._match_list(self._inventory.hosts, pattern)
if matching_hosts:
for hostname in matching_hosts:
results.append(self._inventory.hosts[hostname])
if not results and pattern in C.LOCALHOST:
            # get_host auto-creates the implicit localhost when needed
implicit = self._inventory.get_host(pattern)
if implicit:
results.append(implicit)
if not results and pattern != 'all':
display.warning("Could not match supplied host pattern, ignoring: %s" % pattern)
return results
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
# FIXME: cache?
result = [h for h in self.get_hosts(pattern)]
# allow implicit localhost if pattern matches and no other results
if len(result) == 0 and pattern in C.LOCALHOST:
result = [pattern]
return result
def list_groups(self):
# FIXME: cache?
return sorted(self._inventory.groups.keys(), key=lambda x: x)
def restrict_to_hosts(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
        to batch serial operations in the main playbook code; don't use this for
        other purposes.
"""
if restriction is None:
return
elif not isinstance(restriction, list):
restriction = [restriction]
self._restriction = [h.name for h in restriction]
def subset(self, subset_pattern):
"""
Limits inventory results to a subset of inventory that matches a given
        pattern, such as to select a given geographic or numeric slice amongst
        a previous 'hosts' selection that only selects roles, or vice versa.
Corresponds to --limit parameter to ansible-playbook
"""
if subset_pattern is None:
self._subset = None
else:
subset_patterns = split_host_pattern(subset_pattern)
results = []
# allow Unix style @filename data
for x in subset_patterns:
if x.startswith("@"):
fd = open(x[1:])
results.extend(fd.read().split("\n"))
fd.close()
else:
results.append(x)
self._subset = results
def remove_restriction(self):
""" Do not restrict list operations """
self._restriction = None
def clear_pattern_cache(self):
self._pattern_cache = {}
|
jnerin/ansible
|
lib/ansible/inventory/manager.py
|
Python
|
gpl-3.0
| 23,570
|