repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
JimCircadian/ansible | refs/heads/devel | lib/ansible/modules/storage/netapp/na_ontap_cluster.py | 8 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: na_ontap_cluster
short_description: Create/Join ONTAP cluster. Apply license to cluster
extends_documentation_fragment:
- netapp.na_ontap
version_added: '2.6'
author: Suhas Bangalore Shekar (bsuhas@netapp.com), Archana Ganesan (garchana@netapp.com)
description:
- Create or join or apply licenses to ONTAP clusters
options:
state:
description:
- Whether the specified cluster should exist or not.
choices: ['present']
default: present
cluster_name:
description:
- The name of the cluster to manage.
cluster_ip_address:
description:
- IP address of cluster to be joined
license_code:
description:
- License code to be applied to the cluster
license_package:
description:
- License package name of the license to be removed
node_serial_number:
description:
- Serial number of the cluster node
'''
EXAMPLES = """
- name: Create cluster
na_ontap_cluster:
state: present
cluster_name: new_cluster
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Add license from cluster
na_ontap_cluster:
state: present
cluster_name: FPaaS-A300-01
license_code: SGHLQDBBVAAAAAAAAAAAAAAAAAAA
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Join cluster
na_ontap_cluster:
state: present
cluster_name: FPaaS-A300
cluster_ip_address: 10.61.184.181
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPCluster(object):
    """
    Create an ONTAP cluster, join a node to an existing cluster, and
    add/remove cluster licenses through the NetApp ZAPI interface.
    """
    def __init__(self):
        # Start from the shared ONTAP connection spec (hostname, username,
        # password, ...) and add the module-specific options on top.
        self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present'], default='present'),
            cluster_name=dict(required=False, type='str'),
            cluster_ip_address=dict(required=False, type='str'),
            license_code=dict(required=False, type='str'),
            license_package=dict(required=False, type='str'),
            node_serial_number=dict(required=False, type='str')
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True,
            # Removing a license needs both the package name and node serial.
            required_together=[
                ['license_package', 'node_serial_number']
            ],
            # Creating a cluster and joining an existing one are exclusive.
            mutually_exclusive=[
                ['cluster_name', 'cluster_ip_address'],
            ]
        )
        parameters = self.module.params
        # set up state variables
        self.state = parameters['state']
        self.cluster_ip_address = parameters['cluster_ip_address']
        self.cluster_name = parameters['cluster_name']
        self.license_code = parameters['license_code']
        self.license_package = parameters['license_package']
        self.node_serial_number = parameters['node_serial_number']
        if HAS_NETAPP_LIB is False:
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            # ZAPI server connection used by all the methods below.
            self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)

    def create_cluster(self):
        """
        Create a cluster.

        Returns True when the cluster was created, False when ZAPI reports
        error 36503 (node already in use, i.e. nothing to do); any other
        API error fails the module.
        """
        cluster_create = netapp_utils.zapi.NaElement.create_node_with_children(
            'cluster-create', **{'cluster-name': self.cluster_name})
        try:
            self.server.invoke_successfully(cluster_create,
                                            enable_tunneling=True)
            return True
        except netapp_utils.zapi.NaApiError as error:
            # Error 36503 denotes node already being used.
            if to_native(error.code) == "36503":
                return False
            else:
                self.module.fail_json(msg='Error creating cluster %s: %s'
                                      % (self.cluster_name, to_native(error)),
                                      exception=traceback.format_exc())

    def cluster_join(self):
        """
        Add a node to an existing cluster.

        Returns True when the join succeeded, False when ZAPI reports
        error 36503 (node already in use); any other API error fails
        the module.
        """
        cluster_add_node = netapp_utils.zapi.NaElement.create_node_with_children(
            'cluster-join', **{'cluster-ip-address': self.cluster_ip_address})
        try:
            self.server.invoke_successfully(cluster_add_node, enable_tunneling=True)
            return True
        except netapp_utils.zapi.NaApiError as error:
            # Error 36503 denotes node already being used.
            if to_native(error.code) == "36503":
                return False
            else:
                self.module.fail_json(msg='Error adding node to cluster %s: %s'
                                      % (self.cluster_name, to_native(error)),
                                      exception=traceback.format_exc())

    def license_v2_add(self):
        """
        Apply a license (self.license_code) to the cluster.
        """
        license_add = netapp_utils.zapi.NaElement.create_node_with_children('license-v2-add')
        license_add.add_node_with_children('codes', **{'license-code-v2': self.license_code})
        try:
            self.server.invoke_successfully(license_add, enable_tunneling=True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error adding license to the cluster %s: %s'
                                  % (self.cluster_name, to_native(error)),
                                  exception=traceback.format_exc())

    def license_v2_delete(self):
        """
        Delete license (self.license_package on self.node_serial_number)
        from the cluster.
        """
        license_delete = netapp_utils.zapi.NaElement.create_node_with_children(
            'license-v2-delete', **{'package': self.license_package,
                                    'serial-number': self.node_serial_number})
        try:
            self.server.invoke_successfully(license_delete, enable_tunneling=True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error deleting license from cluster %s : %s'
                                  % (self.cluster_name, to_native(error)),
                                  exception=traceback.format_exc())

    def apply(self):
        """
        Apply action to cluster
        """
        property_changed = False
        create_flag = False
        join_flag = False
        changed = False
        if self.state == 'absent':
            # 'absent' is not an accepted choice for this module; no-op.
            pass
        elif self.state == 'present':  # license add, delete
            changed = True
        if changed:
            if self.module.check_mode:
                # Check mode: report without touching the cluster.
                pass
            else:
                if self.state == 'present':
                    if self.cluster_name is not None:
                        create_flag = self.create_cluster()
                    if self.cluster_ip_address is not None:
                        join_flag = self.cluster_join()
                    if self.license_code is not None:
                        self.license_v2_add()
                        property_changed = True
                    if self.license_package is not None and self.node_serial_number is not None:
                        self.license_v2_delete()
                        property_changed = True
                # Report 'changed' only when some operation actually ran.
                changed = property_changed or create_flag or join_flag
        self.module.exit_json(changed=changed)
def main():
    """Module entry point: build the cluster manager and run its apply loop."""
    cluster_manager = NetAppONTAPCluster()
    cluster_manager.apply()
if __name__ == '__main__':
main()
|
leorochael/odoo | refs/heads/8.0 | setup/package.py | 180 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import optparse
import os
import pexpect
import shutil
import signal
import subprocess
import tempfile
import time
import xmlrpclib
from contextlib import contextmanager
from glob import glob
from os.path import abspath, dirname, join
from sys import stdout
from tempfile import NamedTemporaryFile
#----------------------------------------------------------
# Utils
#----------------------------------------------------------
# Load release metadata (notably `version`) from openerp/release.py into
# this module's globals.
execfile(join(dirname(__file__), '..', 'openerp', 'release.py'))
version = version.split('-')[0]  # strip any -dev/-rc style suffix
timestamp = time.strftime("%Y%m%d", time.gmtime())  # build date, used in artifact names

# GPG signing credentials, supplied through the environment.
GPGPASSPHRASE = os.getenv('GPGPASSPHRASE')
GPGID = os.getenv('GPGID')
# Subdirectory of the publication root used for each package type.
PUBLISH_DIRS = {
    'debian': 'deb',
    'redhat': 'rpm',
    'tarball': 'src',
    'windows': 'exe',
}
# Add-ons excluded from the published packages.
ADDONS_NOT_TO_PUBLISH = [
    'web_analytics'
]
def mkdir(d):
    """Create directory *d* (with parents); no-op when it already exists."""
    if os.path.isdir(d):
        return
    os.makedirs(d)
def system(l, chdir=None):
    """Echo and run a command, returning its exit status.

    *l* is either an argv list (run directly via spawnvp) or a string
    (run through ``sh -c``). When *chdir* is given, the command runs
    from that directory and the previous working directory is restored
    afterwards (fix: also restored when spawning raises).
    """
    print(l)  # parenthesized form: identical output in py2, valid in py3
    rc = None  # fix: previously unbound when l was neither list nor str
    if chdir:
        cwd = os.getcwd()
        os.chdir(chdir)
    try:
        if isinstance(l, list):
            rc = os.spawnvp(os.P_WAIT, l[0], l)
        elif isinstance(l, str):
            tmp = ['sh', '-c', l]
            rc = os.spawnvp(os.P_WAIT, tmp[0], tmp)
    finally:
        if chdir:
            os.chdir(cwd)
    return rc
def _rpc_count_modules(addr='http://127.0.0.1', port=8069, dbname='mycompany'):
    """Smoke-test a freshly installed package over XML-RPC.

    Counts the installed modules of *dbname* and raises when base (or any
    of its dependencies) failed to install.
    """
    time.sleep(5)  # give the freshly started server time to come up
    modules = xmlrpclib.ServerProxy('%s:%s/xmlrpc/object' % (addr, port)).execute(
        dbname, 1, 'admin', 'ir.module.module', 'search', [('state', '=', 'installed')]
    )
    if modules and len(modules) > 1:
        time.sleep(1)
        # Modules stuck in 'to install' mean dependency installation failed.
        toinstallmodules = xmlrpclib.ServerProxy('%s:%s/xmlrpc/object' % (addr, port)).execute(
            dbname, 1, 'admin', 'ir.module.module', 'search', [('state', '=', 'to install')]
        )
        if toinstallmodules:
            print("Package test: FAILED. Not able to install dependencies of base.")
            raise Exception("Installation of package failed")
        else:
            print("Package test: successfuly installed %s modules" % len(modules))
    else:
        print("Package test: FAILED. Not able to install base.")
        raise Exception("Installation of package failed")
def publish(o, type, extensions):
    """Move the built release files for *type* (one per extension in
    *extensions*) from the build dir to the publication dir, refreshing
    the corresponding 'latest' symlinks.

    Returns the list of published file paths.
    """
    def _publish(o, release):
        arch = ''  # NOTE(review): unused; kept as-is
        filename = release.split(os.path.sep)[-1]
        release_dir = PUBLISH_DIRS[type]
        release_path = join(o.pub, release_dir, filename)
        system('mkdir -p %s' % join(o.pub, release_dir))
        shutil.move(join(o.build_dir, release), release_path)
        # Latest/symlink handler: replace the timestamp with 'latest' and
        # repoint the symlink at the new artifact.
        release_abspath = abspath(release_path)
        latest_abspath = release_abspath.replace(timestamp, 'latest')
        if os.path.islink(latest_abspath):
            os.unlink(latest_abspath)
        os.symlink(release_abspath, latest_abspath)
        return release_path
    published = []
    for extension in extensions:
        release = glob("%s/odoo_*.%s" % (o.build_dir, extension))[0]
        published.append(_publish(o, release))
    return published
class OdooDocker(object):
    """Drive an interactive docker container (via pexpect) used to install
    and smoke-test a built package."""
    def __init__(self):
        # Full bash transcript of the container session, kept on disk so it
        # can be dumped when a test fails.
        self.log_file = NamedTemporaryFile(mode='w+b', prefix="bash", suffix=".txt", delete=False)
        self.port = 8069  # TODO sle: reliable way to get a free port?
        self.prompt_re = '[root@nightly-tests] # '
        self.timeout = 600

    def system(self, command):
        # Run a command in the container and wait for the prompt to return.
        self.docker.sendline(command)
        self.docker.expect_exact(self.prompt_re)

    def start(self, docker_image, build_dir, pub_dir):
        """Start *docker_image* with the build dir mounted at /opt/release
        and the odoo port forwarded to localhost."""
        self.build_dir = build_dir
        self.pub_dir = pub_dir
        self.docker = pexpect.spawn(
            'docker run -v %s:/opt/release -p 127.0.0.1:%s:8069'
            ' -t -i %s /bin/bash --noediting' % (self.build_dir, self.port, docker_image),
            timeout=self.timeout,
            searchwindowsize=len(self.prompt_re) + 1,
        )
        time.sleep(2)  # let the bash start
        self.docker.logfile_read = self.log_file
        # Remember the container id so it can be force-removed on teardown.
        self.id = subprocess.check_output('docker ps -l -q', shell=True)

    def end(self):
        """Verify the installation over RPC, dump the session log on
        failure, then always destroy the container and its log file."""
        try:
            _rpc_count_modules(port=str(self.port))
        except Exception, e:
            print('Exception during docker execution: %s:' % str(e))
            print('Error during docker execution: printing the bash output:')
            with open(self.log_file.name) as f:
                print '\n'.join(f.readlines())
            raise
        finally:
            self.docker.close()
            system('docker rm -f %s' % self.id)
            self.log_file.close()
            os.remove(self.log_file.name)
@contextmanager
def docker(docker_image, build_dir, pub_dir):
    """Context manager yielding a started OdooDocker; the container is
    always torn down on exit (which also smoke-tests the install)."""
    _docker = OdooDocker()
    try:
        _docker.start(docker_image, build_dir, pub_dir)
        try:
            yield _docker
        except Exception, e:
            # Re-raised unchanged so the original traceback reaches the caller.
            raise
    finally:
        _docker.end()
class KVM(object):
    """Boot a snapshot of a build VM under KVM and run self.run() against
    it, with an alarm-based watchdog that kills the VM on timeout."""
    def __init__(self, o, image, ssh_key='', login='openerp'):
        self.o = o
        self.image = image
        self.ssh_key = ssh_key
        self.login = login

    def timeout(self,signum,frame):
        # SIGALRM handler: the run took too long, terminate the VM.
        print "vm timeout kill",self.pid
        os.kill(self.pid,15)

    def start(self):
        """Launch the VM (snapshot mode; host forwards for ssh/odoo/postgres
        on ports 10022/18069/15432), run self.run(), then kill the VM."""
        l="kvm -net nic,model=rtl8139 -net user,hostfwd=tcp:127.0.0.1:10022-:22,hostfwd=tcp:127.0.0.1:18069-:8069,hostfwd=tcp:127.0.0.1:15432-:5432 -drive".split(" ")
        #l.append('file=%s,if=virtio,index=0,boot=on,snapshot=on'%self.image)
        l.append('file=%s,snapshot=on'%self.image)
        #l.extend(['-vnc','127.0.0.1:1'])
        l.append('-nographic')
        print " ".join(l)
        self.pid=os.spawnvp(os.P_NOWAIT, l[0], l)
        time.sleep(10)  # let the guest boot
        signal.alarm(2400)  # 40 minute watchdog
        signal.signal(signal.SIGALRM, self.timeout)
        try:
            self.run()
        finally:
            signal.signal(signal.SIGALRM, signal.SIG_DFL)
            os.kill(self.pid,15)
            time.sleep(10)

    def ssh(self,cmd):
        # Run *cmd* in the guest over the forwarded ssh port.
        l=['ssh','-o','UserKnownHostsFile=/dev/null','-o','StrictHostKeyChecking=no','-p','10022','-i',self.ssh_key,'%s@127.0.0.1'%self.login,cmd]
        system(l)

    def rsync(self,args,options='--delete --exclude .bzrignore'):
        # Copy files to/from the guest over the forwarded ssh port.
        cmd ='rsync -rt -e "ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -p 10022 -i %s" %s %s' % (self.ssh_key, options, args)
        system(cmd)

    def run(self):
        # Hook overridden by subclasses with the actual in-VM work.
        pass
class KVMWinBuildExe(KVM):
    """Build the Windows all-in-one installer inside the VM."""
    def run(self):
        # Generate the version/python makefile fragments consumed by the
        # win32 Makefile, then ship the tree to the guest and build.
        with open(join(self.o.build_dir, 'setup/win32/Makefile.version'), 'w') as f:
            f.write("VERSION=%s\n" % self.o.version_full)
        with open(join(self.o.build_dir, 'setup/win32/Makefile.python'), 'w') as f:
            f.write("PYTHON_VERSION=%s\n" % self.o.vm_winxp_python_version.replace('.', ''))
        self.ssh("mkdir -p build")
        self.rsync('%s/ %s@127.0.0.1:build/server/' % (self.o.build_dir, self.login))
        self.ssh("cd build/server/setup/win32;time make allinone;")
        # Fetch the produced installer back into the build dir.
        self.rsync('%s@127.0.0.1:build/server/setup/win32/release/ %s/' % (self.login, self.o.build_dir), '')
        print "KVMWinBuildExe.run(): done"
class KVMWinTestExe(KVM):
    """Install and smoke-test the built Windows installer inside the VM."""
    def run(self):
        # Cannot use o.version_full when the version is not correctly parsed
        # (for instance, containing *rc* or *dev*)
        setuppath = glob("%s/openerp-server-setup-*.exe" % self.o.build_dir)[0]
        setupfile = setuppath.split('/')[-1]
        setupversion = setupfile.split('openerp-server-setup-')[1].split('.exe')[0]
        self.rsync('"%s" %s@127.0.0.1:' % (setuppath, self.login))
        self.ssh("TEMP=/tmp ./%s /S" % setupfile)  # /S = silent install
        self.ssh('PGPASSWORD=openpgpwd /cygdrive/c/"Program Files"/"Odoo %s"/PostgreSQL/bin/createdb.exe -e -U openpg mycompany' % setupversion)
        self.ssh('/cygdrive/c/"Program Files"/"Odoo %s"/server/openerp-server.exe -d mycompany -i base --stop-after-init' % setupversion)
        self.ssh('net start odoo-server-8.0')
        # Validate the installation through the forwarded odoo port.
        _rpc_count_modules(port=18069)
#----------------------------------------------------------
# Stage: building
#----------------------------------------------------------
def _prepare_build_dir(o, win32=False):
    """Copy the odoo tree into the build dir (via rsync) and relocate the
    standalone addons under openerp/addons; win32 support files are
    excluded unless *win32* is set."""
    cmd = ['rsync', '-a', '--exclude', '.git', '--exclude', '*.pyc', '--exclude', '*.pyo']
    if not win32:
        cmd += ['--exclude', 'setup/win32']
    system(cmd + ['%s/' % o.odoo_dir, o.build_dir])
    try:
        for addon_path in glob(join(o.build_dir, 'addons/*')):
            if addon_path.split(os.path.sep)[-1] not in ADDONS_NOT_TO_PUBLISH:
                shutil.move(addon_path, join(o.build_dir, 'openerp/addons'))
    except shutil.Error:
        # Thrown when the add-on is already in openerp/addons (if _prepare_build_dir
        # has already been called once)
        pass
def build_tgz(o):
    """Build the source tarball and zip via sdist and rename them to the
    odoo_<version>.<timestamp>.* convention."""
    system(['python2', 'setup.py', 'sdist', '--quiet', '--formats=gztar,zip'], o.build_dir)
    system(['mv', glob('%s/dist/odoo-*.tar.gz' % o.build_dir)[0], '%s/odoo_%s.%s.tar.gz' % (o.build_dir, version, timestamp)])
    system(['mv', glob('%s/dist/odoo-*.zip' % o.build_dir)[0], '%s/odoo_%s.%s.zip' % (o.build_dir, version, timestamp)])
def build_deb(o):
    """Build the signed debian package set and move the artifacts (written
    by dpkg-buildpackage to the parent directory) into the build dir."""
    # Append timestamp to version for the .dsc to refer the right .tar.gz
    cmd=['sed', '-i', '1s/^.*$/odoo (%s.%s) stable; urgency=low/'%(version,timestamp), 'debian/changelog']
    subprocess.call(cmd, cwd=o.build_dir)
    deb = pexpect.spawn('dpkg-buildpackage -rfakeroot -k%s' % GPGID, cwd=o.build_dir)
    deb.logfile = stdout
    if GPGPASSPHRASE:
        # The passphrase is requested twice (signing the .dsc and .changes).
        deb.expect_exact('Enter passphrase: ', timeout=1200)
        deb.send(GPGPASSPHRASE + '\r\n')
        deb.expect_exact('Enter passphrase: ')
        deb.send(GPGPASSPHRASE + '\r\n')
    deb.expect(pexpect.EOF, timeout=1200)
    system(['mv', glob('%s/../odoo_*.deb' % o.build_dir)[0], '%s' % o.build_dir])
    system(['mv', glob('%s/../odoo_*.dsc' % o.build_dir)[0], '%s' % o.build_dir])
    system(['mv', glob('%s/../odoo_*_amd64.changes' % o.build_dir)[0], '%s' % o.build_dir])
    system(['mv', glob('%s/../odoo_*.tar.gz' % o.build_dir)[0], '%s' % o.build_dir])
def build_rpm(o):
    """Build the noarch RPM via bdist_rpm and rename it to the
    odoo_<version>.<timestamp>.noarch.rpm convention."""
    system(['python2', 'setup.py', '--quiet', 'bdist_rpm'], o.build_dir)
    system(['mv', glob('%s/dist/odoo-*.noarch.rpm' % o.build_dir)[0], '%s/odoo_%s.%s.noarch.rpm' % (o.build_dir, version, timestamp)])
def build_exe(o):
    """Build the Windows installer inside the KVM VM, then copy it to the
    odoo_<version>.<timestamp>.exe name."""
    KVMWinBuildExe(o, o.vm_winxp_image, o.vm_winxp_ssh_key, o.vm_winxp_login).start()
    system(['cp', glob('%s/openerp*.exe' % o.build_dir)[0], '%s/odoo_%s.%s.exe' % (o.build_dir, version, timestamp)])
#----------------------------------------------------------
# Stage: testing
#----------------------------------------------------------
def _prepare_testing(o):
    """Build the docker images (src/debian/centos) used to test whichever
    package types are not disabled by the --no-* options."""
    if not o.no_tarball:
        subprocess.call(["mkdir", "docker_src"], cwd=o.build_dir)
        subprocess.call(["cp", "package.dfsrc", os.path.join(o.build_dir, "docker_src", "Dockerfile")],
                        cwd=os.path.join(o.odoo_dir, "setup"))
        # Use rsync to copy requirements.txt in order to keep original permissions
        subprocess.call(["rsync", "-a", "requirements.txt", os.path.join(o.build_dir, "docker_src")],
                        cwd=os.path.join(o.odoo_dir))
        subprocess.call(["docker", "build", "-t", "odoo-%s-src-nightly-tests" % version, "."],
                        cwd=os.path.join(o.build_dir, "docker_src"))
    if not o.no_debian:
        subprocess.call(["mkdir", "docker_debian"], cwd=o.build_dir)
        subprocess.call(["cp", "package.dfdebian", os.path.join(o.build_dir, "docker_debian", "Dockerfile")],
                        cwd=os.path.join(o.odoo_dir, "setup"))
        # Use rsync to copy requirements.txt in order to keep original permissions
        subprocess.call(["rsync", "-a", "requirements.txt", os.path.join(o.build_dir, "docker_debian")],
                        cwd=os.path.join(o.odoo_dir))
        subprocess.call(["docker", "build", "-t", "odoo-%s-debian-nightly-tests" % version, "."],
                        cwd=os.path.join(o.build_dir, "docker_debian"))
    if not o.no_rpm:
        subprocess.call(["mkdir", "docker_centos"], cwd=o.build_dir)
        subprocess.call(["cp", "package.dfcentos", os.path.join(o.build_dir, "docker_centos", "Dockerfile")],
                        cwd=os.path.join(o.odoo_dir, "setup"))
        subprocess.call(["docker", "build", "-t", "odoo-%s-centos-nightly-tests" % version, "."],
                        cwd=os.path.join(o.build_dir, "docker_centos"))
def test_tgz(o):
    """Install the source tarball in the src docker image and boot odoo."""
    with docker('odoo-%s-src-nightly-tests' % version, o.build_dir, o.pub) as wheezy:
        wheezy.release = '*.tar.gz'
        wheezy.system("service postgresql start")
        wheezy.system('pip install /opt/release/%s' % wheezy.release)
        wheezy.system("useradd --system --no-create-home odoo")
        wheezy.system('su postgres -s /bin/bash -c "createuser -s odoo"')
        wheezy.system('su postgres -s /bin/bash -c "createdb mycompany"')
        wheezy.system('mkdir /var/lib/odoo')
        wheezy.system('chown odoo:odoo /var/lib/odoo')
        # Install base, then leave a server running in the background for
        # the RPC check performed on context-manager exit.
        wheezy.system('su odoo -s /bin/bash -c "odoo.py --addons-path=/usr/local/lib/python2.7/dist-packages/openerp/addons -d mycompany -i base --stop-after-init"')
        wheezy.system('su odoo -s /bin/bash -c "odoo.py --addons-path=/usr/local/lib/python2.7/dist-packages/openerp/addons -d mycompany &"')
def test_deb(o):
    """Install the .deb in the debian docker image and boot odoo."""
    with docker('odoo-%s-debian-nightly-tests' % version, o.build_dir, o.pub) as wheezy:
        wheezy.release = '*.deb'
        wheezy.system("service postgresql start")
        wheezy.system('su postgres -s /bin/bash -c "createdb mycompany"')
        wheezy.system('/usr/bin/dpkg -i /opt/release/%s' % wheezy.release)
        # dpkg leaves dependencies unconfigured; apt-get -f pulls them in.
        wheezy.system('/usr/bin/apt-get install -f -y')
        wheezy.system('su odoo -s /bin/bash -c "odoo.py -c /etc/odoo/openerp-server.conf -d mycompany -i base --stop-after-init"')
        wheezy.system('su odoo -s /bin/bash -c "odoo.py -c /etc/odoo/openerp-server.conf -d mycompany &"')
def test_rpm(o):
    """Install the RPM in the centos docker image and boot odoo."""
    with docker('odoo-%s-centos-nightly-tests' % version, o.build_dir, o.pub) as centos7:
        centos7.release = '*.noarch.rpm'
        # Start postgresql
        centos7.system('su postgres -c "/usr/bin/pg_ctl -D /var/lib/postgres/data start"')
        centos7.system('sleep 5')
        centos7.system('su postgres -c "createdb mycompany"')
        # Odoo install
        centos7.system('yum install -d 0 -e 0 /opt/release/%s -y' % centos7.release)
        centos7.system('su odoo -s /bin/bash -c "openerp-server -c /etc/odoo/openerp-server.conf -d mycompany -i base --stop-after-init"')
        centos7.system('su odoo -s /bin/bash -c "openerp-server -c /etc/odoo/openerp-server.conf -d mycompany &"')
def test_exe(o):
    """Install and smoke-test the Windows installer inside the KVM VM."""
    KVMWinTestExe(o, o.vm_winxp_image, o.vm_winxp_ssh_key, o.vm_winxp_login).start()
#---------------------------------------------------------
# Generates Packages, Sources and Release files of debian package
#---------------------------------------------------------
def gen_deb_package(o, published_files):
    """Regenerate the debian repository metadata (Packages/Sources/Release)
    for *published_files* in o.pub/deb and sign the Release file."""
    # Executes command to produce file_name in path, and moves it to o.pub/deb
    def _gen_file(o, (command, file_name), path):
        cur_tmp_file_path = os.path.join(path, file_name)
        with open(cur_tmp_file_path, 'w') as out:
            subprocess.call(command, stdout=out, cwd=path)
        system(['cp', cur_tmp_file_path, os.path.join(o.pub, 'deb', file_name)])
    # Copy files to a temp directory (required because the working directory must contain only the
    # files of the last release)
    temp_path = tempfile.mkdtemp(suffix='debPackages')
    for pub_file_path in published_files:
        system(['cp', pub_file_path, temp_path])
    commands = [
        (['dpkg-scanpackages', '.'], "Packages"),  # Generate Packages file
        (['dpkg-scansources', '.'], "Sources"),  # Generate Sources file
        (['apt-ftparchive', 'release', '.'], "Release")  # Generate Release file
    ]
    # Generate files
    for command in commands:
        _gen_file(o, command, temp_path)
    # Remove temp directory
    shutil.rmtree(temp_path)
    # Generate Release.gpg (= signed Release)
    # Options -abs: -a (Create ASCII armored output), -b (Make a detach signature), -s (Make a signature)
    subprocess.call(['gpg', '--default-key', GPGID, '--passphrase', GPGPASSPHRASE, '--yes', '-abs', '--no-tty', '-o', 'Release.gpg', 'Release'], cwd=os.path.join(o.pub, 'deb'))
#---------------------------------------------------------
# Generates an RPM repo
#---------------------------------------------------------
def gen_rpm_repo(o, file_name):
    """Sign the RPM *file_name* and regenerate the yum repodata in o.pub/rpm."""
    # Sign the RPM
    rpmsign = pexpect.spawn('/bin/bash', ['-c', 'rpm --resign %s' % file_name], cwd=os.path.join(o.pub, 'rpm'))
    rpmsign.expect_exact('Enter pass phrase: ')
    rpmsign.send(GPGPASSPHRASE + '\r\n')
    rpmsign.expect(pexpect.EOF)
    # Removes the old repodata
    subprocess.call(['rm', '-rf', os.path.join(o.pub, 'rpm', 'repodata')])
    # Copy files to a temp directory (required because the working directory must contain only the
    # files of the last release)
    temp_path = tempfile.mkdtemp(suffix='rpmPackages')
    subprocess.call(['cp', file_name, temp_path])
    subprocess.call(['createrepo', temp_path])  # creates a repodata folder in temp_path
    subprocess.call(['cp', '-r', os.path.join(temp_path, "repodata"), os.path.join(o.pub, 'rpm')])
    # Remove temp directory
    shutil.rmtree(temp_path)
#----------------------------------------------------------
# Options and Main
#----------------------------------------------------------
def options():
    """Parse command-line options and derive the build paths.

    Returns the optparse values object, extended with odoo_dir, pkg,
    version_full, work and work_addons.
    """
    op = optparse.OptionParser()
    # Default build dir: sibling of the checkout, suffixed with the date.
    root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    build_dir = "%s-%s" % (root, timestamp)
    op.add_option("-b", "--build-dir", default=build_dir, help="build directory (%default)", metavar="DIR")
    op.add_option("-p", "--pub", default=None, help="pub directory (%default)", metavar="DIR")
    op.add_option("", "--no-testing", action="store_true", help="don't test the builded packages")
    op.add_option("-v", "--version", default='8.0', help="version (%default)")
    op.add_option("", "--no-debian", action="store_true", help="don't build the debian package")
    op.add_option("", "--no-rpm", action="store_true", help="don't build the rpm package")
    op.add_option("", "--no-tarball", action="store_true", help="don't build the tarball")
    op.add_option("", "--no-windows", action="store_true", help="don't build the windows package")
    # Windows VM
    op.add_option("", "--vm-winxp-image", default='/home/odoo/vm/winxp27/winxp27.vdi', help="%default")
    op.add_option("", "--vm-winxp-ssh-key", default='/home/odoo/vm/winxp27/id_rsa', help="%default")
    op.add_option("", "--vm-winxp-login", default='Naresh', help="Windows login (%default)")
    op.add_option("", "--vm-winxp-python-version", default='2.7', help="Windows Python version installed in the VM (default: %default)")
    (o, args) = op.parse_args()
    # derive other options
    o.odoo_dir = root
    o.pkg = join(o.build_dir, 'pkg')
    o.version_full = '%s-%s' % (o.version, timestamp)
    o.work = join(o.build_dir, 'openerp-%s' % o.version_full)
    o.work_addons = join(o.work, 'openerp', 'addons')
    return o
def main():
    """Build, test, publish and sign every requested package type, then
    clean up the build directory and any leftover docker containers."""
    o = options()
    _prepare_build_dir(o)
    if not o.no_testing:
        _prepare_testing(o)
    try:
        if not o.no_tarball:
            build_tgz(o)
            try:
                if not o.no_testing:
                    test_tgz(o)
                published_files = publish(o, 'tarball', ['tar.gz', 'zip'])
            except Exception, e:
                print("Won't publish the tgz release.\n Exception: %s" % str(e))
        if not o.no_debian:
            build_deb(o)
            try:
                if not o.no_testing:
                    test_deb(o)
                published_files = publish(o, 'debian', ['deb', 'dsc', 'changes', 'tar.gz'])
                gen_deb_package(o, published_files)
            except Exception, e:
                print("Won't publish the deb release.\n Exception: %s" % str(e))
        if not o.no_rpm:
            build_rpm(o)
            try:
                if not o.no_testing:
                    test_rpm(o)
                published_files = publish(o, 'redhat', ['noarch.rpm'])
                gen_rpm_repo(o, published_files[0])
            except Exception, e:
                print("Won't publish the rpm release.\n Exception: %s" % str(e))
        if not o.no_windows:
            _prepare_build_dir(o, win32=True)
            build_exe(o)
            try:
                if not o.no_testing:
                    test_exe(o)
                published_files = publish(o, 'windows', ['exe'])
            except Exception, e:
                print("Won't publish the exe release.\n Exception: %s" % str(e))
    # NOTE(review): this bare except silently swallows any build failure;
    # consider at least logging the exception before cleanup.
    except:
        pass
    finally:
        shutil.rmtree(o.build_dir)
        print('Build dir %s removed' % o.build_dir)
        if not o.no_testing:
            system("docker rm -f `docker ps -a | awk '{print $1 }'` 2>>/dev/null")
            print('Remaining dockers removed')
if __name__ == '__main__':
main()
|
SohKai/ChronoLogger | refs/heads/master | web/flask/lib/python2.7/site-packages/sqlalchemy/connectors/zxJDBC.py | 18 | # connectors/zxJDBC.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import sys
from sqlalchemy.connectors import Connector
class ZxJDBCConnector(Connector):
    """Connector base for zxJDBC (Jython) DBAPI drivers."""
    driver = 'zxjdbc'

    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    supports_unicode_binds = True
    supports_unicode_statements = sys.version > '2.5.0+'
    description_encoding = None
    default_paramstyle = 'qmark'

    # Filled in by dialect-specific subclasses.
    jdbc_db_name = None
    jdbc_driver_name = None

    @classmethod
    def dbapi(cls):
        # Deferred import: com.ziclix.python.sql exists only under Jython.
        from com.ziclix.python.sql import zxJDBC
        return zxJDBC

    def _driver_kwargs(self):
        """Return kw arg dict to be sent to connect()."""
        return {}

    def _create_jdbc_url(self, url):
        """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
        port_part = ':%s' % url.port if url.port is not None else ''
        return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
                                      port_part, url.database)

    def create_connect_args(self, url):
        connect_kwargs = self._driver_kwargs()
        connect_kwargs.update(url.query)
        positional = [
            self._create_jdbc_url(url),
            url.username,
            url.password,
            self.jdbc_driver_name,
        ]
        return [positional, connect_kwargs]

    def is_disconnect(self, e, connection, cursor):
        # Only ProgrammingError carries the closed-connection/cursor text.
        if not isinstance(e, self.dbapi.ProgrammingError):
            return False
        message = str(e)
        return ('connection is closed' in message
                or 'cursor is closed' in message)

    def _get_server_version_info(self, connection):
        # use connection.connection.dbversion, and parse appropriately
        # to get a tuple
        raise NotImplementedError()
|
indictranstech/internal-erpnext | refs/heads/develop | erpnext/manufacturing/doctype/production_planning_tool/__init__.py | 37694 | from __future__ import unicode_literals
|
SmileyChris/django-countries | refs/heads/master | django_countries/tests/forms.py | 1 | from django import forms
from django_countries.tests import models
class PersonForm(forms.ModelForm):
    """ModelForm over Person, exposing its two country fields."""
    class Meta:
        model = models.Person
        fields = ["country", "favourite_country"]


class AllowNullForm(forms.ModelForm):
    """ModelForm over AllowNull, exposing its (nullable) country field."""
    class Meta:
        model = models.AllowNull
        fields = ["country"]


class MultiCountryForm(forms.ModelForm):
    """ModelForm over MultiCountry, exposing its multi-valued countries field."""
    class Meta:
        model = models.MultiCountry
        fields = ["countries"]
|
kumarshivam675/Mobile10X-Hack | refs/heads/master | build/lib.linux-x86_64-2.7/yowsup/layers/protocol_groups/protocolentities/test_iq_result_groups.py | 63 | from yowsup.layers.protocol_iq.protocolentities.test_iq_result import ResultIqProtocolEntityTest
class GroupsResultIqProtocolEntityTest(ResultIqProtocolEntityTest):
pass |
kfox1111/horizon | refs/heads/master | openstack_dashboard/api/fwaas.py | 36 | # Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django.utils.datastructures import SortedDict
from horizon.utils import memoized
from openstack_dashboard.api import neutron
neutronclient = neutron.neutronclient
class Rule(neutron.NeutronAPIDictWrapper):
    """Wrapper for neutron firewall rule."""

    def get_dict(self):
        # Expose the raw API dict, aliasing 'id' under 'rule_id' as well.
        data = self._apidict
        data['rule_id'] = data['id']
        return data
class Policy(neutron.NeutronAPIDictWrapper):
    """Wrapper for neutron firewall policy."""

    def get_dict(self):
        # Expose the raw API dict, aliasing 'id' under 'policy_id' as well.
        data = self._apidict
        data['policy_id'] = data['id']
        return data
class Firewall(neutron.NeutronAPIDictWrapper):
    """Wrapper for neutron firewall."""

    def __init__(self, apiresource):
        # Derive a display-friendly 'admin_state' value from the boolean
        # 'admin_state_up' field returned by the API.
        apiresource['admin_state'] = \
            'UP' if apiresource['admin_state_up'] else 'DOWN'
        super(Firewall, self).__init__(apiresource)

    def get_dict(self):
        # Expose the raw API dict, aliasing 'id' under 'firewall_id' as well.
        firewall_dict = self._apidict
        firewall_dict['firewall_id'] = firewall_dict['id']
        return firewall_dict
def rule_create(request, **kwargs):
    """Create a firewall rule

    :param request: request context
    :param name: name for rule
    :param description: description for rule
    :param protocol: protocol for rule
    :param action: action for rule
    :param source_ip_address: source IP address or subnet
    :param source_port: integer in [1, 65535] or range in a:b
    :param destination_ip_address: destination IP address or subnet
    :param destination_port: integer in [1, 65535] or range in a:b
    :param shared: boolean (default false)
    :param enabled: boolean (default true)
    :return: Rule object
    """
    # All keyword arguments are passed through verbatim as the rule body.
    body = {'firewall_rule': kwargs}
    rule = neutronclient(request).create_firewall_rule(
        body).get('firewall_rule')
    return Rule(rule)
def rule_list(request, **kwargs):
    # List firewall rules with each rule's owning policy attached.
    return _rule_list(request, expand_policy=True, **kwargs)


def rule_list_for_tenant(request, tenant_id, **kwargs):
    """Return a rule list available for the tenant.

    The list contains rules owned by the tenant and shared rules.
    This is required because Neutron returns all resources including
    all tenants if a user has admin role.
    """
    rules = rule_list(request, tenant_id=tenant_id, shared=False, **kwargs)
    shared_rules = rule_list(request, shared=True, **kwargs)
    return rules + shared_rules


def _rule_list(request, expand_policy, **kwargs):
    rules = neutronclient(request).list_firewall_rules(
        **kwargs).get('firewall_rules')
    if expand_policy and rules:
        # Resolve all policies once, then attach the owning Policy object
        # (or None) to each rule — one extra API call instead of one per rule.
        policies = _policy_list(request, expand_rule=False)
        policy_dict = SortedDict((p.id, p) for p in policies)
        for rule in rules:
            rule['policy'] = policy_dict.get(rule['firewall_policy_id'])
    return [Rule(r) for r in rules]
def rule_get(request, rule_id):
return _rule_get(request, rule_id, expand_policy=True)
def _rule_get(request, rule_id, expand_policy):
rule = neutronclient(request).show_firewall_rule(
rule_id).get('firewall_rule')
if expand_policy:
if rule['firewall_policy_id']:
rule['policy'] = _policy_get(request, rule['firewall_policy_id'],
expand_rule=False)
else:
rule['policy'] = None
return Rule(rule)
def rule_delete(request, rule_id):
    """Delete the firewall rule identified by rule_id."""
    neutronclient(request).delete_firewall_rule(rule_id)
def rule_update(request, rule_id, **kwargs):
    """Update the given firewall rule with kwargs and return the Rule."""
    updated = neutronclient(request).update_firewall_rule(
        rule_id, {'firewall_rule': kwargs})
    return Rule(updated.get('firewall_rule'))
def policy_create(request, **kwargs):
    """Create a firewall policy

    :param request: request context
    :param name: name for policy
    :param description: description for policy
    :param firewall_rules: ordered list of rules in policy
    :param shared: boolean (default false)
    :param audited: boolean (default false)
    :return: Policy object
    """
    request_body = {'firewall_policy': kwargs}
    created = neutronclient(request).create_firewall_policy(request_body)
    return Policy(created.get('firewall_policy'))
def policy_list(request, **kwargs):
    """Return all firewall policies with their rule lists expanded."""
    return _policy_list(request, expand_rule=True, **kwargs)
def policy_list_for_tenant(request, tenant_id, **kwargs):
    """Return a policy list available for the tenant.

    The list contains policies owned by the tenant and shared policies.
    This is required because Neutron returns all resources including
    all tenants if a user has admin role.
    """
    # Two queries: the tenant's own (non-shared) policies plus shared ones.
    policies = policy_list(request, tenant_id=tenant_id,
                           shared=False, **kwargs)
    shared_policies = policy_list(request, shared=True, **kwargs)
    return policies + shared_policies
def _policy_list(request, expand_rule, **kwargs):
    """List firewall policies; optionally expand rule ids into Rule objects."""
    raw_policies = neutronclient(request).list_firewall_policies(
        **kwargs).get('firewall_policies')
    if expand_rule and raw_policies:
        rules_by_id = SortedDict(
            (r.id, r) for r in _rule_list(request, expand_policy=False))
        for raw in raw_policies:
            raw['rules'] = [rules_by_id.get(rid)
                            for rid in raw['firewall_rules']]
    return [Policy(raw) for raw in raw_policies]
def policy_get(request, policy_id):
    """Return one firewall policy with its rule list expanded."""
    return _policy_get(request, policy_id, expand_rule=True)
def _policy_get(request, policy_id, expand_rule):
    """Fetch one firewall policy; optionally expand its rule ids into Rules."""
    raw = neutronclient(request).show_firewall_policy(
        policy_id).get('firewall_policy')
    if expand_rule:
        rule_ids = raw['firewall_rules']
        if rule_ids:
            fetched = _rule_list(request, expand_policy=False,
                                 firewall_policy_id=policy_id)
            rules_by_id = SortedDict((r.id, r) for r in fetched)
            raw['rules'] = [rules_by_id.get(rid) for rid in rule_ids]
        else:
            raw['rules'] = []
    return Policy(raw)
def policy_delete(request, policy_id):
    """Delete the firewall policy identified by policy_id."""
    neutronclient(request).delete_firewall_policy(policy_id)
def policy_update(request, policy_id, **kwargs):
    """Update the given firewall policy with kwargs and return the Policy."""
    updated = neutronclient(request).update_firewall_policy(
        policy_id, {'firewall_policy': kwargs})
    return Policy(updated.get('firewall_policy'))
def policy_insert_rule(request, policy_id, **kwargs):
    """Insert a rule into the policy at the position described by kwargs."""
    # NOTE(review): unlike the other helpers, the raw client response is
    # wrapped directly (no .get('firewall_policy')) -- presumably the insert
    # call already returns the policy dict; verify against neutronclient.
    policy = neutronclient(request).firewall_policy_insert_rule(
        policy_id, kwargs)
    return Policy(policy)
def policy_remove_rule(request, policy_id, **kwargs):
    """Remove from the policy the rule named in kwargs."""
    # NOTE(review): response is wrapped without .get('firewall_policy'),
    # mirroring policy_insert_rule above -- verify against neutronclient.
    policy = neutronclient(request).firewall_policy_remove_rule(
        policy_id, kwargs)
    return Policy(policy)
def firewall_create(request, **kwargs):
    """Create a firewall for specified policy

    :param request: request context
    :param name: name for firewall
    :param description: description for firewall
    :param firewall_policy_id: policy id used by firewall
    :param shared: boolean (default false)
    :param admin_state_up: boolean (default true)
    :return: Firewall object
    """
    body = {'firewall': kwargs}
    firewall = neutronclient(request).create_firewall(body).get('firewall')
    return Firewall(firewall)
def firewall_list(request, **kwargs):
    """Return all firewalls, each with its policy expanded."""
    return _firewall_list(request, expand_policy=True, **kwargs)
def firewall_list_for_tenant(request, tenant_id, **kwargs):
    """Return a firewall list available for the tenant.

    The list contains firewalls owned by the tenant and shared firewalls.
    This is required because Neutron returns all resources including
    all tenants if a user has admin role.
    """
    # NOTE(amotoki): At now 'shared' attribute is not visible in Neutron
    # and there is no way to query shared firewalls explicitly.
    # Thus this method returns the same as when tenant_id is specified,
    # but I would like to have this method for symmetry to firewall
    # rules and policies to avoid unnecessary confusion.
    return firewall_list(request, tenant_id=tenant_id, **kwargs)
def _firewall_list(request, expand_policy, **kwargs):
    """List firewalls; optionally attach each firewall's Policy object."""
    raw_firewalls = neutronclient(request).list_firewalls(
        **kwargs).get('firewalls')
    if expand_policy and raw_firewalls:
        policies_by_id = SortedDict(
            (p.id, p) for p in _policy_list(request, expand_rule=False))
        for raw in raw_firewalls:
            raw['policy'] = policies_by_id.get(raw['firewall_policy_id'])
    return [Firewall(raw) for raw in raw_firewalls]
def firewall_get(request, firewall_id):
    """Return one firewall with its policy expanded."""
    return _firewall_get(request, firewall_id, expand_policy=True)
def _firewall_get(request, firewall_id, expand_policy):
    """Fetch one firewall; optionally attach its policy as a Policy object."""
    raw = neutronclient(request).show_firewall(
        firewall_id).get('firewall')
    if expand_policy:
        policy_id = raw['firewall_policy_id']
        raw['policy'] = (_policy_get(request, policy_id, expand_rule=False)
                         if policy_id else None)
    return Firewall(raw)
def firewall_delete(request, firewall_id):
    """Delete the firewall identified by firewall_id."""
    neutronclient(request).delete_firewall(firewall_id)
def firewall_update(request, firewall_id, **kwargs):
    """Update the given firewall with kwargs and return the Firewall."""
    updated = neutronclient(request).update_firewall(
        firewall_id, {'firewall': kwargs})
    return Firewall(updated.get('firewall'))
@memoized.memoized
def firewall_unassociated_routers_list(request, tenant_id):
    """Return the tenant's routers not yet associated with any firewall,
    sorted by name (falling back to id).

    :param request: request context
    :param tenant_id: tenant whose routers and firewalls are inspected
    :return: sorted list of router objects
    """
    all_routers = neutron.router_list(request, tenant_id=tenant_id)
    tenant_firewalls = firewall_list_for_tenant(request, tenant_id=tenant_id)
    # A set makes the membership test below O(1); the original list made the
    # filter O(routers * associations) for no benefit.
    associated_router_ids = {rid
                             for fw in tenant_firewalls
                             for rid in getattr(fw, 'router_ids', [])}
    available_routers = [r for r in all_routers
                         if r.id not in associated_router_ids]
    return sorted(available_routers,
                  key=lambda router: router.name_or_id)
|
skidzo/sympy | refs/heads/master | sympy/core/evaluate.py | 96 | from .cache import clear_cache
from contextlib import contextmanager
class _global_evaluate(list):
    """ The cache must be cleared whenever global_evaluate is changed. """
    def __setitem__(self, key, value):
        # Drop all memoized results first: anything cached under the old
        # evaluation flag would be stale once the flag flips.
        clear_cache()
        super(_global_evaluate, self).__setitem__(key, value)
global_evaluate = _global_evaluate([True])
@contextmanager
def evaluate(x):
    """ Control automatic evaluation

    This context managers controls whether or not all SymPy functions evaluate
    by default.

    Note that much of SymPy expects evaluated expressions.  This functionality
    is experimental and is unlikely to function as intended on large
    expressions.

    Examples
    ========

    >>> from sympy.abc import x
    >>> from sympy.core.evaluate import evaluate
    >>> print(x + x)
    2*x
    >>> with evaluate(False):
    ...     print(x + x)
    x + x
    """
    old = global_evaluate[0]
    global_evaluate[0] = x
    try:
        yield
    finally:
        # Restore the previous setting even when the with-body raises;
        # without try/finally an exception left evaluation globally flipped.
        global_evaluate[0] = old
|
FRidh/python-acoustics | refs/heads/master | tests/test_bands.py | 3 | from __future__ import division
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
import pytest
from acoustics.bands import (octave, octave_high, octave_low, third,
third_low, third_high, third2oct,
_check_band_type)
def setup_module(bands):
    """Attach the standard octave / third-octave center frequencies (Hz)
    to the module so every test can compare against them."""
    bands.octave_real = np.array([16, 31.5, 63, 125, 250, 500, 1000,
                                  2000, 4000, 8000, 16000])
    bands.third_real = np.array([12.5, 16, 20, 25, 31.5, 40, 50, 63, 80,
                                 100, 125, 160, 200, 250, 315, 400, 500,
                                 630, 800, 1000, 1250, 1600, 2000, 2500,
                                 3150, 4000, 5000, 6300, 8000, 10000,
                                 12500, 16000, 20000])
def test_octave():
    """octave() reproduces the standard octave-band center frequencies."""
    generated = octave(16, 16000)
    real = octave_real
    assert_array_equal(generated, real)
def test_octave_high():
    """Upper octave band edges are the centers multiplied by sqrt(2)."""
    generated = octave_high(16, 16000)
    real = octave_real * np.sqrt(2)
    assert_array_almost_equal(generated, real)
def test_octave_low():
    """Lower octave band edges are the centers divided by sqrt(2)."""
    generated = octave_low(16, 16000)
    # Was ``real = real = octave_real / np.sqrt(2)`` -- a duplicated
    # assignment typo with no effect; a single assignment is intended.
    real = octave_real / np.sqrt(2)
    assert_array_almost_equal(generated, real)
def test_third():
    """third() reproduces the standard third-octave center frequencies."""
    generated = third(12.5, 20000)
    real = third_real
    assert_array_equal(generated, real)
def test_third_high():
    """Upper third-octave band edges are the centers times 2**(1/6)."""
    generated = third_high(12.5, 20000)
    real = third_real * 2**(1/6)
    assert_array_almost_equal(generated, real)
def test_third_low():
    """Lower third-octave band edges are the centers divided by 2**(1/6)."""
    generated = third_low(12.5, 20000)
    real = third_real / 2**(1/6)
    assert_array_almost_equal(generated, real)
#def test_third2oct():
#levels = np.array([100, 95, 80, 55, 65, 85, 75, 70, 90, 95, 105, 110])
#generated = third2oct(levels)
#real = np.array([101.22618116, 85.04751156, 90.17710468, 111.29641738])
#assert_array_almost_equal(generated, real)
def test_third2oct():
    """Nine equal 10 dB third-octave levels combine into three octave
    levels of 10 + 10*log10(3) ~= 14.77 dB."""
    levels = np.full(9, 10.0)
    generated = third2oct(levels)
    assert_array_almost_equal(generated, np.full(3, 14.77121255))
def test_third2oct_2darray_axis0():
    """Combining along axis 0 keeps the 9 columns; the 100 dB row
    dominates the energetic sum of (1, 10, 100) dB."""
    levels = np.array([np.full(9, 1.0),
                       np.full(9, 10.0),
                       np.full(9, 100.0)])
    generated = third2oct(levels, axis=0)
    assert_array_almost_equal(generated, np.full(9, 100.0))
def test_third2oct_2darray_axis1():
    """Combining along axis 1 folds 9 thirds into 3 octaves per row,
    each level raised by 10*log10(3) ~= 4.77 dB."""
    levels = np.array([np.full(9, 1.0),
                       np.full(9, 10.0),
                       np.full(9, 100.0)])
    generated = third2oct(levels, axis=1)
    expected = np.array([np.full(3, 5.77121255),
                         np.full(3, 14.77121255),
                         np.full(3, 104.77121255)])
    assert_array_almost_equal(generated, expected)
def test_third2oct_3darray_axis0():
    """Collapsing the length-3 leading axis of a (3, 4, 5) array of ones
    yields a (4, 5) array of 1 + 10*log10(3) ~= 5.77 dB."""
    levels = np.ones((3, 4, 5))
    generated = third2oct(levels, axis=0)
    assert_array_almost_equal(generated, np.full((4, 5), 5.77121255))
def test_third2oct_2darray():
    """Two identical rows of 12 third-octave levels each collapse to the
    same 4 octave levels."""
    third_row = [100, 95, 80, 55, 65, 85, 75, 70, 90, 95, 105, 110]
    levels = np.array([third_row, third_row])
    generated = third2oct(levels, axis=1)
    octave_row = [101.22618116, 85.04751156, 90.17710468, 111.29641738]
    assert_array_almost_equal(generated, np.array([octave_row, octave_row]))
@pytest.mark.parametrize("freqs, expected", [
    (np.array([125, 250, 500]), 'octave'),
    (np.array([12.5 , 16, 20]), 'third'),
    (np.array([125, 250, 1000, 4000]), 'octave-unsorted'),
    (np.array([12.5, 800, 500, 5000]), 'third-unsorted'),
    (np.array([100, 200, 300, 400]), None),
])
def test__check_band_type(freqs, expected):
    """_check_band_type classifies a frequency axis (None if unrecognized)."""
    band_type = _check_band_type(freqs)
    assert_array_equal(band_type, expected)
def teardown_module(bands):
    # Nothing to clean up; present to mirror setup_module.
    pass
|
ShakedY/ai-project | refs/heads/master | lama/translate/pddl/predicates.py | 6 | #######################################################################
#
# Author: Malte Helmert (helmert@informatik.uni-freiburg.de)
# (C) Copyright 2003-2004 Malte Helmert
#
# This file is part of LAMA.
#
# LAMA is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the license, or (at your option) any later version.
#
# LAMA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import pddl_types
class Predicate(object):
    """A PDDL predicate: a name plus a list of (typed) argument terms."""

    def __init__(self, name, arguments):
        self.name = name
        self.arguments = arguments

    @staticmethod
    def parse(alist):
        """Build a Predicate from a parsed s-expression: [name, arg, ...]."""
        predicate_name = alist[0]
        typed_args = pddl_types.parse_typed_list(alist[1:],
                                                 only_variables=True)
        return Predicate(predicate_name, typed_args)

    def __str__(self):
        arg_text = ", ".join(map(str, self.arguments))
        return "%s(%s)" % (self.name, arg_text)
|
Maccimo/intellij-community | refs/heads/master | python/testData/keywordCompletion/elif.py | 83 | if True:
a = 1
eli<caret> |
edersohe/restor | refs/heads/master | restor.py | 1 | import tornado.ioloop
import tornado.web
import traceback
import tornado.httputil
import tornado.escape
def action_routes(prefix, id_regex='[0-9a-f]+'):
    """Build the URL regex for a RESTful resource mounted at *prefix*.

    Captures three named groups consumed by ResourceHandler:
    _id (a resource id matching id_regex), _arg1 ('edit' or 'delete')
    and _arg2 ('new').
    """
    id_part = "(?P<_id>%s)" % id_regex
    edit_delete_part = "(?:/(?P<_arg1>edit|delete))?"
    new_part = "(?P<_arg2>new)"
    tail = "(?:/(?:(?:%s%s)|%s))?" % (id_part, edit_delete_part, new_part)
    return r'' + prefix + tail
class ResourceHandler(tornado.web.RequestHandler):
    """RESTful resource handler dispatching on the named URL groups that
    action_routes() produces: _id (resource id), _arg1 ('edit'/'delete')
    and _arg2 ('new').

    Subclasses override index/new/create/read/edit/update/destroy; every
    action defaults to 405 Method Not Allowed.  JSON clients (Accept:
    application/json) get responses wrapped in a standard envelope.
    """

    def get(self, *args, _id=None, _arg1=None, _arg2=None, **kwargs):
        # GET /           -> index
        # GET /<id>       -> read
        # GET /new        -> new (creation form)
        # GET /<id>/edit  -> edit (edit form)
        if not _id and not _arg2 and not _arg1:
            self.index(*args, **kwargs)
        elif _id and not _arg2 and not _arg1:
            self.read(_id, *args, **kwargs)
        elif not _id and not _arg1 and _arg2 == 'new':
            self.new(*args, **kwargs)
        elif _id and not _arg2 and _arg1 == 'edit':
            self.edit(_id, *args, **kwargs)
        else:
            raise tornado.web.HTTPError(404)

    def post(self, *args, _id=None, _arg1=None, _arg2=None, **kwargs):
        # POST /             -> create
        # POST /<id>         -> update
        # POST /<id>/delete  -> destroy (HTML-form-friendly delete)
        if not _id and not _arg1 and not _arg2:
            self.create(*args, **kwargs)
        elif _id and not _arg1 and not _arg2:
            self.update(_id, *args, **kwargs)
        elif _id and not _arg2 and _arg1 == 'delete':
            self.destroy(_id, *args, **kwargs)
        else:
            raise tornado.web.HTTPError(404)

    def put(self, *args, _id=None, _arg1=None, _arg2=None, **kwargs):
        # PUT /<id> -> update
        if _id and not _arg1 and not _arg2:
            self.update(_id, *args, **kwargs)
        else:
            raise tornado.web.HTTPError(404)

    def delete(self, *args, _id=None, _arg1=None, _arg2=None, **kwargs):
        # DELETE /<id> -> destroy
        if _id and not _arg1 and not _arg2:
            self.destroy(_id, *args, **kwargs)
        else:
            raise tornado.web.HTTPError(404)

    # --- actions: override in subclasses; all default to 405 -------------

    def index(self, *args, **kwargs):
        # Demo payload; real subclasses replace this.
        self.write({"hola": ["hola", 1, {"option": True}, True]})
        # self.write("Hola Mundo", iter=True)
        # raise tornado.web.HTTPError(405)

    def new(self, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def create(self, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def read(self, _id, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def edit(self, _id, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def update(self, _id, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def destroy(self, _id, *args, **kwargs):
        raise tornado.web.HTTPError(405)

    def write_error(self, status_code, **kwargs):
        """Emit errors inside the JSON envelope for JSON clients."""
        if "application/json" in self.request.headers.get("Accept", ""):
            self.set_header('Content-Type', 'application/json')
            response = {
                'error': True,
                'response': None,
                'code': status_code,
                'status': self._reason,
            }
            if self.settings.get("serve_traceback") and "exc_info" in kwargs:
                exc_info = traceback.format_exception(*kwargs["exc_info"])
                response['debug'] = exc_info
            self.finish(response)
        else:
            super(ResourceHandler, self).write_error(status_code, **kwargs)

    def write(self, chunk):
        """Wrap successful JSON responses in the envelope; list payloads are
        streamed element by element instead of being encoded in one go."""
        status_code = self.get_status()
        if status_code == 200 and \
                "application/json" in self.request.headers.get("Accept", ""):
            self.set_header('Content-Type', 'application/json')
            status_code = self.get_status()
            response = {
                'error': (status_code != 200 or None),
                'code': status_code,
                'status': tornado.httputil.responses[status_code],
            }
            # Fix: was ``isinstance(chunk, basestring)`` -- ``basestring``
            # does not exist in Python 3 (this module already requires
            # Python 3 for its keyword-only arguments), so any list/tuple
            # payload raised NameError here.
            if isinstance(chunk, (list, tuple)) and not isinstance(chunk, str):
                # Fix: the envelope key was misspelled "reponse" in this
                # branch, diverging from the dict branch below.
                super(ResourceHandler, self).write(
                    tornado.escape.json_encode(response)[:-1] + ',"response": [')
                for i in range(len(chunk)):
                    if i > 0:
                        super(ResourceHandler, self).write(
                            ", " + tornado.escape.json_encode(chunk[i]))
                    else:
                        super(ResourceHandler, self).write(
                            tornado.escape.json_encode(chunk[i]))
                super(ResourceHandler, self).write("]}")
            else:
                response["response"] = chunk
                super(ResourceHandler, self).write(response)
        else:
            super(ResourceHandler, self).write(chunk)
# Wire the resource routes into a Tornado application (debug enables
# autoreload and tracebacks).
application = tornado.web.Application([
    (action_routes('/animal'), ResourceHandler)
], debug=True)
# Standalone entry point: serve on port 8888.
if __name__ == "__main__":
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
|
glaubitz/fs-uae-debian | refs/heads/master | arcade/OpenGL/GL/MESA/__init__.py | 1729 | """OpenGL Extensions""" |
metapolator/mutatormathtools | refs/heads/master | python_modules/lib/python/robofab/test/test_dialogs.py | 10 | import robofab.interface.all.dialogs
reload(robofab.interface.all.dialogs)
from robofab.interface.all.dialogs import *
import unittest
__all__ = [
"AskString", #x
"AskYesNoCancel", #x
"FindGlyph",
"GetFile", #x
"GetFolder", #x
"GetFileOrFolder", #x
"Message", #x
"OneList",
"PutFile", #x
"SearchList",
"SelectFont",
"SelectGlyph",
"TwoChecks",
"TwoFields",
"ProgressBar",
]
class DialogRunner(object):
	"""Interactively exercise every robofab dialog on the current platform.

	Each dialog is invoked inside try/except NotImplementedError so that
	platforms lacking a dialog simply report it instead of aborting the run.
	(Python 2 code -- robofab predates Python 3.)
	"""
	def __init__(self):
		"""Run the whole dialog sequence immediately on construction."""
		# Template strings; %s is filled with the dialog name under test.
		prompt = "The prompt for %s."
		message = "The message for %s."
		title = "The title for %s."
		informativeText = "The informative text for %s."
		fileTypes = ['ufo']
		fileName = "The_filename.txt"
		# Test fonts for the (currently disabled) font/glyph dialogs.
		self.fonts = fonts = [self.makeTestFont(n) for n in range(4)]
		t = "AskString"
		try:
			print "About to try", t
			print "\t>>>", AskString(
				message=prompt%t,
				value='',
				title=title%t
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "AskYesNoCancel"
		try:
			print "About to try", t
			print "\t>>>", AskYesNoCancel(
				message=prompt%t+" default set to 0",
				title=title%t,
				default=0,
				informativeText=informativeText%t
			)
			print "\t>>>", AskYesNoCancel(
				message=prompt%t+" default set to 1",
				title=title%t,
				default=1,
				informativeText=informativeText%t
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "GetFile"
		try:
			print "About to try", t
			print "\t>>>", GetFile(
				message=message%t+" Only fileTypes "+`fileTypes`,
				title=title%t,
				directory=None,
				fileName=fileName,
				allowsMultipleSelection=False,
				fileTypes=fileTypes
			)
			print "\t>>>", GetFile(
				message=message%t+" All filetypes, allow multiple selection.",
				title=title%t,
				directory=None,
				fileName=fileName,
				allowsMultipleSelection=True,
				fileTypes=None
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "GetFolder"
		try:
			print "About to try", t
			print "\t>>>", GetFolder(
				message=message%t,
				title=title%t,
				directory=None,
				allowsMultipleSelection=False
			)
			print "\t>>>", GetFolder(
				message=message%t + " Allow multiple selection.",
				title=title%t,
				directory=None,
				allowsMultipleSelection=True
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "GetFileOrFolder"
		try:
			print "About to try", t
			print "\t>>>", GetFileOrFolder(
				message=message%t+" Only fileTypes "+`fileTypes`,
				title=title%t,
				directory=None,
				fileName=fileName,
				allowsMultipleSelection=False,
				fileTypes=fileTypes
			)
			print "\t>>>", GetFileOrFolder(
				message=message%t + " Allow multiple selection.",
				title=title%t,
				directory=None,
				fileName=fileName,
				allowsMultipleSelection=True,
				fileTypes=None
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "Message"
		try:
			print "About to try", t
			print "\t>>>", Message(
				message=message%t,
				title=title%t,
				informativeText=informativeText%t
			)
		except NotImplementedError:
			print t, "is not implemented."
		t = "PutFile"
		try:
			print "About to try", t
			print "\t>>>", PutFile(
				message=message%t,
				fileName=fileName,
			)
		except NotImplementedError:
			print t, "is not implemented."
		# The font/glyph selection dialogs are disabled for now:
		# t = "SelectFont"
		# try:
		#	print "About to try", t
		# 	print "\t>>>", SelectFont(
		# 		message=message%t,
		# 		title=title%t,
		# 		allFonts=fonts,
		# 		)
		# except NotImplementedError:
		# 	print t, "is not implemented."
		# t = 'SelectGlyph'
		# try:
		#	print "About to try", t
		# 	print "\t>>>", SelectGlyph(
		# 		font=fonts[0],
		# 		message=message%t,
		# 		title=title%t,
		# 		)
		# except NotImplementedError:
		# 	print t, "is not implemented."
		print 'No more tests.'
	def makeTestFont(self, number):
		"""Build a small in-memory RFont ("TestFamily weight<number>")
		containing three simple box glyphs (A, B, C)."""
		from robofab.objects.objectsRF import RFont as _RFont
		f = _RFont()
		f.info.familyName = "TestFamily"
		f.info.styleName = "weight%d"%number
		f.info.postscriptFullName = "%s %s"%(f.info.familyName, f.info.styleName)
		# make some glyphs
		for name in ['A', 'B', 'C']:
			g = f.newGlyph(name)
			pen = g.getPen()
			pen.moveTo((0,0))
			pen.lineTo((500, 0))
			pen.lineTo((500, 800))
			pen.lineTo((0, 800))
			pen.closePath()
		return f
class DialogTests(unittest.TestCase):
	"""unittest wrapper that drives DialogRunner on the active platform."""
	def setUp(self):
		# The dialogs module's own self-test runs first.
		from robofab.interface.all.dialogs import test
		test()
	def tearDown(self):
		pass
	def testDialogs(self):
		"""Run DialogRunner -- through vanilla's harness when running
		under mac vanilla without a host application, natively otherwise."""
		import robofab.interface.all.dialogs
		dialogModuleName = robofab.interface.all.dialogs.platformApplicationModuleName
		application = robofab.interface.all.dialogs.application
		if application is None and dialogModuleName == "dialogs_mac_vanilla":
			# in vanilla, but not in a host application, run with executeVanillaTest
			print
			print "I'm running these tests with executeVanillaTest"
			from vanilla.test.testTools import executeVanillaTest
			executeVanillaTest(DialogRunner)
		else:
			print
			print "I'm running these tests natively in"
			DialogRunner()
if __name__ == "__main__":
	# Run through robofab's shared test runner.
	from robofab.test.testSupport import runTests
	runTests()
|
egoldchain/egoldchain-master | refs/heads/master | test/functional/segwit.py | 17 | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the SegWit changeover logic."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, CTransaction, CTxIn, COutPoint, CTxOut, COIN, ToHex, FromHex
from test_framework.address import script_to_p2sh, key_to_p2pkh
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE
from io import BytesIO
NODE_0 = 0
NODE_1 = 1
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
# Create a scriptPubKey corresponding to either a P2WPKH output for the
# given pubkey, or a P2WSH output of a 1-of-1 multisig for the given
# pubkey. Returns the hex encoding of the scriptPubKey.
def witness_script(use_p2wsh, pubkey):
    """Return the hex scriptPubKey: P2WPKH for pubkey, or -- when
    use_p2wsh -- P2WSH wrapping a 1-of-1 multisig of pubkey."""
    if (use_p2wsh == False):
        # P2WPKH instead
        pubkeyhash = hash160(hex_str_to_bytes(pubkey))
        pkscript = CScript([OP_0, pubkeyhash])
    else:
        # 1-of-1 multisig
        witness_program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
        scripthash = sha256(witness_program)
        pkscript = CScript([OP_0, scripthash])
    return bytes_to_hex_str(pkscript)
# Return a transaction (in hex) that spends the given utxo to a segwit output,
# optionally wrapping the segwit output using P2SH.
def create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount):
    """Return (hex) an unsigned tx spending utxo to a segwit output for
    pubkey; the output is additionally wrapped in P2SH when encode_p2sh."""
    pkscript = hex_str_to_bytes(witness_script(use_p2wsh, pubkey))
    if (encode_p2sh):
        p2sh_hash = hash160(pkscript)
        pkscript = CScript([OP_HASH160, p2sh_hash, OP_EQUAL])
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), b""))
    tx.vout.append(CTxOut(int(amount*COIN), pkscript))
    return ToHex(tx)
# Create a transaction spending a given utxo to a segwit output corresponding
# to the given pubkey: use_p2wsh determines whether to use P2WPKH or P2WSH;
# encode_p2sh determines whether to wrap in P2SH.
# sign=True will have the given node sign the transaction.
# insert_redeem_script will be added to the scriptSig, if given.
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
    """Build a tx spending utxo to a segwit output for pubkey, then
    broadcast it via node; returns the txid.

    When sign is True the node signs first; otherwise the tx is sent as-is,
    optionally with insert_redeem_script appended to the scriptSig.
    """
    tx_to_witness = create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount)
    if (sign):
        signed = node.signrawtransaction(tx_to_witness)
        # Bug fix: this previously read ``len(["errors"]) == 0`` -- the
        # length of a one-element literal list, always False -- so signing
        # errors were silently tolerated whenever the key was present.
        assert("errors" not in signed or len(signed["errors"]) == 0)
        return node.sendrawtransaction(signed["hex"])
    else:
        if (insert_redeem_script):
            tx = FromHex(CTransaction(), tx_to_witness)
            tx.vin[0].scriptSig += CScript([hex_str_to_bytes(insert_redeem_script)])
            tx_to_witness = ToHex(tx)
        return node.sendrawtransaction(tx_to_witness)
def getutxo(txid):
    """Wrap a txid into the minimal utxo dict (output index 0) that the
    helpers above expect."""
    return {"txid": txid, "vout": 0}
def find_unspent(node, min_value):
    """Return the first wallet UTXO with amount >= min_value, else None."""
    candidates = (u for u in node.listunspent()
                  if u['amount'] >= min_value)
    return next(candidates, None)
class SegWitTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [["-walletprematurewitness", "-rpcserialversion=0"],
["-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-rpcserialversion=1"],
["-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness"]]
def setup_network(self):
super().setup_network()
connect_nodes(self.nodes[0], 2)
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
assert_raises_jsonrpc(-26, error_msg, send_to_witness, 1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
assert_raises_jsonrpc(-1, "CreateNewBlock: TestBlockValidity failed", node.generate, 1)
sync_blocks(self.nodes)
def run_test(self):
self.nodes[0].generate(161) #block 161
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiaddress = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]])
self.nodes[i].addwitnessaddress(newaddress)
self.nodes[i].addwitnessaddress(multiaddress)
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(self.nodes)
# Make sure all nodes recognize the transactions as theirs
assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50)
assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999"))
assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999"))
self.nodes[0].generate(260) #block 423
sync_blocks(self.nodes)
self.log.info("Verify default node can't accept any witness format txs before fork")
# unsigned, no scriptsig
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
# unsigned with redeem script
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))
# signed
self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V1][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V1][0], True)
self.log.info("Verify witness txs are skipped for mining before the fork")
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
# TODO: An old node would see these txs without witnesses and be able to mine them
self.log.info("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork")
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)
self.log.info("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork")
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, witness_script(False, self.pubkey[2])) #block 430
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, witness_script(True, self.pubkey[2])) #block 431
self.log.info("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
sync_blocks(self.nodes)
assert_equal(len(self.nodes[2].getrawmempool()), 0)
segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
assert_equal(len(segwit_tx_list), 5)
self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False))
assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False))
for i in range(len(segwit_tx_list)):
tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness()))
self.log.info("Verify witness txs without witness data are invalid after the fork")
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False)
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][2], False)
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], False, witness_script(False, self.pubkey[2]))
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False, witness_script(True, self.pubkey[2]))
self.log.info("Verify default node can now use witness txs")
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435
self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data
assert(tmpl['weightlimit'] == 4000000)
assert(tmpl['sigoplimit'] == 80000)
assert(tmpl['transactions'][0]['txid'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 8)
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
self.log.info("Non-segwit miners are able to use GBT response after activation.")
# Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
# tx2 (segwit input, paying to a non-segwit output) ->
# tx3 (non-segwit input, paying to a non-segwit output).
# tx1 is allowed to appear in the block, but no others.
txid1 = send_to_witness(1, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
hex_tx = self.nodes[0].gettransaction(txid)['hex']
tx = FromHex(CTransaction(), hex_tx)
assert(tx.wit.is_null()) # This should not be a segwit input
assert(txid1 in self.nodes[0].getrawmempool())
# Now create tx2, which will spend from txid1.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
tx.vout.append(CTxOut(int(49.99*COIN), CScript([OP_TRUE])))
tx2_hex = self.nodes[0].signrawtransaction(ToHex(tx))['hex']
txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
tx = FromHex(CTransaction(), tx2_hex)
assert(not tx.wit.is_null())
# Now create tx3, which will spend from txid2
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
tx.vout.append(CTxOut(int(49.95*COIN), CScript([OP_TRUE]))) # Huge fee
tx.calc_sha256()
txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
assert(tx.wit.is_null())
assert(txid3 in self.nodes[0].getrawmempool())
# Now try calling getblocktemplate() without segwit support.
template = self.nodes[0].getblocktemplate()
# Check that tx1 is the only transaction of the 3 in the template.
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid2 not in template_txids and txid3 not in template_txids)
assert(txid1 in template_txids)
# Check that running with segwit support results in all 3 being included.
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid1 in template_txids)
assert(txid2 in template_txids)
assert(txid3 in template_txids)
# Mine a block to clear the gbt cache again.
self.nodes[0].generate(1)
self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent")
# Some public keys to be used later
pubkeys = [
"0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
"02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
"02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
"036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
"0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
"0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
]
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
assert ((self.nodes[0].validateaddress(uncompressed_spendable_address[0])['iscompressed'] == False))
assert ((self.nodes[0].validateaddress(compressed_spendable_address[0])['iscompressed'] == True))
self.nodes[0].importpubkey(pubkeys[0])
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
self.nodes[0].importpubkey(pubkeys[1])
compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
self.nodes[0].importpubkey(pubkeys[2])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]
spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress
spendable_after_importaddress = [] # These outputs should be seen after importaddress
solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable
unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]]))
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]]))
unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"]
# Test multisig_without_privkey
# We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
# Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.
multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])
script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with compressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with uncompressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# witness with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# Multisig without private is not seen after addmultisigaddress, but seen after importaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
solvable_after_importaddress.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# witness with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
op1 = CScript([OP_1])
op0 = CScript([OP_0])
# 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)]
unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
p2wshop1 = CScript([OP_0, sha256(op1)])
unsolvable_after_importaddress.append(unsolvablep2pkh)
unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
unsolvable_after_importaddress.append(p2wshop1)
unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
unsolvable_after_importaddress.append(p2shop0)
spendable_txid = []
solvable_txid = []
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
importlist = []
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
bare = hex_str_to_bytes(v['hex'])
importlist.append(bytes_to_hex_str(bare))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
else:
pubkey = hex_str_to_bytes(v['pubkey'])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
importlist.append(bytes_to_hex_str(p2pk))
importlist.append(bytes_to_hex_str(p2pkh))
importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))
importlist.append(bytes_to_hex_str(unsolvablep2pkh))
importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
importlist.append(bytes_to_hex_str(op1))
importlist.append(bytes_to_hex_str(p2wshop1))
for i in importlist:
# import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
# exceptions and continue.
try:
self.nodes[0].importaddress(i,"",False,True)
except JSONRPCException as exp:
assert_equal(exp.error["message"], "The wallet already contains the private key for this address or script")
assert_equal(exp.error["code"], -4)
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that no witness address should be returned by unsolvable addresses
for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
assert_raises_jsonrpc(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# addwitnessaddress should return a witness addresses even if keys are not in the wallet
self.nodes[0].addwitnessaddress(multisig_without_privkey_address)
for i in compressed_spendable_address + compressed_solvable_address:
witaddress = self.nodes[0].addwitnessaddress(i)
# addwitnessaddress should return the same address if it is a known P2SH-witness address
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# Repeat some tests. This time we don't add witness scripts with importaddress
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
self.nodes[0].importpubkey(pubkeys[5])
compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
self.nodes[0].importpubkey(pubkeys[6])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
spendable_after_addwitnessaddress = [] # These outputs should be seen after importaddress
solvable_after_addwitnessaddress=[] # These outputs should be seen after importaddress but not spendable
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]]))
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]]))
premature_witaddress = []
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
premature_witaddress.append(script_to_p2sh(p2wpkh))
for i in uncompressed_spendable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# P2WSH multisig without private key are seen after addwitnessaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after addwitnessaddress
solvable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
premature_witaddress.append(script_to_p2sh(p2wpkh))
self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
# premature_witaddress are not accepted until the script is added with addwitnessaddress first
for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
# This will raise an exception
assert_raises_jsonrpc(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# after importaddress it should pass addwitnessaddress
v = self.nodes[0].validateaddress(compressed_solvable_address[1])
self.nodes[0].importaddress(v['hex'],"",False,True)
for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
witaddress = self.nodes[0].addwitnessaddress(i)
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress, 1))
self.mine_and_test_listunspent(unseen_anytime, 0)
# Check that spendable outputs are really spendable
self.create_and_mine_tx_from_txids(spendable_txid)
# import all the private keys so solvable addresses become spendable
self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
self.create_and_mine_tx_from_txids(solvable_txid)
def mine_and_test_listunspent(self, script_list, ismine):
    """Send one output to each script in script_list, mine the tx, and
    verify how listunspent reports those outputs.

    ismine: 2 -> every output must be reported and spendable
            1 -> every output must be reported, but none spendable (watch-only)
            0 -> no output may be reported at all
    Returns the txid of the funding transaction.
    """
    utxo = find_unspent(self.nodes[0], 50)
    tx = CTransaction()
    # txids are plain hex strings: parse with an explicit base instead of
    # gluing an '0x' prefix on and using base auto-detection.
    tx.vin.append(CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout'])))
    for script in script_list:
        tx.vout.append(CTxOut(10000000, script))
    tx.rehash()
    signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
    txid = self.nodes[0].sendrawtransaction(signresults, True)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
    watchcount = 0
    spendcount = 0
    for unspent in self.nodes[0].listunspent():
        if unspent['txid'] == txid:
            watchcount += 1
            if unspent['spendable']:  # truthiness instead of '== True'
                spendcount += 1
    if ismine == 2:
        assert_equal(spendcount, len(script_list))
    elif ismine == 1:
        assert_equal(watchcount, len(script_list))
        assert_equal(spendcount, 0)
    else:
        assert_equal(watchcount, 0)
    return txid
def p2sh_address_to_script(self, v):
    """Derive the four script encodings of a P2SH destination.

    v is a validateaddress() result.  Returns
    [bare, p2sh, p2wsh, p2sh_p2wsh] in that order.
    """
    redeem_script = CScript(hex_str_to_bytes(v['hex']))
    script_pub_key = CScript(hex_str_to_bytes(v['scriptPubKey']))
    witness_script = CScript([OP_0, sha256(redeem_script)])
    nested_witness = CScript([OP_HASH160, hash160(witness_script), OP_EQUAL])
    return [redeem_script, script_pub_key, witness_script, nested_witness]
def p2pkh_address_to_script(self, v):
    """Derive the ten standard script encodings of a P2PKH destination.

    v is a validateaddress() result.  Order of the returned list:
    [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh,
     p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
    """
    key = hex_str_to_bytes(v['pubkey'])

    def wrap_p2sh(script):
        # Wrap any script in a pay-to-script-hash output.
        return CScript([OP_HASH160, hash160(script), OP_EQUAL])

    def wrap_p2wsh(script):
        # Wrap any script in a version-0 pay-to-witness-script-hash output.
        return CScript([OP_0, sha256(script)])

    p2wpkh = CScript([OP_0, hash160(key)])
    p2pk = CScript([key, OP_CHECKSIG])
    p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
    p2wsh_p2pk = wrap_p2wsh(p2pk)
    p2wsh_p2pkh = wrap_p2wsh(p2pkh)
    return [p2wpkh, wrap_p2sh(p2wpkh), p2pk, p2pkh, wrap_p2sh(p2pk),
            wrap_p2sh(p2pkh), p2wsh_p2pk, p2wsh_p2pkh,
            wrap_p2sh(p2wsh_p2pk), wrap_p2sh(p2wsh_p2pkh)]
def create_and_mine_tx_from_txids(self, txids, success=True):
    """Spend every output of every transaction in txids with a single
    transaction, then mine it and sync the nodes.

    NOTE(review): the 'success' parameter is never used in the body; it is
    kept (with its default) for interface compatibility with callers.
    """
    tx = CTransaction()
    for txid in txids:
        txtmp = CTransaction()
        txraw = self.nodes[0].getrawtransaction(txid)
        f = BytesIO(hex_str_to_bytes(txraw))
        txtmp.deserialize(f)
        for j in range(len(txtmp.vout)):
            # txids are hex strings; parse with an explicit base 16 rather
            # than prefixing '0x' and using base auto-detection.
            tx.vin.append(CTxIn(COutPoint(int(txid, 16), j)))
    # One dust-free output; the entire input value becomes fee.
    tx.vout.append(CTxOut(0, CScript()))
    tx.rehash()
    signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
    self.nodes[0].sendrawtransaction(signresults, True)
    self.nodes[0].generate(1)
    sync_blocks(self.nodes)
# Run the segwit functional test when invoked as a script.
if __name__ == '__main__':
    SegWitTest().main()
|
rodgomes/django-waffle | refs/heads/master | waffle/tests/test_testutils.py | 4 | from decimal import Decimal
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase, RequestFactory
import waffle
from waffle.models import Switch, Flag, Sample
from waffle.testutils import override_switch, override_flag, override_sample
class OverrideSwitchTests(TestCase):
    """Exercise override_switch as a context manager and as a decorator."""

    def _check_overrides(self):
        # Overriding on/off must control switch_is_active regardless of the
        # switch's state (or absence) in the database.
        with override_switch('foo', active=True):
            assert waffle.switch_is_active('foo')
        with override_switch('foo', active=False):
            assert not waffle.switch_is_active('foo')

    def test_switch_existed_and_was_active(self):
        Switch.objects.create(name='foo', active=True)
        self._check_overrides()
        # The pre-existing switch keeps its original 'active' value.
        assert Switch.objects.get(name='foo').active

    def test_switch_existed_and_was_NOT_active(self):
        Switch.objects.create(name='foo', active=False)
        self._check_overrides()
        # The pre-existing switch keeps its original 'active' value.
        assert not Switch.objects.get(name='foo').active

    def test_new_switch(self):
        assert not Switch.objects.filter(name='foo').exists()
        self._check_overrides()
        # A switch created only for the override is cleaned up again.
        assert not Switch.objects.filter(name='foo').exists()

    def test_as_decorator(self):
        assert not Switch.objects.filter(name='foo').exists()

        @override_switch('foo', active=True)
        def check_enabled():
            assert waffle.switch_is_active('foo')

        check_enabled()

        @override_switch('foo', active=False)
        def check_disabled():
            assert not waffle.switch_is_active('foo')

        check_disabled()
        # Decorated runs must not leave a switch row behind.
        assert not Switch.objects.filter(name='foo').exists()

    def test_restores_after_exception(self):
        Switch.objects.create(name='foo', active=True)

        def boom():
            with override_switch('foo', active=False):
                raise RuntimeError("Trying to break")

        with self.assertRaises(RuntimeError):
            boom()
        # The original value must be restored even on the error path.
        assert Switch.objects.get(name='foo').active

    def test_restores_after_exception_in_decorator(self):
        Switch.objects.create(name='foo', active=True)

        @override_switch('foo', active=False)
        def boom():
            raise RuntimeError("Trying to break")

        with self.assertRaises(RuntimeError):
            boom()
        # The original value must be restored even on the error path.
        assert Switch.objects.get(name='foo').active
def req():
    """Build a GET request carrying an anonymous user, for flag checks."""
    request = RequestFactory().get('/')
    request.user = AnonymousUser()
    return request
class OverrideFlagTests(TestCase):
    """Exercise override_flag against active, inactive, null and missing flags."""

    def _check_overrides(self):
        # Overriding on/off must control flag_is_active regardless of the
        # flag's state (or absence) in the database.
        with override_flag('foo', active=True):
            assert waffle.flag_is_active(req(), 'foo')
        with override_flag('foo', active=False):
            assert not waffle.flag_is_active(req(), 'foo')

    def test_flag_existed_and_was_active(self):
        Flag.objects.create(name='foo', everyone=True)
        self._check_overrides()
        # The original 'everyone' setting survives the overrides.
        assert Flag.objects.get(name='foo').everyone

    def test_flag_existed_and_was_inactive(self):
        Flag.objects.create(name='foo', everyone=False)
        self._check_overrides()
        # The original 'everyone' setting survives the overrides.
        assert Flag.objects.get(name='foo').everyone is False

    def test_flag_existed_and_was_null(self):
        Flag.objects.create(name='foo', everyone=None)
        self._check_overrides()
        # The original 'everyone' setting survives the overrides.
        assert Flag.objects.get(name='foo').everyone is None

    def test_flag_did_not_exist(self):
        assert not Flag.objects.filter(name='foo').exists()
        self._check_overrides()
        # A flag created only for the override is removed afterwards.
        assert not Flag.objects.filter(name='foo').exists()
class OverrideSampleTests(TestCase):
    """Exercise override_sample against existing and missing samples.

    Uses assertEqual throughout: assertEquals is a deprecated alias that
    was removed in Python 3.12.
    """

    def test_sample_existed_and_was_100(self):
        Sample.objects.create(name='foo', percent='100.0')
        with override_sample('foo', active=True):
            assert waffle.sample_is_active('foo')
        with override_sample('foo', active=False):
            assert not waffle.sample_is_active('foo')
        # The original percent value is restored after the overrides.
        self.assertEqual(Decimal('100.0'),
                         Sample.objects.get(name='foo').percent)

    def test_sample_existed_and_was_0(self):
        Sample.objects.create(name='foo', percent='0.0')
        with override_sample('foo', active=True):
            assert waffle.sample_is_active('foo')
        with override_sample('foo', active=False):
            assert not waffle.sample_is_active('foo')
        # The original percent value is restored after the overrides.
        self.assertEqual(Decimal('0.0'),
                         Sample.objects.get(name='foo').percent)

    def test_sample_existed_and_was_50(self):
        Sample.objects.create(name='foo', percent='50.0')
        with override_sample('foo', active=True):
            assert waffle.sample_is_active('foo')
        with override_sample('foo', active=False):
            assert not waffle.sample_is_active('foo')
        # The original percent value is restored after the overrides.
        self.assertEqual(Decimal('50.0'),
                         Sample.objects.get(name='foo').percent)

    def test_sample_did_not_exist(self):
        assert not Sample.objects.filter(name='foo').exists()
        with override_sample('foo', active=True):
            assert waffle.sample_is_active('foo')
        with override_sample('foo', active=False):
            assert not waffle.sample_is_active('foo')
        # A sample created only for the override is removed afterwards.
        assert not Sample.objects.filter(name='foo').exists()
@override_switch('foo', active=False)
class OverrideSwitchOnClassTests(TestCase):
    """A class-level override_switch should govern undecorated test methods."""

    def setUp(self):
        # No pre-existing row; create the switch as active so the test can
        # prove the class-level override (active=False) takes precedence.
        assert not Switch.objects.filter(name='foo').exists()
        Switch.objects.create(name='foo', active=True)

    def test_undecorated_method_is_set_properly_for_switch(self):
        self.assertFalse(waffle.switch_is_active('foo'))
@override_flag('foo', active=False)
class OverrideFlagOnClassTests(TestCase):
    """A class-level override_flag should govern undecorated test methods."""

    def setUp(self):
        # No pre-existing row; create the flag for everyone so the test can
        # prove the class-level override (active=False) takes precedence.
        assert not Flag.objects.filter(name='foo').exists()
        Flag.objects.create(name='foo', everyone=True)

    def test_undecorated_method_is_set_properly_for_flag(self):
        self.assertFalse(waffle.flag_is_active(req(), 'foo'))
@override_sample('foo', active=False)
class OverrideSampleOnClassTests(TestCase):
    """A class-level override_sample should govern undecorated test methods."""

    def setUp(self):
        # No pre-existing row; create the sample at 100% so the test can
        # prove the class-level override (active=False) takes precedence.
        assert not Sample.objects.filter(name='foo').exists()
        Sample.objects.create(name='foo', percent='100.0')

    def test_undecorated_method_is_set_properly_for_sample(self):
        self.assertFalse(waffle.sample_is_active('foo'))
|
ezequielpereira/Time-Line | refs/heads/master | libs/wx/lib/ogl/_diagram.py | 6 | # -*- coding: iso-8859-1 -*-
#----------------------------------------------------------------------------
# Name: diagram.py
# Purpose: Diagram class
#
# Author: Pierre Hjälm (from C++ original by Julian Smart)
#
# Created: 2004-05-08
# RCS-ID: $Id: _diagram.py 37267 2006-02-03 06:51:34Z RD $
# Copyright: (c) 2004 Pierre Hjälm - 1998 Julian Smart
# Licence: wxWindows license
#----------------------------------------------------------------------------
import wx
DEFAULT_MOUSE_TOLERANCE = 3
class Diagram(object):
    """An entire diagram, with methods for drawing.

    A diagram keeps an ordered list of shapes and has an associated
    ShapeCanvas.

    Derived from:
    Object
    """
    def __init__(self):
        self._diagramCanvas = None
        self._quickEditMode = False
        self._snapToGrid = True
        self._gridSpacing = 5.0
        self._shapeList = []
        self._mouseTolerance = DEFAULT_MOUSE_TOLERANCE

    def Redraw(self, dc):
        """Draw every shape in the diagram on the given device context."""
        if not self._shapeList:
            return
        # Show a busy cursor while the (possibly slow) redraw runs.
        if self.GetCanvas():
            self.GetCanvas().SetCursor(wx.HOURGLASS_CURSOR)
        for shape in self._shapeList:
            shape.Draw(dc)
        if self.GetCanvas():
            self.GetCanvas().SetCursor(wx.STANDARD_CURSOR)

    def Clear(self, dc):
        """Clear the specified device context."""
        dc.Clear()

    def AddShape(self, object, addAfter=None):
        """Add a shape to the diagram.

        If addAfter is not None, the shape is inserted just after it;
        otherwise it is appended at the end.  Adding a shape that is
        already present is a no-op.
        """
        if object in self._shapeList:
            return
        if addAfter:
            self._shapeList.insert(self._shapeList.index(addAfter) + 1, object)
        else:
            self._shapeList.append(object)
        object.SetCanvas(self.GetCanvas())

    def InsertShape(self, object):
        """Insert a shape at the front of the shape list."""
        self._shapeList.insert(0, object)

    def RemoveShape(self, object):
        """Remove the shape from the diagram (non-recursively) but do not
        delete it.
        """
        if object in self._shapeList:
            self._shapeList.remove(object)

    def RemoveAllShapes(self):
        """Drop every shape from the diagram without deleting the shapes."""
        self._shapeList = []

    def DeleteAllShapes(self):
        """Remove and delete all parentless shapes in the diagram."""
        # Iterate a copy: RemoveShape mutates the underlying list.
        for shape in list(self._shapeList):
            if shape.GetParent():
                continue
            self.RemoveShape(shape)
            shape.Delete()

    def ShowAll(self, show):
        """Call Show(show) on every shape in the diagram."""
        for shape in self._shapeList:
            shape.Show(show)

    def DrawOutline(self, dc, x1, y1, x2, y2):
        """Draw a dotted outline rectangle on the given device context."""
        dc.SetPen(wx.Pen(wx.Color(0, 0, 0), 1, wx.DOT))
        dc.SetBrush(wx.TRANSPARENT_BRUSH)
        corners = [[x1, y1], [x2, y1], [x2, y2], [x1, y2], [x1, y1]]
        dc.DrawLines(corners)

    def RecentreAll(self, dc):
        """Make sure all text that should be centred, is centred."""
        for shape in self._shapeList:
            shape.Recentre(dc)

    def SetCanvas(self, canvas):
        """Associate a shape canvas with this diagram."""
        self._diagramCanvas = canvas

    def GetCanvas(self):
        """Return the shape canvas associated with this diagram."""
        return self._diagramCanvas

    def FindShape(self, id):
        """Return the shape with the given identifier, or None."""
        return next((s for s in self._shapeList if s.GetId() == id), None)

    def Snap(self, x, y):
        """Snap the coordinate to the nearest grid position when
        snap-to-grid is on; otherwise return it unchanged."""
        if not self._snapToGrid:
            return x, y
        spacing = self._gridSpacing
        return (spacing * int(x / spacing + 0.5),
                spacing * int(y / spacing + 0.5))

    def SetGridSpacing(self, spacing):
        """Set the grid spacing."""
        self._gridSpacing = spacing

    def SetSnapToGrid(self, snap):
        """Enable or disable snap-to-grid mode."""
        self._snapToGrid = snap

    def GetGridSpacing(self):
        """Return the grid spacing."""
        return self._gridSpacing

    def GetSnapToGrid(self):
        """Return whether snap-to-grid mode is on."""
        return self._snapToGrid

    def SetQuickEditMode(self, mode):
        """Turn quick-edit-mode on or off.

        In this mode, refreshes are minimized, but the diagram may need
        manual refreshing occasionally.
        """
        self._quickEditMode = mode

    def GetQuickEditMode(self):
        """Return quick edit mode."""
        return self._quickEditMode

    def SetMouseTolerance(self, tolerance):
        """Set the tolerance within which a mouse move is ignored.

        The default is 3 pixels.
        """
        self._mouseTolerance = tolerance

    def GetMouseTolerance(self):
        """Return the tolerance within which a mouse move is ignored."""
        return self._mouseTolerance

    def GetShapeList(self):
        """Return the internal shape list."""
        return self._shapeList

    def GetCount(self):
        """Return the number of shapes in the diagram."""
        return len(self._shapeList)
|
ToontownUprising/src | refs/heads/master | toontown/parties/DistributedPartyFireworksActivity.py | 4 | from pandac.PandaModules import Vec3
from pandac.PandaModules import OmniBoundingVolume
from pandac.PandaModules import AlphaTestAttrib
from pandac.PandaModules import RenderAttrib
from direct.actor.Actor import Actor
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import globalClockDelta
from toontown.effects.FireworkShowMixin import FireworkShowMixin
from toontown.effects.RocketExplosion import RocketExplosion
from toontown.toonbase import TTLocalizer
from PartyGlobals import FireworkShows
from PartyGlobals import ActivityIds
from PartyGlobals import ActivityTypes
from PartyGlobals import FireworksStartedEvent
from PartyGlobals import FireworksFinishedEvent
from PartyGlobals import FireworksPostLaunchDelay
from PartyGlobals import RocketSoundDelay
from PartyGlobals import RocketDirectionDelay
from DistributedPartyActivity import DistributedPartyActivity
from activityFSMs import FireworksActivityFSM
import PartyGlobals
class DistributedPartyFireworksActivity(DistributedPartyActivity, FireworkShowMixin):
    """Client side of the host-initiated party fireworks activity.

    The party host pulls a lever next to a rocket; the rocket launch is
    animated locally and FireworkShowMixin then drives the actual show.
    Replicated state (Idle/Active/Disabled) arrives via setState and is
    forwarded to a FireworksActivityFSM whose callbacks are the
    start*/finish* methods below.
    """
    notify = directNotify.newCategory('DistributedPartyFireworksActivity')

    def __init__(self, cr):
        """cr: the client repository, forwarded to DistributedPartyActivity."""
        DistributedPartyFireworksActivity.notify.debug('__init__')
        DistributedPartyActivity.__init__(self, cr, ActivityIds.PartyFireworks, ActivityTypes.HostInitiated, wantLever=True)
        FireworkShowMixin.__init__(self, restorePlaygroundMusic=True, startDelay=FireworksPostLaunchDelay)

    def setEventId(self, eventId):
        # Distributed update: which FireworkShows event this party plays.
        DistributedPartyFireworksActivity.notify.debug('setEventId( %s )' % FireworkShows.getString(eventId))
        self.eventId = eventId

    def setShowStyle(self, showStyle):
        # Distributed update: style variant of the selected show.
        DistributedPartyFireworksActivity.notify.debug('setShowStyle( %d )' % showStyle)
        self.showStyle = showStyle

    def setSongId(self, songId):
        # Distributed update: soundtrack to play with the show.
        self.songId = songId

    def load(self):
        """Load models, sounds and effects, then start the FSM in Idle."""
        DistributedPartyFireworksActivity.notify.debug('load')
        DistributedPartyActivity.load(self)
        self.eventId = PartyGlobals.FireworkShows.Summer
        self.launchPadModel = loader.loadModel('phase_13/models/parties/launchPad')
        self.launchPadModel.setH(90.0)
        self.launchPadModel.setPos(0.0, -18.0, 0.0)
        self.launchPadModel.reparentTo(self.root)
        # Railing geometry uses alpha-tested transparency (cutout) so it
        # renders correctly without depth-sorting artifacts.
        railingsCollection = self.launchPadModel.findAllMatches('**/launchPad_mesh/*railing*')
        for i in xrange(railingsCollection.getNumPaths()):
            railingsCollection[i].setAttrib(AlphaTestAttrib.make(RenderAttrib.MGreater, 0.75))
        leverLocator = self.launchPadModel.find('**/RocketLever_locator')
        self.lever.setPosHpr(Vec3.zero(), Vec3.zero())
        self.lever.reparentTo(leverLocator)
        self.toonPullingLeverInterval = None
        self.sign.reparentTo(self.launchPadModel.find('**/launchPad_sign_locator'))
        self.rocketActor = Actor('phase_13/models/parties/rocket_model', {'launch': 'phase_13/models/parties/rocket_launch'})
        rocketLocator = self.launchPadModel.find('**/rocket_locator')
        self.rocketActor.reparentTo(rocketLocator)
        # Infinite bounding volume + final node: the rocket is never culled,
        # so its launch animation stays visible wherever the camera is.
        self.rocketActor.node().setBound(OmniBoundingVolume())
        self.rocketActor.node().setFinal(True)
        effectsLocator = self.rocketActor.find('**/joint1')
        self.rocketExplosionEffect = RocketExplosion(effectsLocator, rocketLocator)
        self.rocketParticleSeq = None
        self.launchSound = base.loadSfx('phase_13/audio/sfx/rocket_launch.ogg')
        self.activityFSM = FireworksActivityFSM(self)
        self.activityFSM.request('Idle')
        return

    def unload(self):
        """Tear down everything created in load()."""
        DistributedPartyFireworksActivity.notify.debug('unload')
        taskMgr.remove(self.taskName('delayedStartShow'))
        if self.rocketParticleSeq:
            self.rocketParticleSeq.pause()
            self.rocketParticleSeq = None
        self.launchPadModel.removeNode()
        del self.launchPadModel
        del self.toonPullingLeverInterval
        self.rocketActor.delete()
        self.rocketExplosionEffect.destroy()
        self.activityFSM.request('Disabled')
        del self.rocketActor
        del self.launchSound
        del self.activityFSM
        del self.eventId
        # NOTE(review): self.showStyle is only created by setShowStyle();
        # this del looks like it would raise AttributeError if no show style
        # was ever received -- confirm the field is always sent before unload.
        del self.showStyle
        DistributedPartyActivity.unload(self)
        return

    def _leverPulled(self, collEntry):
        """Collision callback for the launch lever.

        Only the host may start the show, and only while the FSM is Idle;
        other cases just display an explanatory message.
        """
        DistributedPartyFireworksActivity.notify.debug('_leverPulled')
        hostPulledLever = DistributedPartyActivity._leverPulled(self, collEntry)
        if self.activityFSM.getCurrentOrNextState() == 'Active':
            self.showMessage(TTLocalizer.PartyFireworksAlreadyActive)
        elif self.activityFSM.getCurrentOrNextState() == 'Disabled':
            self.showMessage(TTLocalizer.PartyFireworksAlreadyDone)
        elif self.activityFSM.getCurrentOrNextState() == 'Idle':
            if hostPulledLever:
                # Lock the avatar in 'activity' mode while the lever-pull
                # animation plays, then send the join request and restore
                # normal 'walk' control.
                base.cr.playGame.getPlace().fsm.request('activity')
                self.toonPullingLeverInterval = self.getToonPullingLeverInterval(base.localAvatar)
                self.toonPullingLeverInterval.append(Func(self.d_toonJoinRequest))
                self.toonPullingLeverInterval.append(Func(base.cr.playGame.getPlace().fsm.request, 'walk'))
                self.toonPullingLeverInterval.start()
            else:
                self.showMessage(TTLocalizer.PartyOnlyHostLeverPull)

    def setState(self, newState, timestamp):
        """Replicated state change; 'Active' also carries the show start time."""
        DistributedPartyFireworksActivity.notify.debug('setState( newState=%s, ... )' % newState)
        DistributedPartyActivity.setState(self, newState, timestamp)
        if newState == 'Active':
            self.activityFSM.request(newState, timestamp)
        else:
            self.activityFSM.request(newState)

    def startIdle(self):
        # FSM callback: nothing to do while idle.
        DistributedPartyFireworksActivity.notify.debug('startIdle')

    def finishIdle(self):
        # FSM callback: leaving the Idle state.
        DistributedPartyFireworksActivity.notify.debug('finishIdle')

    def startActive(self, showStartTimestamp):
        """FSM callback: animate the rocket launch and schedule the show.

        Clients that join after the launch animation has finished skip
        straight into the show; clients joining 60s or more after the start
        (presumably past the show's useful window -- confirm against show
        durations) see nothing.
        """
        DistributedPartyFireworksActivity.notify.debug('startActive')
        messenger.send(FireworksStartedEvent)
        timeSinceStart = globalClockDelta.localElapsedTime(showStartTimestamp)
        if timeSinceStart > self.rocketActor.getDuration('launch'):
            self.rocketActor.hide()
            if timeSinceStart < 60:
                self.startShow(self.eventId, self.showStyle, self.songId, showStartTimestamp)
        else:
            self.rocketActor.play('launch')
            # Sound, exhaust particles, then a slow pitch-over before the
            # rocket disappears.
            self.rocketParticleSeq = Sequence(Wait(RocketSoundDelay), Func(base.playSfx, self.launchSound), Func(self.rocketExplosionEffect.start), Wait(RocketDirectionDelay), LerpHprInterval(self.rocketActor, 4.0, Vec3(0, 0, -60)), Func(self.rocketExplosionEffect.end), Func(self.rocketActor.hide))
            self.rocketParticleSeq.start()
            # Start the actual firework show once the launch has played out.
            taskMgr.doMethodLater(FireworksPostLaunchDelay, self.startShow, self.taskName('delayedStartShow'), extraArgs=[self.eventId,
             self.showStyle,
             self.songId,
             showStartTimestamp,
             self.root])

    def finishActive(self):
        """FSM callback: stop the show and cancel any pending launch task."""
        self.rocketParticleSeq = None
        DistributedPartyFireworksActivity.notify.debug('finishActive')
        messenger.send(FireworksFinishedEvent)
        taskMgr.remove(self.taskName('delayedStartShow'))
        FireworkShowMixin.disable(self)
        return

    def startDisabled(self):
        # FSM callback: activity is over; make sure the rocket is hidden.
        DistributedPartyFireworksActivity.notify.debug('startDisabled')
        if not self.rocketActor.isEmpty():
            self.rocketActor.hide()

    def finishDisabled(self):
        DistributedPartyFireworksActivity.notify.debug('finishDisabled')

    def handleToonDisabled(self, toonId):
        # TODO: clean up state for toons that disconnect mid-activity.
        self.notify.warning('handleToonDisabled no implementation yet')
|
vrenkens/Nabu-asr | refs/heads/master | nabu/neuralnetworks/components/ops.py | 2 | '''@file ops.py
some operations'''
import tensorflow as tf
def pyramid_stack(inputs, sequence_lengths, numsteps, axis=2, scope=None):
    '''
    concatenate each numsteps consecutive elements along the time axis

    Args:
        inputs: A time minor tensor [batch_size, time, input_size]
        sequence_lengths: the length of the input sequences
        numsteps: number of time steps to concatenate
        axis: the axis where the inputs should be stacked
        scope: the current scope

    Returns:
        inputs: Concatenated inputs
            [batch_size, time/numsteps, input_size*numsteps]
        sequence_lengths: the lengths of the inputs sequences [batch_size]
    '''

    with tf.name_scope(scope or 'pyramid_stack'):

        numdims = len(inputs.shape)

        #convert inputs to time major
        #NOTE: Python 2 only -- range() returns a list here so it can be
        #concatenated with [1, 0]
        time_major_input = tf.transpose(inputs, [1, 0] + range(2, numdims))

        #pad the inputs to a multiple of numsteps so the gather below
        #divides the time axis evenly
        length = tf.cast(tf.shape(time_major_input)[0], tf.float32)
        pad_length = tf.ceil(length/numsteps)*numsteps - length
        pad_length = tf.cast(pad_length, tf.int32)

        pad_shape = tf.concat([[pad_length],
                               tf.shape(time_major_input)[1:]], 0)
        padding = tf.zeros(pad_shape, dtype=inputs.dtype)
        padded_inputs = tf.concat([time_major_input, padding], 0)

        #get the new length
        length = tf.shape(padded_inputs)[0]

        #separate the inputs for every concatenated timestep
        #(element i takes time steps i, i+numsteps, i+2*numsteps, ...)
        seperated = []
        for i in range(numsteps):
            seperated.append(tf.gather(
                padded_inputs, tf.range(i, length, numsteps)))

        #concatenate the interleaved slices along the requested axis
        time_major_outputs = tf.concat(seperated, axis)

        #convert back to time minor
        outputs = tf.transpose(time_major_outputs, [1, 0] + range(2, numdims))

        #compute the new sequence length (ceil of the old length / numsteps)
        output_sequence_lengths = tf.cast(tf.ceil(tf.cast(sequence_lengths,
                                                          tf.float32)/numsteps),
                                          tf.int32)

    return outputs, output_sequence_lengths
def stack_seq(sequential, sequence_lengths, name=None):
    '''
    remove padding and stack sequences

    Args:
        sequential: the sequential data which is a [batch_size, max_length, dim]
            tensor
        sequence_lengths: a [batch_size] vector containing the sequence lengths
        name: [optional] the name of the operation

    Returns:
        non sequential data, which is a TxF tensor where T is the sum of all
        sequence lengths
    '''

    with tf.name_scope(name or 'stack_seq'):

        #indices of all non-padding positions
        indices = get_indices(sequence_lengths)

        #create the values
        tensor = tf.gather_nd(sequential, indices)

        #gather_nd loses the static feature dims, so restore them
        tensor.set_shape([None] + sequential.shape.as_list()[2:])

        return tensor
def unstack_seq(nonseq, sequence_lengths, name=None):
    '''
    unstack sequences and add padding (inverse of stack_seq)

    Args:
        nonseq: the non sequential data which is a
            [sum(sequence_lengths) x dim] tensor
        sequence_lengths: a [batch_size] vector containing the sequence lengths
        name: [optional] the name of the operation

    Returns:
        sequential data, which is a [batch_size, max_length, dim] tensor
    '''

    with tf.name_scope(name or 'unstack_seq'):

        max_length = tf.reduce_max(sequence_lengths)
        batch_size = tf.size(sequence_lengths)

        #split the stacked tensor back into one element per sequence
        unstacked = tf.TensorArray(
            dtype=nonseq.dtype,
            size=batch_size,
            element_shape=tf.TensorShape([None]).concatenate(nonseq.shape[1:]),
            infer_shape=False)
        unstacked = unstacked.split(nonseq, sequence_lengths)

        #zero-pad every sequence to the batch maximum so they can be stacked
        unstacked = map_ta(
            lambda x: pad_to(x, max_length),
            unstacked
        )

        unstacked = unstacked.stack()
        #stack() cannot infer the static shape, so set it explicitly
        unstacked.set_shape([None, None] + nonseq.shape.as_list()[1:])

        return unstacked
def dense_sequence_to_sparse(sequences, sequence_lengths):
    '''convert sequence dense representations to sparse representations

    Args:
        sequences: the dense sequences as a [batch_size x max_length] tensor
        sequence_lengths: the sequence lengths as a [batch_size] vector

    Returns:
        the sparse tensor representation of the sequences (padding positions
        are simply absent from the sparse tensor)
    '''

    with tf.name_scope('dense_sequence_to_sparse'):

        #get all the non padding sequence positions
        #(SparseTensor requires int64 indices)
        indices = tf.cast(get_indices(sequence_lengths), tf.int64)

        #create the values
        values = tf.gather_nd(sequences, indices)

        #the shape of the sparse tensor matches the dense one
        shape = tf.cast(tf.shape(sequences), tf.int64)

        sparse = tf.SparseTensor(indices, values, shape)

    return sparse
def get_indices(sequence_length):
    '''get the indices corresponding to sequences (and not padding)

    Args:
        sequence_length: the sequence_lengths as a N-D tensor

    Returns:
        A [sum(sequence_length) x N-1] Tensor containing the indices'''

    with tf.name_scope('get_indices'):

        numdims = len(sequence_length.shape)

        #get the maximal length
        max_length = tf.reduce_max(sequence_length)

        sizes = tf.shape(sequence_length)

        #build a tensor of time indices broadcast over all leading dims
        range_tensor = tf.range(max_length)
        for i in range(1, numdims):
            tile_dims = [1]*i + [sizes[i]]
            range_tensor = tf.tile(tf.expand_dims(range_tensor, i), tile_dims)

        #positions whose time index is below the sequence length are real
        #(non-padding) elements; tf.where returns their coordinates
        indices = tf.where(tf.less(range_tensor,
                                   tf.expand_dims(sequence_length, numdims)))

    return indices
def pad_to(tensor, length, axis=0, name=None):
    '''pad the tensor to a certain length

    args:
        - tensor: the tensor to pad
        - length: the length to pad to, has to be larger than tensor.shape[axis]
        - axis: the axis to pad
        - name: the name of the operation

    returns:
        the padded tensor
    '''

    with tf.name_scope(name or 'pad_to'):
        rank = tensor.shape.ndims
        orig_length = tf.shape(tensor)[axis]

        #runtime checks: axis must be a valid dimension and the target
        #length may not shrink the tensor; the nested control_dependencies
        #make the padding op depend on both assertions
        assert_op = tf.assert_less(axis, rank,
                                   message='axis has to be less than rank')
        with tf.control_dependencies([assert_op]):
            assert_op = tf.assert_less_equal(
                orig_length, length,
                message='target length less than original length')
            with tf.control_dependencies([assert_op]):
                #a [rank x 2] paddings matrix that is zero everywhere except
                #at [axis, 1] (i.e. pad only at the end of the chosen axis)
                paddings = tf.SparseTensor(
                    indices=[[axis, 1]],
                    values=tf.expand_dims(length-orig_length, 0),
                    dense_shape=[rank, 2])

        padded = tf.pad(tensor, tf.sparse_tensor_to_dense(paddings))

    return padded
def map_ta(fn, ta):
    '''
    apply fn to each element in tensorarray

    args:
        fn: the function to apply
        ta: the tensorarray

    returns:
        the resulting tensorarray
    '''

    def body(index, ta_out):
        '''the body of the while loop: write fn(ta[index]) and advance'''
        ta_out = ta_out.write(index, fn(ta.read(index)))
        newindex = index + 1
        return newindex, ta_out

    def condition(index, ta_out):
        '''loop condition: keep going until every element is processed'''
        return tf.not_equal(index, ta_out.size())

    ta_init = tf.TensorArray(
        dtype=ta.dtype,
        size=ta.size()
    )
    index_init = 0

    #graph-mode loop: runs body until condition is False
    _, mapped = tf.while_loop(
        cond=condition,
        body=body,
        loop_vars=[index_init, ta_init]
    )

    return mapped
|
ChromiumWebApps/chromium | refs/heads/master | third_party/closure_linter/closure_linter/checker.py | 135 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Core methods for checking JS files for common style guide violations."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import gflags as flags
from closure_linter import checkerbase
from closure_linter import closurizednamespacesinfo
from closure_linter import ecmametadatapass
from closure_linter import javascriptlintrules
from closure_linter import javascriptstatetracker
from closure_linter.common import lintrunner
flags.DEFINE_list('limited_doc_files', ['dummy.js', 'externs.js'],
'List of files with relaxed documentation checks. Will not '
'report errors for missing documentation, some missing '
'descriptions, or methods whose @return tags don\'t have a '
'matching return statement.')
flags.DEFINE_list('closurized_namespaces', '',
'Namespace prefixes, used for testing of'
'goog.provide/require')
flags.DEFINE_list('ignored_extra_namespaces', '',
'Fully qualified namespaces that should be not be reported '
'as extra by the linter.')
class JavaScriptStyleChecker(checkerbase.CheckerBase):
  """Checker that applies JavaScriptLintRules."""

  def __init__(self, error_handler):
    """Initialize an JavaScriptStyleChecker object.

    Args:
      error_handler: Error handler to pass all errors to.
    """
    # Namespace tracking is only enabled when --closurized_namespaces is
    # given; otherwise the dependency pass is skipped entirely.
    self._namespaces_info = None
    if flags.FLAGS.closurized_namespaces:
      self._namespaces_info = (
          closurizednamespacesinfo.ClosurizedNamespacesInfo(
              flags.FLAGS.closurized_namespaces,
              flags.FLAGS.ignored_extra_namespaces))

    checkerbase.CheckerBase.__init__(
        self,
        error_handler=error_handler,
        lint_rules=javascriptlintrules.JavaScriptLintRules(
            self._namespaces_info),
        state_tracker=javascriptstatetracker.JavaScriptStateTracker(),
        metadata_pass=ecmametadatapass.EcmaMetaDataPass(),
        limited_doc_files=flags.FLAGS.limited_doc_files)

  def _CheckTokens(self, token, parse_error, debug_tokens):
    """Checks a token stream for lint warnings/errors.

    Adds a separate pass for computing dependency information based on
    goog.require and goog.provide statements prior to the main linting pass.

    Args:
      token: The first token in the token stream.
      parse_error: A ParseError if any errors occurred.
      debug_tokens: Whether every token should be printed as it is encountered
          during the pass.

    Returns:
      A boolean indicating whether the full token stream could be checked or if
      checking failed prematurely.
    """
    # To maximize the amount of errors that get reported before a parse error
    # is displayed, don't run the dependency pass if a parse error exists.
    if self._namespaces_info and not parse_error:
      self._namespaces_info.Reset()
      result = (self._ExecutePass(token, self._DependencyPass) and
                self._ExecutePass(token, self._LintPass,
                                  debug_tokens=debug_tokens))
    else:
      result = self._ExecutePass(token, self._LintPass, parse_error,
                                 debug_tokens)

    if not result:
      return False

    # Both passes completed; give the lint rules a chance to report
    # end-of-file errors before the error handler closes the file.
    self._lint_rules.Finalize(self._state_tracker, self._tokenizer.mode)
    self._error_handler.FinishFile()
    return True

  def _DependencyPass(self, token):
    """Processes an invidual token for dependency information.

    Used to encapsulate the logic needed to process an individual token so that
    it can be passed to _ExecutePass.

    Args:
      token: The token to process.
    """
    self._namespaces_info.ProcessToken(token, self._state_tracker)
class GJsLintRunner(lintrunner.LintRunner):
  """Wrapper class to run GJsLint."""

  def Run(self, filenames, error_handler):
    """Run GJsLint on the given filenames.

    Args:
      filenames: The filenames to check
      error_handler: An ErrorHandler object.
    """
    # One checker instance is shared across all files so namespace state
    # accumulates per invocation.
    checker = JavaScriptStyleChecker(error_handler)

    # Check the list of files.
    for filename in filenames:
      checker.Check(filename)
|
DevOps4Networks/ansible | refs/heads/devel | contrib/inventory/openvz.py | 79 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# openvz.py
#
# Copyright 2014 jordonr <jordon@beamsyn.net>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Inspired by libvirt_lxc.py inventory script
# https://github.com/ansible/ansible/blob/e5ef0eca03cbb6c8950c06dc50d0ca22aa8902f4/plugins/inventory/libvirt_lxc.py
#
# Groups are determined by the description field of openvz guests
# multiple groups can be separated by commas: webserver,dbserver
from subprocess import Popen,PIPE
import sys
import json
#List openvz hosts
vzhosts = ['vzhost1','vzhost2','vzhost3']
#Add openvz hosts to the inventory and Add "_meta" trick
inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}}
#default group, when description not defined
default_group = ['vzguest']
def get_guests():
    """Populate the module-level 'inventory' dict with guest information
    gathered by running 'vzlist -j' over SSH on every host in 'vzhosts',
    and return it in Ansible dynamic-inventory format.
    """
    #Loop through vzhosts
    for h in vzhosts:
        #SSH to vzhost and get the list of guests in json
        pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True)

        #Load Json info of guests
        json_data = json.loads(pipe.stdout.read())

        #loop through guests
        for j in json_data:
            #Add information to host vars
            # NOTE(review): assumes every vzlist entry carries ctid/veid/
            # vpsid/private/root/ip/hostname/description keys -- confirm
            # against the installed OpenVZ tools version.
            inventory['_meta']['hostvars'][j['hostname']] = {'ctid': j['ctid'], 'veid': j['veid'], 'vpsid': j['vpsid'], 'private_path': j['private'], 'root_path': j['root'], 'ip': j['ip']}

            #determine group from guest description
            if j['description'] is not None:
                groups = j['description'].split(",")
            else:
                groups = default_group

            #add guest to inventory
            for g in groups:
                if g not in inventory:
                    inventory[g] = {'hosts': []}

                inventory[g]['hosts'].append(j['hostname'])

    return inventory
# Ansible dynamic-inventory CLI contract: '--list' dumps the whole inventory;
# '--host <name>' dumps per-host variables -- empty here because all hostvars
# are already supplied via the '_meta' key in the --list output.
if len(sys.argv) == 2 and sys.argv[1] == '--list':
    inv_json = get_guests()
    print(json.dumps(inv_json, sort_keys=True))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print(json.dumps({}))
else:
    print("Need an argument, either --list or --host <host>")
|
shepdelacreme/ansible | refs/heads/devel | lib/ansible/modules/packaging/os/pkgin.py | 43 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Shaun Zinck <shaun.zinck at gmail.com>
# Copyright (c) 2015 Lawrence Leonard Gilbert <larry@L2G.to>
# Copyright (c) 2016 Jasper Lievisse Adriaanse <j at jasper.la>
#
# Written by Shaun Zinck
# Based on pacman module written by Afterburn <http://github.com/afterburn>
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pkgin
short_description: Package manager for SmartOS, NetBSD, et al.
description:
- "The standard package manager for SmartOS, but also usable on NetBSD
or any OS that uses C(pkgsrc). (Home: U(http://pkgin.net/))"
version_added: "1.0"
author:
- "Larry Gilbert (L2G)"
- "Shaun Zinck (@szinck)"
- "Jasper Lievisse Adriaanse (@jasperla)"
notes:
- "Known bug with pkgin < 0.8.0: if a package is removed and another
package depends on it, the other package will be silently removed as
well. New to Ansible 1.9: check-mode support."
options:
name:
description:
- Name of package to install/remove;
- multiple names may be given, separated by commas
state:
description:
- Intended state of the package
choices: [ 'present', 'absent' ]
default: present
update_cache:
description:
- Update repository database. Can be run with other steps or on it's own.
type: bool
default: 'no'
version_added: "2.1"
upgrade:
description:
- Upgrade main packages to their newer versions
type: bool
default: 'no'
version_added: "2.1"
full_upgrade:
description:
- Upgrade all packages to their newer versions
type: bool
default: 'no'
version_added: "2.1"
clean:
description:
- Clean packages cache
type: bool
default: 'no'
version_added: "2.1"
force:
description:
- Force package reinstall
type: bool
default: 'no'
version_added: "2.1"
'''
EXAMPLES = '''
# install package foo
- pkgin:
name: foo
state: present
# Update database and install "foo" package
- pkgin:
name: foo
update_cache: yes
# remove package foo
- pkgin:
name: foo
state: absent
# remove packages foo and bar
- pkgin:
name: foo,bar
state: absent
# Update repositories as a separate step
- pkgin:
update_cache: yes
# Upgrade main packages (equivalent to C(pkgin upgrade))
- pkgin:
upgrade: yes
# Upgrade all packages (equivalent to C(pkgin full-upgrade))
- pkgin:
full_upgrade: yes
# Force-upgrade all packages (equivalent to C(pkgin -F full-upgrade))
- pkgin:
full_upgrade: yes
force: yes
# clean packages cache (equivalent to C(pkgin clean))
- pkgin:
clean: yes
'''
import re
from ansible.module_utils.basic import AnsibleModule
def query_package(module, name):
    """Search for the package by name.

    Uses the module-level PKGIN_PATH binary (set in main()).

    Possible return values:
    * "present"  - installed, no upgrade needed
    * "outdated" - installed, but can be upgraded
    * False      - not installed or not found
    """

    # test whether '-p' (parsable) flag is supported; newer pkgin emits
    # ';'-separated fields with -p, older versions are space-separated.
    rc, out, err = module.run_command("%s -p -v" % PKGIN_PATH)

    if rc == 0:
        pflag = '-p'
        splitchar = ';'
    else:
        pflag = ''
        splitchar = ' '

    # Use "pkgin search" to find the package. The regular expression will
    # only match on the complete name.
    rc, out, err = module.run_command("%s %s search \"^%s$\"" % (PKGIN_PATH, pflag, name))

    # rc will not be 0 unless the search was a success
    if rc == 0:

        # Search results may contain more than one line (e.g., 'emacs'), so iterate
        # through each line to see if we have a match.
        packages = out.split('\n')

        for package in packages:

            # Break up line at spaces.  The first part will be the package with its
            # version (e.g. 'gcc47-libs-4.7.2nb4'), and the second will be the state
            # of the package:
            #     ''  - not installed
            #     '<' - installed but out of date
            #     '=' - installed and up to date
            #     '>' - installed but newer than the repository version
            pkgname_with_version, raw_state = package.split(splitchar)[0:2]

            # Search for package, stripping version
            # (results in sth like 'gcc47-libs' or 'emacs24-nox11')
            pkg_search_obj = re.search(r'^(.*?)\-[0-9][0-9.]*(nb[0-9]+)*', pkgname_with_version, re.M)

            # Do not proceed unless we have a match
            if not pkg_search_obj:
                continue

            # Grab matched string
            pkgname_without_version = pkg_search_obj.group(1)

            if name != pkgname_without_version:
                continue

            # The package was found; now return its state
            if raw_state == '<':
                return 'outdated'
            elif raw_state == '=' or raw_state == '>':
                return 'present'
            else:
                return False
            # no fall-through

    # No packages were matched, so return False
    return False
def format_action_message(module, action, count):
    """Build a human-readable result message for install/remove actions.

    Args:
        module: AnsibleModule instance; only ``check_mode`` is read.
        action: past-tense verb, e.g. "installed" or "removed".
        count: number of packages acted upon.

    Returns:
        A message such as "removed 2 packages", prefixed with
        "would have" when running in check mode.
    """
    # 'values' rather than 'vars' so the builtin is not shadowed.
    values = {"actioned": action,
              "count": count}

    if module.check_mode:
        message = "would have %(actioned)s %(count)d package" % values
    else:
        message = "%(actioned)s %(count)d package" % values

    # Naive pluralisation is sufficient for "package"/"packages".
    return message if count == 1 else message + "s"
def format_pkgin_command(module, command, package=None):
    """Assemble a full pkgin command line as a single string.

    Args:
        module: AnsibleModule instance; reads ``check_mode`` and
            ``params["force"]``.
        command: pkgin subcommand, e.g. "install", "remove", "update".
        package: optional package argument for the subcommand.

    Returns:
        The command string (check mode uses pkgin's dry-run flag -n,
        otherwise -y plus an optional -F to force).
    """
    # Not all commands take a package argument, so cover this up by passing
    # an empty string. Some commands (e.g. 'update') will ignore extra
    # arguments, however this behaviour cannot be relied on for others.
    if package is None:
        package = ""

    force = "-F" if module.params["force"] else ""

    # 'fields' rather than 'vars' so the builtin is not shadowed.
    fields = {"pkgin": PKGIN_PATH,
              "command": command,
              "package": package,
              "force": force}

    if module.check_mode:
        return "%(pkgin)s -n %(command)s %(package)s" % fields
    else:
        return "%(pkgin)s -y %(force)s %(command)s %(package)s" % fields
def remove_packages(module, packages):
    """Remove every package in *packages* that is currently installed.

    Always exits the module: exit_json() with changed=True/False on
    success, fail_json() on the first package that cannot be removed.
    """

    remove_c = 0

    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue

        rc, out, err = module.run_command(
            format_pkgin_command(module, "remove", package))

        # In check mode nothing was removed, so only verify for real runs.
        if not module.check_mode and query_package(module, package):
            module.fail_json(msg="failed to remove %s: %s" % (package, out))

        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg=format_action_message(module, "removed", remove_c))

    module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages):
    """Install every package in *packages* that is not already present.

    Always exits the module: exit_json() with changed=True/False on
    success, fail_json() on the first package that cannot be installed.
    """
    installed_count = 0

    for pkg in packages:
        # Skip anything that is already installed (present or outdated).
        if query_package(module, pkg):
            continue

        rc, out, err = module.run_command(
            format_pkgin_command(module, "install", pkg))

        # In check mode nothing was actually installed, so only verify the
        # result when running for real.
        if not module.check_mode and not query_package(module, pkg):
            module.fail_json(msg="failed to install %s: %s" % (pkg, out))

        installed_count += 1

    if installed_count > 0:
        module.exit_json(
            changed=True,
            msg=format_action_message(module, "installed", installed_count))

    module.exit_json(changed=False, msg="package(s) already present")
def update_package_db(module):
    """Run 'pkgin update' to refresh the repository database.

    Returns:
        A (changed, message) tuple: (False, ...) when the database was
        already current, (True, ...) when it was refreshed.

    Exits via module.fail_json() when the command fails.
    """
    rc, out, err = module.run_command(
        format_pkgin_command(module, "update"))

    if rc == 0:
        if re.search('database for.*is up-to-date\n$', out):
            # Fixed typo in the returned message ("datebase" -> "database").
            return False, "database is up-to-date"
        else:
            return True, "updated repository database"
    else:
        module.fail_json(msg="could not update package db")
def do_upgrade_packages(module, full=False):
    """Run 'pkgin upgrade' (or 'pkgin full-upgrade' when full=True).

    Exits early with changed=False when pkgin reports nothing to do and
    fail_json()s on a non-zero exit.  When packages were actually upgraded
    (rc == 0 and work was done) the function deliberately falls through
    and returns, letting the caller in main() report the change.
    """
    if full:
        cmd = "full-upgrade"
    else:
        cmd = "upgrade"

    rc, out, err = module.run_command(
        format_pkgin_command(module, cmd))

    if rc == 0:
        if re.search('^nothing to do.\n$', out):
            module.exit_json(changed=False, msg="nothing left to upgrade")
    else:
        module.fail_json(msg="could not %s packages" % cmd)
def upgrade_packages(module):
    """Upgrade main packages (equivalent to 'pkgin upgrade')."""
    do_upgrade_packages(module)
def full_upgrade_packages(module):
    """Upgrade all packages (equivalent to 'pkgin full-upgrade')."""
    do_upgrade_packages(module, True)
def clean_cache(module):
    """Run 'pkgin clean' to remove downloaded package files.

    Always exits the module (exit_json on success, fail_json on error).
    """
    rc, out, err = module.run_command(
        format_pkgin_command(module, "clean"))

    if rc == 0:
        # There's no indication if 'clean' actually removed anything,
        # so assume it did.
        module.exit_json(changed=True, msg="cleaned caches")
    else:
        module.fail_json(msg="could not clean package cache")
def main():
    """Module entry point: parse parameters and dispatch to the actions.

    Actions are cumulative: update_cache/upgrade/full_upgrade/clean may
    each run first and only exit immediately when no package list was
    given; otherwise control continues to the install/remove step.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "absent"]),
            name=dict(aliases=["pkg"], type='list'),
            update_cache=dict(default='no', type='bool'),
            upgrade=dict(default='no', type='bool'),
            full_upgrade=dict(default='no', type='bool'),
            clean=dict(default='no', type='bool'),
            force=dict(default='no', type='bool')),
        required_one_of=[['name', 'update_cache', 'upgrade', 'full_upgrade', 'clean']],
        supports_check_mode=True)

    # PKGIN_PATH is shared with the helper functions above.
    global PKGIN_PATH
    PKGIN_PATH = module.get_bin_path('pkgin', True, ['/opt/local/bin'])

    # Force a C locale so pkgin's output can be parsed reliably.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    p = module.params

    if p["update_cache"]:
        c, msg = update_package_db(module)
        if not (p['name'] or p["upgrade"] or p["full_upgrade"]):
            module.exit_json(changed=c, msg=msg)

    if p["upgrade"]:
        upgrade_packages(module)
        if not p['name']:
            module.exit_json(changed=True, msg='upgraded packages')

    if p["full_upgrade"]:
        full_upgrade_packages(module)
        if not p['name']:
            module.exit_json(changed=True, msg='upgraded all packages')

    if p["clean"]:
        clean_cache(module)
        if not p['name']:
            module.exit_json(changed=True, msg='cleaned caches')

    pkgs = p["name"]

    if p["state"] == "present":
        install_packages(module, pkgs)

    elif p["state"] == "absent":
        remove_packages(module, pkgs)
if __name__ == '__main__':
main()
|
schettino72/nikola | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
# Don't use __future__ in this script, it breaks buildout
# from __future__ import print_function
import os
import subprocess
import sys
import shutil
from setuptools import setup, find_packages
from setuptools.command.install import install
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """setuptools 'test' command that delegates to pytest, so that
    'python setup.py test' runs the pytest suite."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest
        errno = pytest.main(self.test_args)
        # propagate pytest's exit status to the shell
        sys.exit(errno)
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
extras = {}
with open('requirements-extras.txt', 'r') as fh:
extras['extras'] = [l.strip() for l in fh][1:]
# Alternative name.
extras['full'] = extras['extras']
with open('requirements-tests.txt', 'r') as fh:
extras['tests'] = [l.strip() for l in fh][1:]
# ########## platform specific stuff #############
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
raise Exception('Python 2 version < 2.7 is not supported')
elif sys.version_info[0] == 3 and sys.version_info[1] < 3:
raise Exception('Python 3 version < 3.3 is not supported')
##################################################
# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ('*.pyc', '*$py.class', '*~', '.*', '*.bak')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build',
'./dist', 'EGG-INFO', '*.egg-info')
def copy_messages():
    """Mirror the default theme's message catalog into the themes that
    inherit it ('orphan' and 'monospace'), replacing any stale copy."""
    themes_root = os.path.join(
        os.path.dirname(__file__), 'nikola', 'data', 'themes')
    source_messages = os.path.join(themes_root, 'default', 'messages')

    for derived_theme in ('orphan', 'monospace'):
        target_messages = os.path.join(themes_root, derived_theme, 'messages')
        # copytree refuses to overwrite, so drop any existing copy first.
        if os.path.exists(target_messages):
            shutil.rmtree(target_messages)
        shutil.copytree(source_messages, target_messages)
def expands_symlinks_for_windows():
    """replaces the symlinked files with a copy of the original content.

    In windows (msysgit), a symlink is converted to a text file with a
    path to the file it points to. If not corrected, installing from a git
    clone will end with some files with bad content

    After install the working copy will be dirty (symlink markers overwritten with
    real content)
    """
    if sys.platform != 'win32':
        return

    # apply the fix: import the in-tree 'winutils' helper by temporarily
    # prepending the nikola package dir to sys.path, then undo the change
    # and drop the cached module so later imports are unaffected.
    localdir = os.path.dirname(os.path.abspath(__file__))
    oldpath = sys.path[:]
    sys.path.insert(0, os.path.join(localdir, 'nikola'))
    winutils = __import__('winutils')
    failures = winutils.fix_all_git_symlinked(localdir)
    sys.path = oldpath
    del sys.modules['winutils']
    if failures != -1:
        print('WARNING: your working copy is now dirty by changes in samplesite, sphinx and themes')
    # failures == -1 apparently means "nothing to fix"; any positive count
    # means some symlink markers could not be replaced.
    if failures > 0:
        raise Exception("Error: \n\tnot all symlinked files could be fixed." +
                        "\n\tYour best bet is to start again from clean.")
def remove_old_files(self):
    """Delete any previously installed 'nikola' package tree.

    Called from nikola_install.run() with the install command instance,
    so ``self.install_lib`` is the target site-packages directory.
    """
    tree = os.path.join(self.install_lib, 'nikola')
    # ignore_errors=True already swallows removal failures inside rmtree;
    # the narrow except guards against OS-level errors raised before the
    # walk starts (e.g. a bad install_lib value) without hiding unrelated
    # bugs the way the previous bare 'except:' did.
    try:
        shutil.rmtree(tree, ignore_errors=True)
    except OSError:
        pass
class nikola_install(install):
    """Custom install command: repair msysgit symlink markers and remove a
    stale installed tree before running the standard install."""

    def run(self):
        expands_symlinks_for_windows()
        remove_old_files(self)
        install.run(self)
# Read the long description up front so the file handle is closed promptly
# (the previous open('README.rst').read() leaked the handle until GC).
with open('README.rst') as readme_file:
    readme_text = readme_file.read()

setup(name='Nikola',
      version='7.6.3',
      description='A modular, fast, simple, static website and blog generator',
      long_description=readme_text,
      author='Roberto Alsina and others',
      author_email='ralsina@netmanagers.com.ar',
      url='https://getnikola.com/',
      packages=find_packages(exclude=('tests',)),
      license='MIT',
      keywords='website, blog, static',
      # A list, not a tuple: newer setuptools rejects tuple classifiers.
      classifiers=['Development Status :: 5 - Production/Stable',
                   'Environment :: Console',
                   'Environment :: Plugins',
                   'Environment :: Web Environment',
                   'Intended Audience :: End Users/Desktop',
                   'License :: OSI Approved :: MIT License',
                   'Operating System :: MacOS',
                   'Operating System :: Microsoft :: Windows',
                   'Operating System :: OS Independent',
                   'Operating System :: POSIX',
                   'Operating System :: Unix',
                   'Programming Language :: Python',
                   'Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3.3',
                   'Programming Language :: Python :: 3.4',
                   'Topic :: Internet',
                   'Topic :: Internet :: WWW/HTTP',
                   'Topic :: Text Processing :: Markup'],
      install_requires=dependencies,
      extras_require=extras,
      tests_require=['pytest'],
      include_package_data=True,
      cmdclass={'install': nikola_install, 'test': PyTest},
      data_files=[
          ('share/doc/nikola', [
              'docs/manual.txt',
              'docs/theming.txt',
              'docs/extending.txt']),
          ('share/man/man1', ['docs/man/nikola.1.gz']),
      ],
      # PEP 8: no spaces around '=' in keyword arguments.
      entry_points={
          'console_scripts': [
              'nikola = nikola.__main__:main'
          ]
      },
      )
|
ic-hep/DIRAC | refs/heads/rel-v6r15 | FrameworkSystem/private/monitoring/MonitoringCatalog.py | 1 | """ interacts with sqlite3 db
"""
import sqlite3
import os
import types
import hashlib
import time
import DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.FrameworkSystem.private.monitoring.Activity import Activity
from DIRAC.Core.Utilities import Time
class MonitoringCatalog( object ):
  """
  Catalog of monitoring sources, activities and views kept in a sqlite3 db.

  Tracks which components (sources) report which activities, the rrd file
  associated with each activity, and stored view definitions.
  """

  def __init__( self, dataPath ):
    """
    Initialize monitoring catalog

    :param dataPath: directory that holds (or will hold) monitoring.db
    """
    self.dbConn = False
    self.dataPath = dataPath
    self.log = gLogger.getSubLogger( "ActivityCatalog" )
    self.createSchema()

  def __connect( self ):
    """
    Connect to database (lazily, on first use)
    """
    if not self.dbConn:
      dbPath = "%s/monitoring.db" % self.dataPath
      # isolation_level = None -> autocommit: each statement is committed
      self.dbConn = sqlite3.connect( dbPath, isolation_level = None )

  def __dbExecute( self, query, values = False ):
    """
    Execute a sql statement, retrying until it succeeds (e.g. while the
    database is locked by a concurrent writer).

    :return: the cursor used for the execution
    """
    cursor = self.dbConn.cursor()
    self.log.debug( "Executing %s" % query )
    executed = False
    while not executed:
      try:
        if values:
          cursor.execute( query, values )
        else:
          cursor.execute( query )
        executed = True
      except Exception:
        # Short pause before retrying. NOTE: was a bare 'except:', which
        # also swallowed KeyboardInterrupt/SystemExit.
        time.sleep( 0.01 )
    return cursor

  def __createTables( self ):
    """
    Create tables if not already created
    """
    self.log.info( "Creating tables in db" )
    try:
      filePath = "%s/monitoringSchema.sql" % os.path.dirname( __file__ )
      fd = open( filePath )
      buff = fd.read()
      fd.close()
    except IOError:
      # 'except IOError, e:' was py2-only syntax; the bound exception was
      # unused anyway.
      DIRAC.abort( 1, "Can't read monitoring schema", filePath )
    # Execute the schema one ';'-terminated statement at a time
    while buff.find( ";" ) > -1:
      limit = buff.find( ";" ) + 1
      sqlQuery = buff[ : limit ].replace( "\n", "" )
      buff = buff[ limit : ]
      try:
        self.__dbExecute( sqlQuery )
      except Exception as e:
        DIRAC.abort( 1, "Can't create tables", str( e ) )

  def createSchema( self ):
    """
    Create all the sql schema if it does not exist

    :return: True on success, False if the db engine failed to start
    """
    self.__connect()
    try:
      sqlQuery = "SELECT name FROM sqlite_master WHERE type='table';"
      c = self.__dbExecute( sqlQuery )
      tablesList = c.fetchall()
      # The schema defines more than one table; fewer means first run
      if len( tablesList ) < 2:
        self.__createTables()
    except Exception as e:
      self.log.fatal( "Failed to startup db engine", str( e ) )
      return False
    return True

  def __delete( self, table, dataDict ):
    """
    Execute an sql delete

    List values in dataDict are OR-ed together; separate keys are AND-ed.
    """
    query = "DELETE FROM %s" % table
    valuesList = []
    keysList = []
    for key in dataDict:
      if isinstance( dataDict[ key ], list ):
        orList = []
        for keyValue in dataDict[ key ]:
          valuesList.append( keyValue )
          orList.append( "%s = ?" % key )
        keysList.append( "( %s )" % " OR ".join( orList ) )
      else:
        valuesList.append( dataDict[ key ] )
        keysList.append( "%s = ?" % key )
    if keysList:
      query += " WHERE %s" % ( " AND ".join( keysList ) )
    self.__dbExecute( "%s;" % query, values = valuesList )

  def __select( self, fields, table, dataDict, extraCond = "", queryEnd = "" ):
    """
    Execute a sql select

    :param fields: a field name or a list of field names
    :return: all matching rows (list of tuples)
    """
    valuesList = []
    keysList = []
    for key in dataDict:
      if isinstance( dataDict[ key ], list ):
        orList = []
        for keyValue in dataDict[ key ]:
          valuesList.append( keyValue )
          orList.append( "%s = ?" % key )
        keysList.append( "( %s )" % " OR ".join( orList ) )
      else:
        valuesList.append( dataDict[ key ] )
        keysList.append( "%s = ?" % key )
    # ( "".__class__, u"".__class__ ) is str/unicode on py2 and str on py3
    # (replaces the py2-only 'basestring')
    if isinstance( fields, ( "".__class__, u"".__class__ ) ):
      fields = [ fields ]
    if len( keysList ) > 0:
      whereCond = "WHERE %s" % ( " AND ".join( keysList ) )
    else:
      whereCond = ""
    if extraCond:
      if whereCond:
        whereCond += " AND %s" % extraCond
      else:
        whereCond = "WHERE %s" % extraCond
    query = "SELECT %s FROM %s %s %s;" % (
        ",".join( fields ),
        table,
        whereCond,
        queryEnd
        )
    c = self.__dbExecute( query, values = valuesList )
    return c.fetchall()

  def __insert( self, table, specialDict, dataDict ):
    """
    Execute an sql insert

    :param specialDict: column -> raw SQL expression (inserted verbatim,
                        e.g. { 'id' : 'NULL' } for autoincrement)
    :param dataDict: column -> bound value (passed as sql parameters)
    :return: number of inserted rows
    """
    valuesList = []
    valuePoitersList = []
    namesList = []
    for key in specialDict:
      namesList.append( key )
      valuePoitersList.append( specialDict[ key ] )
    for key in dataDict:
      namesList.append( key )
      valuePoitersList.append( "?" )
      valuesList.append( dataDict[ key ] )
    query = "INSERT INTO %s (%s) VALUES (%s);" % ( table,
                                                   ", ".join( namesList ),
                                                   ",".join( valuePoitersList ) )
    c = self.__dbExecute( query, values = valuesList )
    return c.rowcount

  def __update( self, newValues, table, dataDict, extraCond = "" ):
    """
    Execute a sql update

    :param newValues: column -> new value
    :return: number of updated rows
    """
    valuesList = []
    keysList = []
    updateFields = []
    for key in newValues:
      updateFields.append( "%s = ?" % key )
      valuesList.append( newValues[ key ] )
    for key in dataDict:
      if isinstance( dataDict[ key ], list ):
        orList = []
        for keyValue in dataDict[ key ]:
          valuesList.append( keyValue )
          orList.append( "%s = ?" % key )
        keysList.append( "( %s )" % " OR ".join( orList ) )
      else:
        valuesList.append( dataDict[ key ] )
        keysList.append( "%s = ?" % key )
    if len( keysList ) > 0:
      whereCond = "WHERE %s" % ( " AND ".join( keysList ) )
    else:
      whereCond = ""
    if extraCond:
      if whereCond:
        whereCond += " AND %s" % extraCond
      else:
        whereCond = "WHERE %s" % extraCond
    query = "UPDATE %s SET %s %s;" % ( table,
                                       ",".join( updateFields ),
                                       whereCond
                                       )
    c = self.__dbExecute( query, values = valuesList )
    return c.rowcount

  def registerSource( self, sourceDict ):
    """
    Register an activity source

    :return: the source id, or -1 if the insert failed
    """
    retList = self.__select( "id", "sources", sourceDict )
    if len( retList ) > 0:
      return retList[0][0]
    else:
      self.log.info( "Registering source", str( sourceDict ) )
      if self.__insert( "sources", { 'id' : 'NULL' }, sourceDict ) == 0:
        return -1
      return self.__select( "id", "sources", sourceDict )[0][0]

  def registerActivity( self, sourceId, acName, acDict ):
    """
    Register an activity

    :return: the rrd filename of the activity, or -1 if the insert failed
    """
    m = hashlib.md5()
    acDict[ 'name' ] = acName
    acDict[ 'sourceId' ] = sourceId
    # NOTE(review): py2-only — hashlib on py3 requires bytes, this would
    # need .encode() there; left as-is since this release targets py2.
    m.update( str( acDict ) )
    retList = self.__select( "filename", "activities", acDict )
    if len( retList ) > 0:
      return retList[0][0]
    else:
      # Backdate lastUpdate ~1 day so the first plot has data to show
      acDict[ 'lastUpdate' ] = int( Time.toEpoch() - 86000 )
      filePath = m.hexdigest()
      # Shard rrd files into 256 subdirectories by hash prefix
      filePath = "%s/%s.rrd" % ( filePath[:2], filePath )
      self.log.info( "Registering activity", str( acDict ) )
      if self.__insert( "activities", {
                          'id' : 'NULL',
                          'filename' : "'%s'" % filePath,
                        },
                        acDict ) == 0:
        return -1
      return self.__select( "filename", "activities", acDict )[0][0]

  def getFilename( self, sourceId, acName ):
    """
    Get rrd filename for an activity

    :return: the filename, or "" if the activity is unknown
    """
    queryDict = { 'sourceId' : sourceId, "name" : acName }
    retList = self.__select( "filename", "activities", queryDict )
    if len( retList ) == 0:
      return ""
    else:
      return retList[0][0]

  def findActivity( self, sourceId, acName ):
    """
    Find activity

    :return: the full activity row, or False if not found
    """
    queryDict = { 'sourceId' : sourceId, "name" : acName }
    retList = self.__select( "id, name, category, unit, type, description, filename, bucketLength, lastUpdate", "activities", queryDict )
    if len( retList ) == 0:
      return False
    else:
      return retList[0]

  def activitiesQuery( self, selDict, sortList, start, limit ):
    """
    Get a paginated, sorted list of activities joined with their sources

    :return: S_OK( ( rows, fieldNames ) ) or S_ERROR on a bad sort field
    """
    fields = [ 'sources.id', 'sources.site', 'sources.componentType', 'sources.componentLocation',
               'sources.componentName', 'activities.id', 'activities.name', 'activities.category',
               'activities.unit', 'activities.type', 'activities.description',
               'activities.bucketLength', 'activities.filename', 'activities.lastUpdate' ]
    extraSQL = ""
    if sortList:
      for sorting in sortList:
        if sorting[0] not in fields:
          return S_ERROR( "Sorting field %s is invalid" % sorting[0] )
      extraSQL = "ORDER BY %s" % ",".join( [ "%s %s" % sorting for sorting in sortList ] )
    if limit:
      if start:
        extraSQL += " LIMIT %s OFFSET %s" % ( limit, start )
      else:
        extraSQL += " LIMIT %s" % limit
    retList = self.__select( ", ".join( fields ), 'sources, activities', selDict, 'sources.id = activities.sourceId',
                             extraSQL )
    return S_OK( ( retList, fields ) )

  def setLastUpdate( self, sourceId, acName, lastUpdateTime ):
    """
    Set the lastUpdate timestamp of an activity

    :return: number of updated rows
    """
    queryDict = { 'sourceId' : sourceId, "name" : acName }
    return self.__update( { 'lastUpdate' : lastUpdateTime }, "activities", queryDict )

  def getLastUpdate( self, sourceId, acName ):
    """
    Get the lastUpdate timestamp of an activity

    :return: the timestamp, or False if the activity is unknown
    """
    queryDict = { 'sourceId' : sourceId, "name" : acName }
    # BUGFIX: this used self.__update( 'lastUpdate', ... ), which returns a
    # rowcount (int) and made the len() below raise; a select was intended.
    retList = self.__select( "lastUpdate", "activities", queryDict )
    if len( retList ) == 0:
      return False
    else:
      return retList[0][0]

  def queryField( self, field, definedFields ):
    """
    Query the values of a field given a set of defined ones
    """
    retList = self.__select( field, "sources, activities", definedFields, "sources.id = activities.sourceId" )
    return retList

  def getMatchingActivities( self, condDict ):
    """
    Get all activities matching the defined conditions
    """
    retList = self.queryField( Activity.dbFields, condDict )
    acList = []
    for acData in retList:
      acList.append( Activity( acData ) )
    return acList

  def registerView( self, viewName, viewData, varFields ):
    """
    Register a new view

    :return: S_OK() or S_ERROR if the name or definition already exists
    """
    retList = self.__select( "id", "views", { 'name' : viewName } )
    if len( retList ) > 0:
      return S_ERROR( "Name for view name already exists" )
    retList = self.__select( "name", "views", { 'definition' : viewData } )
    if len( retList ) > 0:
      return S_ERROR( "View specification already defined with name '%s'" % retList[0][0] )
    self.__insert( "views", { 'id' : 'NULL' }, { 'name' : viewName,
                                                 'definition' : viewData,
                                                 'variableFields' : ", ".join( varFields )
                                               } )
    return S_OK()

  def getViews( self, onlyStatic ):
    """
    Get views

    :param onlyStatic: if True, return only views without variable fields
    """
    queryCond = {}
    if onlyStatic:
      queryCond[ 'variableFields' ] = ""
    return self.__select( "id, name, variableFields", "views", queryCond )

  def getViewById( self, viewId ):
    """
    Get a view for a given id (or name, if a string is passed)
    """
    # str/unicode on py2, str on py3 (replaces the py2-only 'basestring')
    if isinstance( viewId, ( "".__class__, u"".__class__ ) ):
      return self.__select( "definition, variableFields", "views", { "name" : viewId } )
    else:
      return self.__select( "definition, variableFields", "views", { "id" : viewId } )

  def deleteView( self, viewId ):
    """
    Delete a view
    """
    self.__delete( "views", { 'id' : viewId } )

  def getSources( self, dbCond, fields = None ):
    """
    Get sources matching dbCond

    :param fields: optional list of fields to return (default: the id and
                   component identification fields). Was a mutable default
                   argument ( [] ).
    """
    if not fields:
      fields = "id, site, componentType, componentLocation, componentName"
    else:
      fields = ", ".join( fields )
    return self.__select( fields,
                          "sources",
                          dbCond )

  def getActivities( self, dbCond ):
    """
    Get activities matching dbCond
    """
    return self.__select( "id, name, category, unit, type, description, bucketLength",
                          "activities",
                          dbCond )

  def deleteActivity( self, sourceId, activityId ):
    """
    Delete an activity (and its source, if it was the source's last one)

    :return: S_OK( rrdFilename ) or S_ERROR if the activity does not exist
    """
    acCond = { 'sourceId' : sourceId, 'id' : activityId }
    acList = self.__select( "filename", "activities", acCond )
    if len( acList ) == 0:
      return S_ERROR( "Activity does not exist" )
    rrdFile = acList[0][0]
    self.__delete( "activities", acCond )
    acList = self.__select( "id", "activities", { 'sourceId' : sourceId } )
    if len( acList ) == 0:
      self.__delete( "sources", { 'id' : sourceId } )
    return S_OK( rrdFile )
|
infobloxopen/infoblox-netmri | refs/heads/master | infoblox_netmri/api/broker/v3_2_0/device_object_object_broker.py | 14 | from ..broker import Broker
class DeviceObjectObjectBroker(Broker):
controller = "device_object_objects"
def show(self, **kwargs):
    """Show the details for the specified device object object.

    Supported inputs:
        DeviceObjectObjectID (Integer, required): the internal NetMRI
            identifier of this usage relationship between network objects.
        methods (Array of String, optional): device object object methods
            to call on the returned record and include in the output.
            Available: parent_device_object, child_device_object,
            data_source, device.
        include (Array of String, optional): associated object types to
            include in the output, returned as outputs named after the
            association. Available: parent_device_object,
            child_device_object, data_source, device.

    Returns:
        device_object_object (DeviceObjectObject): the device object
        object identified by the specified DeviceObjectObjectID.
    """
    full_method_name = self._get_method_fullname("show")
    return self.api_request(full_method_name, kwargs)
def index(self, **kwargs):
    """List the available device object objects.

    Any of the inputs listed may be used to narrow the list; other inputs
    will be ignored. Of the various ways to query lists, using this method
    is most efficient.

    Supported inputs:
        DeviceID (Array of Integer, api >= 2.6): internal NetMRI
            identifiers for the devices owning the network objects.
        DeviceObjectObjectID (Array of Integer, api >= 2.6): internal
            NetMRI identifiers of the usage relationships.
        DeviceGroupID (Array of Integer): device groups to which to limit
            the results.
        timestamp (DateTime): return data as of this date and time; if
            omitted, the most recently collected data is used.
        methods (Array of String): per-record methods to call and include.
            Available: parent_device_object, child_device_object,
            data_source, device.
        include (Array of String): associated object types to include in
            the output. Available: parent_device_object,
            child_device_object, data_source, device.
        start (Integer, default 0): record number within the selected page.
        limit (Integer, default 1000, max 10000): page size.
        sort (Array of String, default DeviceObjectObjectID): sort
            field(s). Valid values are DeviceObjectObjectID, DeviceID,
            DataSourceID, ParentDeviceObjectID, ChildDeviceObjectID,
            OoFirstSeenTime, OoStartTime, OoEndTime, OoTimestamp,
            OoChangedCols, OoProvisionData.
        dir (Array of String, default asc): sort direction(s), 'asc' or
            'desc'.
        select (Array): attributes to return per record; all attributes if
            empty or omitted (same valid values as sort).
        goto_field / goto_value (String, api >= 2.8): NIOS GOTO field name
            and value used for locating a row position of records.

    Returns:
        device_object_objects (Array of DeviceObjectObject): records
        matching the specified input criteria.
    """
    full_method_name = self._get_method_fullname("index")
    return self.api_list_request(full_method_name, kwargs)
def search(self, **kwargs):
    """List the device object objects matching the input criteria.

    This method provides a more flexible search interface than index, but
    is more demanding on the system and will not perform to the same level
    as the index method. The field inputs below filter the result, along
    with the optional query string and XML filter.

    Field filters (each an Array, api >= 2.6):
        ChildDeviceObjectID (Integer): internal NetMRI identifier of the
            child network object (the used service).
        DataSourceID (Integer): internal NetMRI identifier of the
            collector NetMRI that collected this data record.
        DeviceID (Integer): internal NetMRI identifier for the device to
            which these network objects belong.
        DeviceObjectObjectID (Integer): internal NetMRI identifier of this
            usage relationship between network objects.
        OoChangedCols (String): fields that changed between this revision
            and the previous one.
        OoEndTime (DateTime): ending effective time, or empty if still in
            effect.
        OoFirstSeenTime (DateTime): when NetMRI first saw this
            relationship.
        OoProvisionData (String): internal data - do not modify, may
            change without warning.
        OoStartTime (DateTime): starting effective time of this record.
        OoTimestamp (DateTime): when this record was collected or
            calculated.
        ParentDeviceObjectID (Integer): internal NetMRI identifier of the
            parent network object (the user).

    General inputs:
        DeviceGroupID (Array of Integer): device groups to limit results.
        timestamp (DateTime): return data as of this date and time;
            defaults to the most recently collected data.
        methods / include (Array of String): as in index; available
            values: parent_device_object, child_device_object,
            data_source, device.
        start (Integer, default 0), limit (Integer, default 1000, max
            10000): pagination controls.
        sort (Array of String, default DeviceObjectObjectID) and dir
            (Array of String, default asc): ordering controls.
        select (Array): attributes to return; all if empty or omitted.
        goto_field / goto_value (String, api >= 2.8): NIOS GOTO row
            positioning.
        query (String): matched against the searchable attributes listed
            above; surround with '/' to use a regular expression instead
            of a containment operation.
        xml_filter (String, api >= 2.3): a SetFilter XML structure applied
            after any search query or field values but before any limit
            options; may be costly if not associated with database
            filtering.

    Returns:
        device_object_objects (Array of DeviceObjectObject): records
        matching the specified input criteria.
    """
    full_method_name = self._get_method_fullname("search")
    return self.api_list_request(full_method_name, kwargs)
def find(self, **kwargs):
    """List the device object objects matching a flexible comparison query.

    This provides the most flexible search specification of all the
    query mechanisms, enabling comparison operations other than
    equality. It is more complex to use and will not perform as
    efficiently as the index or search methods.

    The comparable fields are: ChildDeviceObjectID, DataSourceID,
    DeviceID, DeviceObjectObjectID, OoChangedCols, OoEndTime,
    OoFirstSeenTime, OoProvisionData, OoStartTime, OoTimestamp,
    ParentDeviceObjectID.

    For each field ``X`` above, three optional inputs are recognized:

    :param op_X: Operator applied to ``X``. One of: =, <>, rlike,
        not rlike, >, >=, <, <=, like, not like, is null, is not null,
        between. For 'between', a comma-delimited value is treated as
        an Array and must contain an even number of values.
    :param val_f_X: If op_X is given, compare ``X`` against the value
        of the *field* named here.
    :param val_c_X: If op_X is given, compare ``X`` against this
        explicit *constant* value. Exactly one of val_f_X / val_c_X
        must accompany op_X.

    Additional optional inputs:

    :param DeviceGroupID: Device group IDs to limit the results to
        (Array of Integer).
    :param timestamp: Return data as of this date and time; if omitted,
        the most recently collected data is used (DateTime).
    :param methods: Method names to call on each returned object:
        parent_device_object, child_device_object, data_source, device.
    :param include: Associated object types to include in the output
        (same names as ``methods``).
    :param start: Record number to return in the selected page
        (default 0).
    :param limit: Page size; maximum 10000 (default 1000).
    :param sort: Field(s) to sort the output by
        (default DeviceObjectObjectID).
    :param dir: Sort direction(s), 'asc' or 'desc' (default 'asc').
    :param select: Attributes to return for each DeviceObjectObject;
        all attributes if empty or omitted.
    :param goto_field: Field name for NIOS GOTO row positioning.
    :param goto_value: Value of goto_field for NIOS GOTO row positioning.
    :param xml_filter: SetFilter XML applied AFTER any search query or
        field values but before any limit options; limit and pagination
        are enforced after the filter.

    :return device_object_objects: Array of the DeviceObjectObject
        objects that match the specified input criteria.
    """
    method_name = self._get_method_fullname("find")
    return self.api_list_request(method_name, kwargs)
def parent_device_object(self, **kwargs):
    """Fetch the parent network object (the user) of this relationship.

    :param DeviceObjectObjectID: The internal NetMRI identifier of this
        usage relationship between network objects (Integer, required).
    :return: The parent network object of this relationship
        (DeviceObject).
    """
    full_name = self._get_method_fullname("parent_device_object")
    return self.api_request(full_name, kwargs)
def child_device_object(self, **kwargs):
    """Fetch the child network object of this relationship.

    :param DeviceObjectObjectID: The internal NetMRI identifier of this
        usage relationship between network objects (Integer, required).
    :return: The child network object of this relationship
        (DeviceObject).
    """
    full_name = self._get_method_fullname("child_device_object")
    return self.api_request(full_name, kwargs)
def data_source(self, **kwargs):
    """Fetch the collector NetMRI that collected this data record.

    :param DeviceObjectObjectID: The internal NetMRI identifier of this
        usage relationship between network objects (Integer, required).
    :return: The collector NetMRI that collected this data record
        (DataSource).
    """
    full_name = self._get_method_fullname("data_source")
    return self.api_request(full_name, kwargs)
def device(self, **kwargs):
    """Fetch the device from which this data was collected.

    :param DeviceObjectObjectID: The internal NetMRI identifier of this
        usage relationship between network objects (Integer, required).
    :return: The device from which this data was collected (Device).
    """
    full_name = self._get_method_fullname("device")
    return self.api_request(full_name, kwargs)
|
wackywendell/numpy | refs/heads/master | benchmarks/benchmarks/bench_indexing.py | 55 | from __future__ import absolute_import, division, print_function
from .common import Benchmark, get_squares_, get_indexes_, get_indexes_rand_
from os.path import join as pjoin
import shutil
import sys
import six
from numpy import memmap, float32, array
import numpy as np
from tempfile import mkdtemp
class Indexing(Benchmark):
    """Benchmark basic, combined, and fancy indexing (read and write)."""

    # Parameter grid: which index array to use, how it is applied
    # (plain, second-axis, or np.ix_ cross product), and whether the
    # access is a read ('') or an assignment ('=1').
    params = [["indexes_", "indexes_rand_"],
              ['I', ':,I', 'np.ix_(I, I)'],
              ['', '=1']]
    param_names = ['indexes', 'sel', 'op']

    def setup(self, indexes, sel, op):
        # Substitute the chosen index variable name for the 'I'
        # placeholder in the selector template.
        sel = sel.replace('I', indexes)
        # Namespace in which the generated benchmark function runs;
        # helpers come from the benchmarks' common module.
        ns = {'squares_': get_squares_(),
              'np': np,
              'indexes_': get_indexes_(),
              'indexes_rand_': get_indexes_rand_()}
        # Build (via exec) a closure that applies the indexing
        # expression to every benchmark array; dict iteration spelling
        # differs between Python 2 and 3.
        if sys.version_info[0] >= 3:
            code = "def run():\n for a in squares_.values(): a[%s]%s"
        else:
            code = "def run():\n for a in squares_.itervalues(): a[%s]%s"
        code = code % (sel, op)
        six.exec_(code, ns)
        self.func = ns['run']

    def time_op(self, indexes, sel, op):
        # Timed body: just invoke the pre-built indexing closure.
        self.func()
class IndexingSeparate(Benchmark):
    """Benchmark slicing and fancy indexing of a memory-mapped array."""

    def setup(self):
        # Back the memmap with a scratch file in a fresh temp directory.
        self.tmp_dir = mkdtemp()
        backing_path = pjoin(self.tmp_dir, 'tmp.dat')
        self.mmap_arr = memmap(backing_path, dtype=float32,
                               mode='w+', shape=(50, 60))
        self.fancy_idx = array([3, 4, 6, 10, 20])

    def teardown(self):
        # Drop the memmap reference before removing its backing file.
        del self.mmap_arr
        shutil.rmtree(self.tmp_dir)

    def time_mmap_slicing(self):
        for _ in range(1000):
            self.mmap_arr[5:10]

    def time_mmap_fancy_indexing(self):
        for _ in range(1000):
            self.mmap_arr[self.fancy_idx]
class IndexingStructured0D(Benchmark):
    """Benchmark field access on 0-d structured arrays and scalars."""

    def setup(self):
        # One structured field holding 256 float32 values.
        self.struct_dtype = np.dtype([('a', 'f4', 256)])
        # 0-d structured array pair (source / destination).
        self.src_array = np.zeros((), self.struct_dtype)
        self.dst_array = self.src_array.copy()
        # Structured *scalar* pair, obtained by indexing a 1-d array.
        self.src_scalar = np.zeros(1, self.struct_dtype)[0]
        self.dst_scalar = self.src_scalar.copy()

    def time_array_slice(self):
        self.dst_array['a'][:] = self.src_array['a']

    def time_array_all(self):
        self.dst_array['a'] = self.src_array['a']

    def time_scalar_slice(self):
        self.dst_scalar['a'][:] = self.src_scalar['a']

    def time_scalar_all(self):
        self.dst_scalar['a'] = self.src_scalar['a']
|
upliftaero/MissionPlanner | refs/heads/master | Lib/site-packages/scipy/linalg/decomp_svd.py | 53 | """SVD decomposition functions."""
import numpy
from numpy import asarray_chkfinite, zeros, r_, diag
from scipy.linalg import calc_lwork
# Local imports.
from misc import LinAlgError, _datacopied
from lapack import get_lapack_funcs
from funcinfo import get_func_info
def svd(a, full_matrices=True, compute_uv=True, overwrite_a=False):
    """Singular Value Decomposition.

    Factor the matrix ``a`` into two unitary matrices ``U`` and ``Vh``
    and a 1-d array ``s`` of singular values (real, non-negative) such
    that ``a == U S Vh``, where ``S`` is a suitably shaped matrix of
    zeros whose main diagonal is ``s``.

    Parameters
    ----------
    a : array, shape (M, N)
        Matrix to decompose.
    full_matrices : boolean
        If true, U and Vh are shaped (M, M) and (N, N); otherwise the
        shapes are (M, K) and (K, N) where K = min(M, N).
    compute_uv : boolean
        Whether to compute U and Vh in addition to s (default: true).
    overwrite_a : boolean
        Whether data in a may be overwritten (may improve performance).

    Returns
    -------
    U : array, shape (M, M) or (M, K) depending on full_matrices
    s : array, shape (K,)
        The singular values, sorted so that s[i] >= s[i+1]; K = min(M, N).
    Vh : array, shape (N, N) or (K, N) depending on full_matrices

    For compute_uv = False, only s is returned.

    Raises LinAlgError if the SVD computation does not converge.

    See also
    --------
    svdvals : return singular values of a matrix
    diagsvd : return the Sigma matrix, given the vector s
    """
    # A hack until full_matrices == 0 support is fixed here: delegate
    # the reduced-size decomposition to numpy.linalg.
    if full_matrices == 0:
        import numpy.linalg
        return numpy.linalg.svd(a, full_matrices=0, compute_uv=compute_uv)

    arr = asarray_chkfinite(a)
    if len(arr.shape) != 2:
        raise ValueError('expected matrix')
    m, n = arr.shape
    overwrite_a = overwrite_a or (_datacopied(arr, a))

    # Locate the LAPACK divide-and-conquer SVD driver for this dtype.
    gesdd, = get_lapack_funcs(('gesdd',), (arr,))
    gesdd_info = get_func_info(gesdd)
    if gesdd_info.module_name[:7] != 'flapack':
        # 'clapack' (and anything else) is not supported here.
        raise NotImplementedError('calling gesdd from %s' % gesdd_info.module_name)

    lwork = calc_lwork.gesdd(gesdd_info.prefix, m, n, compute_uv)[1]
    u, s, v, info = gesdd(arr, compute_uv=compute_uv, lwork=lwork,
                          overwrite_a=overwrite_a)

    # LAPACK info: > 0 means no convergence, < 0 flags a bad argument.
    if info > 0:
        raise LinAlgError("SVD did not converge")
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal gesdd'
                         % -info)
    return (u, s, v) if compute_uv else s
def svdvals(a, overwrite_a=False):
    """Compute only the singular values of a matrix.

    Parameters
    ----------
    a : array, shape (M, N)
        Matrix to decompose.
    overwrite_a : boolean
        Whether data in a may be overwritten (may improve performance).

    Returns
    -------
    s : array, shape (K,)
        The singular values, sorted so that s[i] >= s[i+1]; K = min(M, N).

    Raises LinAlgError if the SVD computation does not converge.

    See also
    --------
    svd : return the full singular value decomposition of a matrix
    diagsvd : return the Sigma matrix, given the vector s
    """
    # Delegate to svd with U/Vh computation disabled.
    return svd(a, compute_uv=0, overwrite_a=overwrite_a)
def diagsvd(s, M, N):
    """Construct the (M, N) sigma matrix in SVD from singular values.

    Parameters
    ----------
    s : array, shape (M,) or (N,)
        Singular values.
    M : integer
    N : integer
        Size of the matrix whose singular values are s.

    Returns
    -------
    S : array, shape (M, N)
        The S-matrix in the singular value decomposition.
    """
    core = diag(s)
    char_code = core.dtype.char
    k = len(s)
    if k == M:
        # Pad with zero columns on the right.
        return r_['-1', core, zeros((M, N - M), char_code)]
    if k == N:
        # Pad with zero rows below.
        return r_[core, zeros((M - N, N), char_code)]
    raise ValueError("Length of s must be M or N.")
# Orthonormal decomposition
def orth(A):
    """Construct an orthonormal basis for the range of A using SVD.

    Parameters
    ----------
    A : array, shape (M, N)

    Returns
    -------
    Q : array, shape (M, K)
        Orthonormal basis for the range of A; K is the effective rank
        of A, as determined by an automatic cutoff.

    See also
    --------
    svd : Singular value decomposition of a matrix
    """
    u, s, vh = svd(A)
    M, N = A.shape
    # Cutoff: largest singular value, scaled by machine epsilon and
    # the larger matrix dimension.
    eps = numpy.finfo(float).eps
    tol = max(M, N) * numpy.amax(s) * eps
    rank = numpy.sum(s > tol, dtype=int)
    # The first `rank` left singular vectors span range(A).
    return u[:, :rank]
|
nzavagli/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/lib2to3/fixes/fix_xreadlines.py | 327 | """Fix "for x in f.xreadlines()" -> "for x in f".
This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
# Author: Collin Winter
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixXreadlines(fixer_base.BaseFix):
    """Rewrite ``for x in f.xreadlines()`` as ``for x in f``.

    A bare reference such as ``g(f.xreadlines)`` becomes
    ``g(f.__iter__)``.
    """
    BM_compatible = True
    PATTERN = """
    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
    |
    power< any+ trailer< '.' no_call='xreadlines' > >
    """

    def transform(self, node, results):
        no_call = results.get("no_call")
        if not no_call:
            # Call form matched: drop the .xreadlines() trailers and
            # keep only the object being iterated.
            node.replace([x.clone() for x in results["call"]])
        else:
            # Bare attribute form matched: swap in __iter__.
            no_call.replace(Name(u"__iter__", prefix=no_call.prefix))
|
caioserra/apiAdwords | refs/heads/master | examples/adspygoogle/dfp/v201308/get_all_content.py | 2 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all content. This feature is only available to DFP
video publishers."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
# Initialize client object.
# NOTE(review): presumably picks up stored credentials from the directory
# four levels up (legacy Python 2 DFP client library) — confirm.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
content_service = client.GetService('ContentService', version='v201308')
# Get content by statement.
# Pages through every Content entity and returns the accumulated list.
content = DfpUtils.GetAllEntitiesByStatementWithService(content_service)
# Display results.
for content_item in content:
  print ('Content with id \'%s\', name \'%s\', and status \'%s\' was found.'
         % (content_item['id'], content_item['name'], content_item['status']))
print
print 'Number of results found: %s' % len(content)
|
klonage/nlt-gcs | refs/heads/master | Lib/code.py | 62 | """Utilities needed to emulate Python's interactive interpreter.
"""
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
import sys
import traceback
from codeop import CommandCompiler, compile_command
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
"compile_command"]
def softspace(file, newvalue):
    """Set ``file.softspace`` to *newvalue* and return the prior value.

    Tolerant of odd file objects: a missing attribute reads as 0, and a
    failed write (attribute-less or read-only object) is ignored.
    """
    previous = getattr(file, "softspace", 0)
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # "attribute-less object" or "read-only attributes"
        pass
    return previous
class InteractiveInterpreter:
    """Base class for InteractiveConsole.
    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).
    """
    def __init__(self, locals=None):
        """Constructor.
        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.
        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        # CommandCompiler is stateful: it remembers __future__ imports
        # seen so far so later sources compile under the same flags.
        self.compile = CommandCompiler()
    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.
        Arguments are as for compile_command().
        One of several things can happen:
        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.
        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.
        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).
        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.
        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False
        if code is None:
            # Case 2
            return True
        # Case 3
        self.runcode(code)
        return False
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.
        """
        try:
            exec code in self.locals
        except SystemExit:
            # Let SystemExit propagate so the embedding app can exit.
            raise
        except:
            self.showtraceback()
        else:
            # Emulate the interpreter's handling of a trailing comma in
            # a Python 2 print statement (pending softspace).
            if softspace(sys.stdout, 0):
                print
    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.
        This doesn't display a stack trace because there isn't one.
        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).
        The output is written by self.write(), below.
        """
        # Record the error in sys.last_* the way the real REPL does.
        type, value, sys.last_traceback = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value
            except:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        list = traceback.format_exception_only(type, value)
        map(self.write, list)
    def showtraceback(self):
        """Display the exception that just occurred.
        We remove the first stack item because it is our own code.
        The output is written by self.write(), below.
        """
        try:
            type, value, tb = sys.exc_info()
            sys.last_type = type
            sys.last_value = value
            sys.last_traceback = tb
            tblist = traceback.extract_tb(tb)
            # Drop the first frame: it is runcode() itself.
            del tblist[:1]
            list = traceback.format_list(tblist)
            if list:
                list.insert(0, "Traceback (most recent call last):\n")
            list[len(list):] = traceback.format_exception_only(type, value)
        finally:
            # Drop local refs to the traceback to break reference cycles.
            tblist = tb = None
        map(self.write, list)
    def write(self, data):
        """Write a string.
        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.
        """
        sys.stderr.write(data)
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.
    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.
    """
    def __init__(self, locals=None, filename="<console>"):
        """Constructor.
        The optional locals argument will be passed to the
        InteractiveInterpreter base class.
        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.
        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()
    def resetbuffer(self):
        """Reset the input buffer."""
        # self.buffer accumulates the lines of the statement being typed.
        self.buffer = []
    def interact(self, banner=None):
        """Closely emulate the interactive Python console.
        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).
        """
        # Ensure the prompts exist; an embedding application may not
        # have set them.
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        else:
            self.write("%s\n" % str(banner))
        more = 0
        while 1:
            try:
                if more:
                    # Inside a multi-line statement: continuation prompt.
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                    # Can be None if sys.stdin was redefined
                    encoding = getattr(sys.stdin, "encoding", None)
                    if encoding and not isinstance(line, unicode):
                        line = line.decode(encoding)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C aborts the current statement, not the loop.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
    def push(self, line):
        """Push a line to the interpreter.
        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).
        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more
    def raw_input(self, prompt=""):
        """Write a prompt and read a line.
        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.
        The base implementation uses the built-in function
        raw_input(); a subclass may replace this with a different
        implementation.
        """
        return raw_input(prompt)
def interact(banner=None, readfunc=None, local=None):
    """Run a REPL that closely emulates the interactive interpreter.

    Backwards-compatible functional interface to InteractiveConsole.
    Arguments (all optional, all default to None):
    banner   -- forwarded to InteractiveConsole.interact()
    readfunc -- when given, replaces InteractiveConsole.raw_input()
    local    -- namespace dict passed to InteractiveInterpreter.__init__()
    When readfunc is not supplied, GNU readline support is enabled if
    the readline module can be imported.
    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best effort only: line editing is a convenience, not a need.
        try:
            import readline
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner)
if __name__ == "__main__":
    # Running this module as a script starts an interactive session.
    interact()
|
andfoy/margffoy-tuay-server | refs/heads/master | env/lib/python2.7/site-packages/tests/test_base_model_publisher_router_deleted.py | 13 | from swampdragon.route_handler import BaseModelPublisherRouter
from swampdragon.serializers.model_serializer import ModelSerializer
from swampdragon.testing.dragon_testcase import DragonTestCase
from .models import TwoFieldModel
class Serializer(ModelSerializer):
    """Serializer for TwoFieldModel used by the router tests below."""
    class Meta:
        # Bug fix: ('id') is just the string 'id' — a one-element tuple
        # needs a trailing comma, otherwise iterating the "tuple" yields
        # the characters 'i' and 'd' instead of the field name.
        publish_fields = ('id',)
        update_fields = ('text', 'number')
        model = TwoFieldModel
class Router(BaseModelPublisherRouter):
    # Minimal concrete router under test: publishes TwoFieldModel
    # changes using the Serializer defined above.
    model = TwoFieldModel
    serializer_class = Serializer
    def get_object(self, **kwargs):
        # Resolve a single instance from the client-supplied filter
        # kwargs (e.g. {'id': 1}); raises DoesNotExist on no match.
        return self.model.objects.get(**kwargs)
class TestBaseModelPublisherRouter(DragonTestCase):
    # Verifies that deleting an object through the router publishes a
    # 'deleted' message on the model channel.
    def setUp(self):
        self.router = Router(self.connection)
        self.obj = self.router.model.objects.create(text='text', number=5)
    def test_deleted(self):
        data = {'id': self.obj.pk}
        # Subscribe first so the delete broadcast has a destination.
        self.router.subscribe(**{'channel': 'client-channel'})
        self.router.delete(**data)
        actual = self.connection.get_last_published()
        # The published payload only carries the type and primary key.
        expected = {'action': 'deleted', 'channel': 'twofieldmodel|', 'data': {'_type': 'twofieldmodel', 'id': 1}}
        self.assertDictEqual(actual, expected)
|
richardtran415/pymatgen | refs/heads/master | pymatgen/symmetry/__init__.py | 25 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The symmetry package implements symmetry tools, e.g., spacegroup determination,
etc.
"""
|
andrewsmedina/django | refs/heads/master | django/contrib/staticfiles/finders.py | 8 | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage, Storage, FileSystemStorage
from django.utils.datastructures import SortedDict
from django.utils.functional import empty, memoize, LazyObject
from django.utils.module_loading import import_by_path
from django.utils._os import safe_join
from django.utils import six
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.storage import AppStaticStorage
_finders = SortedDict()
class BaseFinder(object):
    """
    Abstract base class for custom staticfiles finder implementations.
    """
    def find(self, path, all=False):
        """
        Resolve a relative file path to an absolute file path.

        With ``all=False`` (the default) only the first matching path
        is returned; with ``all=True`` a list of every matching path
        is returned.
        """
        raise NotImplementedError()

    def list(self, ignore_patterns):
        """
        Yield two-item (relative path, storage instance) pairs for all
        files found, skipping anything matching ``ignore_patterns``.
        """
        raise NotImplementedError()
class FileSystemFinder(BaseFinder):
    """
    A static files finder that uses the ``STATICFILES_DIRS`` setting
    to locate files.
    """
    def __init__(self, apps=None, *args, **kwargs):
        # List of locations with static files
        self.locations = []
        # Maps dir paths to an appropriate storage instance
        self.storages = SortedDict()
        if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
            raise ImproperlyConfigured(
                "Your STATICFILES_DIRS setting is not a tuple or list; "
                "perhaps you forgot a trailing comma?")
        for root in settings.STATICFILES_DIRS:
            if isinstance(root, (list, tuple)):
                # Entries may be ('prefix', 'path') pairs.
                prefix, root = root
            else:
                prefix = ''
            if os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root):
                # collectstatic writes into STATIC_ROOT; sourcing from it
                # too would copy files onto themselves.
                raise ImproperlyConfigured(
                    "The STATICFILES_DIRS setting should "
                    "not contain the STATIC_ROOT setting")
            if (prefix, root) not in self.locations:
                self.locations.append((prefix, root))
        for prefix, root in self.locations:
            filesystem_storage = FileSystemStorage(location=root)
            filesystem_storage.prefix = prefix
            self.storages[root] = filesystem_storage
        super(FileSystemFinder, self).__init__(*args, **kwargs)
    def find(self, path, all=False):
        """
        Looks for files in the extra locations
        as defined in ``STATICFILES_DIRS``.
        Returns the first absolute path found, or a list of all matches
        when ``all=True``.
        """
        matches = []
        for prefix, root in self.locations:
            matched_path = self.find_location(root, path, prefix)
            if matched_path:
                if not all:
                    return matched_path
                matches.append(matched_path)
        return matches
    def find_location(self, root, path, prefix=None):
        """
        Finds a requested static file in a location, returning the found
        absolute path (or ``None`` if no match).
        """
        if prefix:
            # Only paths under the configured prefix can match here.
            prefix = '%s%s' % (prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # safe_join rejects paths that escape root (directory traversal).
        path = safe_join(root, path)
        if os.path.exists(path):
            return path
    def list(self, ignore_patterns):
        """
        List all files in all locations.
        """
        for prefix, root in self.locations:
            storage = self.storages[root]
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = AppStaticStorage
    def __init__(self, apps=None, *args, **kwargs):
        # The list of apps that are handled
        self.apps = []
        # Mapping of app module paths to storage instances
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            app_storage = self.storage_class(app)
            if os.path.isdir(app_storage.location):
                # Only register apps that actually have a static dir.
                self.storages[app] = app_storage
                if app not in self.apps:
                    self.apps.append(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)
    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in six.itervalues(self.storages):
            if storage.exists(''): # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage
    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        Returns the first absolute path found, or a list of all matches
        when ``all=True``.
        """
        matches = []
        for app in self.apps:
            match = self.find_in_app(app, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches
    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        Returns the absolute path, or ``None`` when the app does not
        provide the file.
        """
        storage = self.storages.get(app, None)
        if storage:
            if storage.prefix:
                # Strip the storage's prefix before looking it up.
                prefix = '%s%s' % (storage.prefix, os.sep)
                if not path.startswith(prefix):
                    return None
                path = path[len(prefix):]
            # only try to find a file if the source dir actually exists
            if storage.exists(path):
                matched_path = storage.path(path)
                if matched_path:
                    return matched_path
class BaseStorageFinder(BaseFinder):
    """
    A base static files finder meant to be extended with a custom
    storage class.
    """
    # Subclasses (or the constructor) must supply a storage class or
    # instance here.
    storage = None
    def __init__(self, storage=None, *args, **kwargs):
        if storage is not None:
            self.storage = storage
        if self.storage is None:
            raise ImproperlyConfigured("The staticfiles storage finder %r "
                                       "doesn't have a storage class "
                                       "assigned." % self.__class__)
        # Make sure we have an storage instance here.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super(BaseStorageFinder, self).__init__(*args, **kwargs)
    def find(self, path, all=False):
        """
        Looks for files in the default file storage, if it's local.
        """
        try:
            # Non-filesystem storages raise NotImplementedError from
            # path(); such storages cannot be searched locally.
            self.storage.path('')
        except NotImplementedError:
            pass
        else:
            if self.storage.exists(path):
                match = self.storage.path(path)
                if all:
                    match = [match]
                return match
        return []
    def list(self, ignore_patterns):
        """
        List all files of the storage.
        """
        for path in utils.get_files(self.storage, ignore_patterns):
            yield path, self.storage
class DefaultStorageFinder(BaseStorageFinder):
    """
    A static files finder that uses the default storage backend.
    """
    storage = default_storage
    def __init__(self, *args, **kwargs):
        super(DefaultStorageFinder, self).__init__(*args, **kwargs)
        # NOTE(review): ``empty`` is the LazyObject sentinel and is itself
        # truthy, so a storage lacking ``base_location`` entirely passes
        # this check; only an explicitly falsy location raises — confirm
        # this is the intended behaviour.
        base_location = getattr(self.storage, 'base_location', empty)
        if not base_location:
            raise ImproperlyConfigured("The storage backend of the "
                                       "staticfiles finder %r doesn't have "
                                       "a valid location." % self.__class__)
def find(path, all=False):
    """
    Find a static file with the given path using all enabled finders.

    If ``all`` is ``False`` (default), return the first matching
    absolute path (or ``None`` if no match). Otherwise return a list
    of all matching paths (empty when nothing matched).
    """
    matches = []
    for finder in get_finders():
        result = finder.find(path, all=all)
        if not all and result:
            # First match wins in single-result mode.
            return result
        if not isinstance(result, (list, tuple)):
            result = [result]
        matches.extend(result)
    if matches:
        return matches
    # No match.  Bug fix: the old ``return all and [] or None`` idiom
    # evaluated to None even when all=True, because [] is falsy.
    return [] if all else None
def get_finders():
    """Yield an instance of each finder named in STATICFILES_FINDERS,
    in setting order, via the memoized get_finder()."""
    for finder_path in settings.STATICFILES_FINDERS:
        yield get_finder(finder_path)
def _get_finder(import_path):
    """
    Imports the staticfiles finder class described by import_path, where
    import_path is the full Python path to the class.

    Raises ImproperlyConfigured when the target is not a BaseFinder
    subclass; returns a new instance otherwise.
    """
    Finder = import_by_path(import_path)
    if not issubclass(Finder, BaseFinder):
        raise ImproperlyConfigured('Finder "%s" is not a subclass of "%s"' %
                                   (Finder, BaseFinder))
    return Finder()
# Public, memoized entry point: one finder instance per import path,
# cached in the module-level _finders dict keyed on the single argument.
get_finder = memoize(_get_finder, _finders, 1)
|
SasView/sasmodels | refs/heads/master | sasmodels/conversion_table.py | 1 | """
Parameter conversion table
*CONVERSION_TABLE* gives the old model name and a dictionary of old parameter
names for each parameter in sasmodels. This is used by :mod:`.convert` to
determine the equivalent parameter set when comparing a sasmodels model to
the models defined in previous versions of SasView and sasmodels. This is now
versioned based on the version number of SasView.
When any sasmodels parameter or model name is changed, this must be modified to
account for that.
Usage::
<old_Sasview_version> : {
<new_model_name> : [
<old_model_name> ,
{
<new_param_name_1> : <old_param_name_1>,
...
<new_param_name_n> : <old_param_name_n>
}
]
}
Any future parameter and model name changes can and should be given in this
table for future compatibility.
"""
CONVERSION_TABLE = {
(3, 1, 2) : {
"adsorbed_layer": [
"Core2ndMomentModel",
{
"scale": "scale",
"second_moment": "second_moment",
"density_shell": "density_poly",
"sld_solvent": "sld_solv",
"radius": "radius_core",
"volfraction": "volf_cores",
"background": "background",
"adsorbed_amount": "ads_amount",
"sld_shell": "sld_poly"
}
],
"barbell": [
"BarBellModel",
{
"sld": "sld_barbell",
"length": "len_bar",
"radius_bell": "rad_bell",
"radius": "rad_bar",
"sld_solvent": "sld_solv"
}
],
"bcc_paracrystal": [
"BCCrystalModel",
{
"sld": "sldSph",
"sld_solvent": "sldSolv"
}
],
"be_polyelectrolyte": [
"BEPolyelectrolyte",
{
"ionization_degree": "alpha",
"polymer_concentration": "c",
"salt_concentration": "cs",
"virial_param": "h",
"background": "background",
"contrast_factor": "k",
"bjerrum_length": "lb",
"monomer_length": "b"
}
],
"binary_hard_sphere": [
"BinaryHSModel",
{
"sld_sm": "ss_sld",
"sld_lg": "ls_sld",
"volfraction_sm": "vol_frac_ss",
"radius_lg": "l_radius",
"radius_sm": "s_radius",
"volfraction_lg": "vol_frac_ls",
"sld_solvent": "solvent_sld"
}
],
"broad_peak": [
"BroadPeakModel",
{
"peak_pos": "q_peak",
"scale": None,
"lorentz_length": "length_l",
"porod_scale": "scale_p",
"lorentz_exp": "exponent_l",
"lorentz_scale": "scale_l",
"porod_exp": "exponent_p"
}
],
"capped_cylinder": [
"CappedCylinderModel",
{
"sld": "sld_capcyl",
"length": "len_cyl",
"radius_cap": "rad_cap",
"radius": "rad_cyl",
"sld_solvent": "sld_solv"
}
],
"core_multi_shell": [
"CoreMultiShellModel",
{
"thickness": "thick_shell",
"sld": "sld_shell",
"radius": "rad_core0",
"sld_core": "sld_core0",
"sld_solvent": "sld_solv",
"n": "n_shells",
"M0:sld_core": "M0_sld_core0",
"mtheta:sld_core": "M_theta_core0",
"mphi:sld_core": "M_phi_core0",
"M0:sld1": "M0_sld_shell1",
"mtheta:sld1": "M_theta_shell1",
"mphi:sld1": "M_phi_shell1",
"M0:sld2": "M0_sld_shell2",
"mtheta:sld2": "M_theta_shell2",
"mphi:sld2": "M_phi_shell2",
"M0:sld3": "M0_sld_shell3",
"mtheta:sld3": "M_theta_shell3",
"mphi:sld3": "M_phi_shell3",
"M0:sld4": "M0_sld_shell4",
"mtheta:sld4": "M_theta_shell4",
"mphi:sld4": "M_phi_shell4",
"M0:sld_solvent": "M0_sld_solv",
"mtheta:sld_solvent": "M_theta_solv",
"mphi:sld_solvent": "M_phi_solv",
"up:frac_i": "Up_frac_i",
"up:frac_f": "Up_frac_f",
"up:angle": "Up_theta",
}
],
"core_shell_bicelle": [
"CoreShellBicelleModel",
{
"phi": "axis_phi",
"sld_core": "core_sld",
"sld_rim": "rim_sld",
"thick_face": "face_thick",
"sld_solvent": "solvent_sld",
"thick_rim": "rim_thick",
"sld_face": "face_sld",
"theta": "axis_theta"
}
],
"core_shell_cylinder": [
"CoreShellCylinderModel",
{
"theta": "axis_theta",
"phi": "axis_phi",
"sld_shell": "shell_sld",
"sld_solvent": "solvent_sld",
"sld_core": "core_sld"
}
],
"core_shell_ellipsoid:1": [
"CoreShellEllipsoidModel",
{
"sld_core": "sld_core",
"sld_shell": "sld_shell",
"sld_solvent": "sld_solvent",
"radius_equat_core": "equat_core",
"x_core": "polar_core",
"thick_shell": "equat_shell",
"x_polar_shell": "polar_shell",
"theta": "axis_theta",
"phi": "axis_phi",
}
],
"core_shell_ellipsoid": [
"CoreShellEllipsoidXTModel",
{
"sld_core": "sld_core",
"sld_shell": "sld_shell",
"sld_solvent": "sld_solvent",
"radius_equat_core": "equat_core",
"thick_shell": "T_shell",
"x_core": "X_core",
"x_polar_shell": "XpolarShell",
"theta": "axis_theta",
"phi": "axis_phi",
}
],
"core_shell_parallelepiped": [
"CSParallelepipedModel",
{
"sld_core": "sld_pcore",
"sld_a": "sld_rimA",
"sld_b": "sld_rimB",
"sld_c": "sld_rimC",
"sld_solvent": "sld_solv",
"length_a": "shortA",
"length_b": "midB",
"length_c": "longC",
"thick_rim_a": "rimA",
"thick_rim_c": "rimC",
"thick_rim_b": "rimB",
"theta": "parallel_theta",
"phi": "parallel_phi",
"psi": "parallel_psi",
}
],
"core_shell_sphere": [
"CoreShellModel",
{
"sld_core": "core_sld",
"sld_shell": "shell_sld",
"sld_solvent": "solvent_sld",
"M0:sld_core": "M0_sld_core",
"mtheta:sld_core": "M_theta_core",
"mphi:sld_core": "M_phi_core",
"M0:sld_shell": "M0_sld_shell",
"mtheta:sld_shell": "M_theta_shell",
"mphi:sld_shell": "M_phi_shell",
"M0:sld_solvent": "M0_sld_solv",
"mtheta:sld_solvent": "M_theta_solv",
"mphi:sld_solvent": "M_phi_solv",
"up:frac_i": "Up_frac_i",
"up:frac_f": "Up_frac_f",
"up:angle": "Up_theta"
}
],
"correlation_length": [
"CorrLength",
{
"porod_scale": "scale_p",
"lorentz_scale": "scale_l",
"porod_exp": "exponent_p",
"lorentz_exp": "exponent_l",
"cor_length": "length_l"
},
"CorrLengthModel"
],
"cylinder": [
"CylinderModel",
{
"sld": "sldCyl",
"theta": "cyl_theta",
"phi": "cyl_phi",
"sld_solvent": "sldSolv",
"M0:sld": "M0_sld_cyl",
"mtheta:sld": "M_theta_cyl",
"mphi:sld": "M_phi_cyl",
"M0:sld_solvent": "M0_sld_solv",
"mtheta:sld_solvent": "M_theta_solv",
"mphi:sld_solvent": "M_phi_solv",
"up:frac_i": "Up_frac_i",
"up:frac_f": "Up_frac_f",
"up:angle": "Up_theta"
}
],
"dab": [
"DABModel",
{
"cor_length": "length"
}
],
"ellipsoid": [
"EllipsoidModel",
{
"phi": "axis_phi",
"radius_equatorial": "radius_b",
"sld": "sldEll",
"theta": "axis_theta",
"radius_polar": "radius_a",
"sld_solvent": "sldSolv"
}
],
"elliptical_cylinder": [
"EllipticalCylinderModel",
{
"axis_ratio": "r_ratio",
"radius_minor": "r_minor",
"sld": "sldCyl",
"sld_solvent": "sldSolv",
"theta": "cyl_theta",
"phi": "cyl_phi",
"psi": "cyl_psi",
}
],
"fcc_paracrystal": [
"FCCrystalModel",
{
"sld": "sldSph",
"sld_solvent": "sldSolv"
}
],
"flexible_cylinder": [
"FlexibleCylinderModel",
{
"sld": "sldCyl",
"sld_solvent": "sldSolv"
}
],
"flexible_cylinder_elliptical": [
"FlexCylEllipXModel",
{
"sld": "sldCyl",
"sld_solvent": "sldSolv"
}
],
"fractal": [
"FractalModel",
{
"sld_block": "sldBlock",
"radius": "radius",
"cor_length": "cor_length",
"sld_solvent": "sldSolv",
"fractal_dim": "fractal_dim"
}
],
"fractal_core_shell": [
"FractalCoreShell",
{
"sld_core": "core_sld",
"sld_shell": "shell_sld",
"sld_solvent": "solvent_sld",
"radius": "radius",
"thickness": "thickness",
"fractal_dim": "frac_dim",
"cor_length": "cor_length",
"volfraction": "volfraction",
},
"FractalCoreShellModel"
],
"fuzzy_sphere": [
"FuzzySphereModel",
{
"sld": "sldSph",
"fuzziness": "fuzziness",
"radius": "radius",
"sld_solvent": "sldSolv"
}
],
"gauss_lorentz_gel": [
"GaussLorentzGel",
{
"gauss_scale": "scale_g",
"cor_length_dynamic": "dyn_colength",
"cor_length_static": "stat_colength",
"background": "background",
"lorentz_scale": "scale_l"
},
"GaussLorentzGelModel"
],
"gaussian_peak": [
"Peak Gauss Model",
{
"peak_pos": "q0",
"sigma": "B",
},
"PeakGaussModel",
],
"gel_fit": [
"GelFitModel",
{
"rg": "radius",
"lorentz_scale": "lScale",
"guinier_scale": "gScale",
"fractal_dim": "FractalExp",
"cor_length": "zeta",
}
],
"guinier": [
"Guinier",
{
"rg": "rg"
},
"GuinierModel",
],
"guinier_porod": [
"GuinierPorod",
{
"s": "dim",
"rg": "rg",
"porod_exp": "m",
"scale": "scale",
"background": "background"
},
"GuinierPorodModel",
],
"hardsphere": [
"HardsphereStructure",
{
"scale": "scale_factor",
"radius_effective": "effect_radius",
}
],
"hayter_msa": [
"HayterMSAStructure",
{
"scale": "scale_factor",
"radius_effective": "effect_radius",
"volfraction": "volfraction",
"charge": "charge",
"temperature": "temperature",
"concentration_salt": "saltconc",
"dielectconst": "dielectconst",
}
],
"hollow_cylinder": [
"HollowCylinderModel",
{
"sld": "sldCyl",
"sld_solvent": "sldSolv",
"radius": "core_radius",
"thickness": "radius",
"length": "length",
"theta": "axis_theta",
"phi": "axis_phi",
}
],
"hollow_rectangular_prism": [
"RectangularHollowPrismModel",
{
"sld": "sldPipe",
"sld_solvent": "sldSolv",
"length_a": "short_side",
"b2a_ratio": "b2a_ratio",
"c2a_ratio": "c2a_ratio",
"thickness": "thickness",
}
],
"hollow_rectangular_prism_thin_walls": [
"RectangularHollowPrismInfThinWallsModel",
{
"sld": "sldPipe",
"sld_solvent": "sldSolv",
"length_a": "short_side",
"b2a_ratio": "b2a_ratio",
"c2a_ratio": "c2a_ratio",
}
],
"lamellar": [
"LamellarModel",
{
"sld": "sld_bi",
"sld_solvent": "sld_sol",
"thickness": "bi_thick"
}
],
"lamellar_hg": [
"LamellarFFHGModel",
{
"sld": "sld_tail",
"sld_solvent": "sld_solvent",
"sld_head": "sld_head",
"length_tail": "t_length",
"length_head": "h_thickness"
}
],
"lamellar_hg_stack_caille": [
"LamellarPSHGModel",
{
"sld": "sld_tail",
"sld_head": "sld_head",
"sld_solvent": "sld_solvent",
"length_tail": "deltaT",
"length_head": "deltaH",
"d_spacing": "spacing",
"Caille_parameter": "caille",
"Nlayers": "n_plates",
}
],
"lamellar_stack_caille": [
"LamellarPSModel",
{
"sld": "sld_bi",
"sld_solvent": "sld_sol",
"thickness": "delta",
"d_spacing": "spacing",
"Caille_parameter": "caille",
"Nlayers": "n_plates",
}
],
"lamellar_stack_paracrystal": [
"LamellarPCrystalModel",
{
"sld": "sld_layer",
"sld_solvent": "sld_solvent",
"thickness": "thickness",
"d_spacing": "spacing",
"sigma_d": "pd_spacing",
"Nlayers": "Nlayers",
}
],
"line": [
"LineModel",
{
"slope": "B",
"scale": None,
"background": None,
"intercept": "A"
}
],
"linear_pearls": [
"LinearPearlsModel",
{
"sld": "sld_pearl",
"sld_solvent": "sld_solv",
"edge_sep": "edge_separation"
}
],
"lorentz": [
"Lorentz",
{
"cor_length": "length"
},
"LorentzModel",
],
"mass_fractal": [
"MassFractalModel",
{
"cutoff_length": "co_length",
"radius": "radius",
"fractal_dim_mass": "mass_dim"
}
],
"mass_surface_fractal": [
"MassSurfaceFractal",
{
"rg_cluster": "cluster_rg",
"fractal_dim_mass": "mass_dim",
"radius": "radius",
"fractal_dim_surf": "surface_dim",
"rg_primary": "primary_rg"
}
],
"mono_gauss_coil": [
"Debye",
{
"rg": "rg",
"i_zero": "scale",
"background": "background",
},
"DebyeModel",
],
"multilayer_vesicle": [
"MultiShellModel",
{
"radius": "core_radius",
"sld_solvent": "core_sld",
"n_shells": "n_pairs",
"thick_shell": "s_thickness",
"sld": "shell_sld",
"thick_solvent": "w_thickness",
}
],
"onion": [
"OnionExpShellModel",
{
"n_shells": "n_shells",
"A": "A_shell",
"sld_core": "sld_core0",
"radius_core": "rad_core0",
"sld_solvent": "sld_solv",
"thickness": "thick_shell",
"sld_in": "sld_in_shell",
"sld_out": "sld_out_shell"
}
],
"parallelepiped": [
"ParallelepipedModel",
{
"phi": "parallel_phi",
"psi": "parallel_psi",
"sld_solvent": "sldSolv",
"length_a": "short_a",
"length_b": "short_b",
"sld": "sldPipe",
"theta": "parallel_theta",
"length_c": "long_c",
"M0:sld": "M0_sld_pipe",
"mtheta:sld": "M_theta_pipe",
"mphi:sld": "M_phi_pipe",
"M0:sld_solvent": "M0_sld_solv",
"mtheta:sld_solvent": "M_theta_solv",
"mphi:sld_solvent": "M_phi_solv",
"up:frac_i": "Up_frac_i",
"up:frac_f": "Up_frac_f",
"up:angle": "Up_theta",
}
],
"peak_lorentz": [
"Peak Lorentz Model",
{
"peak_pos": "q0",
"peak_hwhm": "B"
},
"PeakLorentzModel",
],
"pearl_necklace": [
"PearlNecklaceModel",
{
"scale": "scale",
"thick_string": "thick_string",
"sld_string": "sld_string",
"sld_solvent": "sld_solv",
"edge_sep": "edge_separation",
"num_pearls": "num_pearls",
"radius": "radius",
"background": "background",
"sld": "sld_pearl"
}
],
"poly_gauss_coil": [
"Poly_GaussCoil",
{
"rg": "rg",
"polydispersity": "poly_m",
"i_zero": "scale",
"background": "background",
}
],
"polymer_excl_volume": [
"PolymerExclVolume",
{
"rg": "rg",
"scale": "scale",
"background": "background",
"porod_exp": "m"
}
],
"polymer_micelle": [
"MicelleSphCoreModel",
{
"sld_corona": "rho_corona",
"sld_solvent": "rho_solv",
"sld_core": "rho_core",
"ndensity": "ndensity",
"v_core": "v_core",
"v_corona": "v_corona",
"radius_core": "radius_core",
"rg": "radius_gyr",
"d_penetration": "d_penetration",
"n_aggreg": "n_aggreg",
}
],
"porod": [
"PorodModel",
{
"scale": "scale",
"background": "background"
}
],
"power_law": [
"PowerLawAbsModel",
{
"scale": "scale",
"background": "background",
"power": "m"
}
],
"pringle": [
"PringlesModel",
{
"scale": "scale",
"sld_solvent": "sld_solvent",
"thickness": "thickness",
"beta": "beta",
"radius": "radius",
"background": "background",
"alpha": "alpha",
"sld": "sld_pringle"
}
],
"raspberry": [
"RaspBerryModel",
{
"volfraction_lg": "volf_Lsph",
"volfraction_sm": "volf_Ssph",
"radius_sm": "radius_Ssph",
"radius_lg": "radius_Lsph",
"sld_lg": "sld_Lsph",
"sld_sm": "sld_Ssph",
"sld_solvent": "sld_solv",
"surface_fraction": "surfrac_Ssph",
"penetration": "delta_Ssph"
}
],
"rectangular_prism": [
"RectangularPrismModel",
{
"sld": "sldPipe",
"length_a": "short_side",
"b2a_ratio": "b2a_ratio",
"c2a_ratio": "c2a_ratio",
"sld_solvent": "sldSolv"
}
],
"rpa": [
"RPA10Model",
{
"K12": "Kab", "K13": "Kac", "K14": "Kad",
"K23": "Kbc", "K24": "Kbd", "K34": "Kcd",
"N1": "Na", "N2": "Nb", "N3": "Nc", "N4": "Nd",
"L1": "La", "L2": "Lb", "L3": "Lc", "L4": "Ld",
"v1": "va", "v2": "vb", "v3": "vc", "v4": "vd",
"b1": "ba", "b2": "bb", "b3": "bc", "b4": "bd",
"Phi1": "Phia", "Phi2": "Phib", "Phi3": "Phic", "Phi4": "Phid",
"case_num": "lcase_n"
}
],
"sc_paracrystal": [
"SCCrystalModel",
{
"sld": "sldSph",
"sld_solvent": "sldSolv"
}
],
"sphere": [
"SphereModel",
{
"sld": "sldSph",
"radius": "radius",
"sld_solvent": "sldSolv",
"M0:sld": "M0_sld_sph",
"mtheta:sld": "M_theta_sph",
"mphi:sld": "M_phi_sph",
"M0:sld_solvent": "M0_sld_solv",
"mtheta:sld_solvent": "M_theta_solv",
"mphi:sld_solvent": "M_phi_solv",
"up:frac_i": "Up_frac_i",
"up:frac_f": "Up_frac_f",
"up:angle": "Up_theta"
}
],
"spherical_sld": [
"SphericalSLDModel",
# Be lazy and use a generator expression to define
# sld1: sld_flat0, ...
# thickness1: thick_flat0, ...
# interface1: thick_inter0, ...
# shape1: func_inter0, ...
# nu1: nu_inter0, ...
# but override thickness1 => rad_cor0 and sld1 => sld_core0.
# Note: explicit key,value pairs given by **{...} override the
# keys from the gnerator expression ((k,v) for k,v in seq) when
# used as dict((generator), **{...})
dict(((field_new+str(index+1), field_old+str(index))
for field_new, field_old in [("sld", "sld_flat"),
("thickness", "thick_flat"),
("interface", "thick_inter"),
("shape", "func_inter"),
("nu", "nu_inter"),]
for index in range(11)),
**{
"n_shells": "n_shells",
"n_steps": "npts_inter",
"sld_solvent": "sld_solv",
"thickness1": "rad_core0",
"sld1": "sld_core0",
})
],
"squarewell": [
"SquareWellStructure",
{
"scale": "scale_factor",
"radius_effective": "effect_radius",
"wellwidth": "wellwidth",
"welldepth": "welldepth",
}
],
"stacked_disks": [
"StackedDisksModel",
{
"phi": "axis_phi",
"sld_layer": "layer_sld",
"sld_core": "core_sld",
"theta": "axis_theta",
"sld_solvent": "solvent_sld",
"n_stacking": "n_stacking",
"thick_layer": "layer_thick",
"thick_core": "core_thick",
}
],
"star_polymer": [
"StarPolymer",
{
"arms": "arms",
"rg_squared": "R2"
}
],
"stickyhardsphere": [
"StickyHSStructure",
{
"scale": "scale_factor",
"radius_effective": "effect_radius",
}
],
"surface_fractal": [
"SurfaceFractalModel",
{
"cutoff_length": "co_length",
"radius": "radius",
"fractal_dim_surf": "surface_dim"
}
],
"teubner_strey": [
"TeubnerStreyModel",
{
# Note: parameters are completely rewritten in convert.py
"volfraction_a": "volfraction_a",
"sld_a": "sld_a",
"sld_b": "sld_b",
"d": "d",
"xi": "xi",
}
],
"triaxial_ellipsoid": [
"TriaxialEllipsoidModel",
{
"phi": "axis_phi",
"radius_equat_minor": "semi_axisA",
"radius_polar": "semi_axisC",
"radius_equat_major": "semi_axisB",
"sld_solvent": "sldSolv",
"psi": "axis_psi",
"sld": "sldEll",
"theta": "axis_theta"
}
],
"two_lorentzian": [
"TwoLorentzian",
{
"lorentz_scale_1": "scale_1",
"lorentz_scale_2": "scale_2",
"lorentz_exp_1": "exponent_1",
"lorentz_exp_2": "exponent_2",
"lorentz_length_2": "length_2",
"lorentz_length_1": "length_1",
"background": "background"
},
"TwoLorentzianModel",
],
"two_power_law": [
"TwoPowerLaw",
{
"coefficent_1": "coef_A",
"power_2": "power2",
"power_1": "power1",
"background": "background",
"crossover": "qc"
},
"TwoPowerLawModel",
],
"unified_power_Rg": [
"UnifiedPowerRg",
dict(((field_new+str(index), field_old+str(index))
for field_new, field_old in [("rg", "Rg"),
("power", "power"),
("G", "G"),
("B", "B"),]
for index in range(11)),
**{
"background": "background",
"scale": "scale",
}),
"UnifiedPowerRgModel",
],
"vesicle": [
"VesicleModel",
{
"sld": "shell_sld",
"sld_solvent": "solv_sld"
}
]
}
}
|
glennlive/gnuradio-wg-grc | refs/heads/master | docs/sphinx/hieroglyph/test/test_hierglyph.py | 47 | import unittest
from hieroglyph.hieroglyph import first_paragraph_indent, gather_lines, unindent
__author__ = 'Robert Smallshire'
class UnindentTests(unittest.TestCase):
    """Exercise unindent(), which converts raw lines into (indent, text) pairs."""

    def test_zero_lines(self):
        self.assertEqual(unindent([]), [])

    def test_one_zero_indent_line(self):
        self.assertEqual(unindent(["First line"]), [(0, "First line")])

    def test_two_zero_indent_lines(self):
        result = unindent(["First line",
                           "Second line"])
        self.assertEqual(result, [(0, "First line"),
                                  (0, "Second line")])

    def test_two_indented_lines(self):
        # Differing indents are preserved per-line.
        result = unindent(["    First line",
                           "      Second line"])
        self.assertEqual(result, [(4, "First line"),
                                  (6, "Second line")])

    def test_whitespace_line(self):
        # A whitespace-only line keeps its indent but yields empty text.
        self.assertEqual(unindent(["    "]), [(4, "")])

    def test_tab_line(self):
        # A tab counts as a single character of indentation.
        self.assertEqual(unindent(["\tHello"]), [(1, "Hello")])
class FirstParagraphIndentTests(unittest.TestCase):
    """Exercise first_paragraph_indent() on lists of (indent, text) pairs."""

    def _check(self, source, expected):
        # Shared assertion path for every case below.
        self.assertEqual(first_paragraph_indent(source), expected)

    def test_zero_lines(self):
        self._check([], [])

    def test_single_line_non_indented_comment(self):
        self._check([(0, "A single line comment")],
                    [(0, "A single line comment")])

    def test_single_line_indented_comment(self):
        self._check([(4, "A single line comment")],
                    [(4, "A single line comment")])

    def test_double_line_non_indented_comment(self):
        self._check([(0, "The first line"), (0, "The second line")],
                    [(0, "The first line"), (0, "The second line")])

    def test_double_line_indented_comment(self):
        self._check([(4, "The first line"), (4, "The second line")],
                    [(4, "The first line"), (4, "The second line")])

    def test_first_line_indent(self):
        # An already-indented first line is left as-is.
        self._check([(4, "The first line"), (0, "The second line")],
                    [(4, "The first line"), (0, "The second line")])

    def test_first_line_non_indent(self):
        # A non-indented first line inherits the second line's indent.
        self._check([(0, "The first line"), (4, "The second line")],
                    [(4, "The first line"), (4, "The second line")])

    def test_increasing_indent(self):
        self._check([(0, "The first line"),
                     (4, "The second line"),
                     (8, "The third line")],
                    [(4, "The first line"),
                     (4, "The second line"),
                     (8, "The third line")])

    def test_separate_paragraphs(self):
        # Only the first paragraph is adjusted; later paragraphs are untouched.
        self._check([(0, "This is the first paragraph"),
                     (0, ""),
                     (4, "This is the second paragraph")],
                    [(0, "This is the first paragraph"),
                     (0, ""),
                     (4, "This is the second paragraph")])

    def test_separate_paragraphs_indented(self):
        self._check([(4, "This is the first paragraph"),
                     (4, ""),
                     (8, "This is the second paragraph")],
                    [(4, "This is the first paragraph"),
                     (4, ""),
                     (8, "This is the second paragraph")])

    def test_separated_lines_first_line_non_indented(self):
        self._check([(0, "The first line"),
                     (0, ""),
                     (4, "The third line")],
                    [(0, "The first line"),
                     (0, ""),
                     (4, "The third line")])

    def test_separated_lines_first_line_indented(self):
        self._check([(4, "The first line"),
                     (4, ""),
                     (4, "The third line")],
                    [(4, "The first line"),
                     (4, ""),
                     (4, "The third line")])
class GatherLinesTests(unittest.TestCase):
    """Exercise gather_lines(), which groups consecutive same-indent lines."""

    def _check(self, source, expected):
        # Shared assertion path for every case below.
        self.assertEqual(gather_lines(source), expected)

    def test_empty(self):
        self._check([], [])

    def test_one_liner(self):
        self._check([(0, 'One liner')],
                    [(0, ['One liner'])])

    def test_two_liner(self):
        # Same-indent neighbours collapse into one group.
        self._check([(0, 'First line'), (0, 'Second line')],
                    [(0, ['First line', 'Second line'])])

    def test_separated_lines(self):
        # A blank line closes the current group (and is kept inside it).
        self._check([(0, 'First line'),
                     (0, ''),
                     (0, 'Third line')],
                    [(0, ['First line', '']),
                     (0, ['Third line'])])

    def test_separated_multi_lines(self):
        self._check([(0, 'First line'),
                     (0, 'Second line'),
                     (0, ''),
                     (0, 'Fourth line'),
                     (0, 'Fifth line')],
                    [(0, ['First line', 'Second line', '']),
                     (0, ['Fourth line', 'Fifth line'])])

    def test_indented_lines(self):
        # An indent change also closes the current group.
        self._check([(0, 'First line'), (4, 'Second line')],
                    [(0, ['First line']), (4, ['Second line'])])

    def test_dedented_lines(self):
        self._check([(4, 'First line'), (0, 'Second line')],
                    [(4, ['First line']), (0, ['Second line'])])

    def test_indented_multi_lines(self):
        self._check([(0, 'First line'),
                     (0, 'Second line'),
                     (4, 'Third line'),
                     (4, 'Fourth line')],
                    [(0, ['First line', 'Second line']),
                     (4, ['Third line', 'Fourth line'])])

    def test_dedented_multi_lines(self):
        self._check([(4, 'First line'),
                     (4, 'Second line'),
                     (0, 'Third line'),
                     (0, 'Fourth line')],
                    [(4, ['First line', 'Second line']),
                     (0, ['Third line', 'Fourth line'])])

    def test_indented_separated_multi_lines(self):
        self._check([(0, 'First line'),
                     (0, 'Second line'),
                     (0, ''),
                     (4, 'Fourth line'),
                     (4, 'Fifth line')],
                    [(0, ['First line', 'Second line', '']),
                     (4, ['Fourth line', 'Fifth line'])])

    def test_dedented_separated_multi_lines(self):
        self._check([(4, 'First line'),
                     (4, 'Second line'),
                     (4, ''),
                     (0, 'Fourth line'),
                     (0, 'Fifth line')],
                    [(4, ['First line', 'Second line', '']),
                     (0, ['Fourth line', 'Fifth line'])])
|
appliedx/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/tests/test_capa_module.py | 34 | # -*- coding: utf-8 -*-
"""
Tests of the Capa XModule
"""
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
import datetime
import json
import random
import os
import textwrap
import unittest
import ddt
from mock import Mock, patch, DEFAULT
import webob
from webob.multidict import MultiDict
import xmodule
from xmodule.tests import DATA_DIR
from capa import responsetypes
from capa.responsetypes import (StudentInputError, LoncapaProblemError,
ResponseError)
from capa.xqueue_interface import XQueueInterface
from xmodule.capa_module import CapaModule, CapaDescriptor, ComplexEncoder
from opaque_keys.edx.locations import Location
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from . import get_test_system
from pytz import UTC
from capa.correctmap import CorrectMap
from ..capa_base_constants import RANDOMIZATION
class CapaFactory(object):
    """
    A helper class to create problem modules with various parameters for testing.

    Each call to create() builds a fresh CapaModule around a simple
    numericalresponse problem, with a unique url_name per instance.
    """
    sample_problem_xml = textwrap.dedent("""\
        <?xml version="1.0"?>
        <problem>
            <text>
                <p>What is pi, to two decimal places?</p>
            </text>
        <numericalresponse answer="3.14">
        <textline math="1" size="30"/>
        </numericalresponse>
        </problem>
    """)
    # Monotonically increasing counter used to make each problem's
    # location (and hence url_name) unique across create() calls.
    num = 0
    @classmethod
    def next_num(cls):
        # Advance and return the shared counter (not thread-safe; tests only).
        cls.num += 1
        return cls.num
    @classmethod
    def input_key(cls, response_num=2, input_num=1):
        """
        Return the input key to use when passing GET parameters
        """
        return "input_" + cls.answer_key(response_num, input_num)
    @classmethod
    def answer_key(cls, response_num=2, input_num=1):
        """
        Return the key stored in the capa problem answer dict
        """
        # Mirrors the "<id>_<response>_<input>" naming used by capa internally.
        return (
            "%s_%d_%d" % (
                "-".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),
                response_num,
                input_num
            )
        )
    @classmethod
    def create(cls,
               attempts=None,
               problem_state=None,
               correct=False,
               xml=None,
               override_get_score=True,
               **kwargs
               ):
        """
        All parameters are optional, and are added to the created problem if specified.

        Arguments:
            graceperiod:
            due:
            max_attempts:
            showanswer:
            force_save_button:
            rerandomize: all strings, as specified in the policy for the problem

            problem_state: a dict to to be serialized into the instance_state of the
                module.

            attempts: also added to instance state.  Will be converted to an int.
        """
        location = Location(
            "edX",
            "capa_test",
            "2012_Fall",
            "problem",
            "SampleProblem{0}".format(cls.next_num()),
            None
        )
        if xml is None:
            xml = cls.sample_problem_xml
        field_data = {'data': xml}
        # Any extra keyword args (due, max_attempts, ...) become module fields.
        field_data.update(kwargs)
        # The descriptor is mocked out entirely; only `weight` is read by tests.
        descriptor = Mock(weight="1")
        if problem_state is not None:
            field_data.update(problem_state)
        if attempts is not None:
            # converting to int here because I keep putting "0" and "1" in the tests
            # since everything else is a string.
            field_data['attempts'] = int(attempts)
        system = get_test_system()
        # Stub template rendering so tests don't need the real template engine.
        system.render_template = Mock(return_value="<div>Test Template HTML</div>")
        module = CapaModule(
            descriptor,
            system,
            DictFieldData(field_data),
            ScopeIds(None, None, location, location),
        )
        if override_get_score:
            # Replace get_score with a stub so tests can control correctness
            # without driving the full grading machinery.
            if correct:
                # TODO: probably better to actually set the internal state properly, but...
                module.get_score = lambda: {'score': 1, 'total': 1}
            else:
                module.get_score = lambda: {'score': 0, 'total': 1}
        return module
class CapaFactoryWithFiles(CapaFactory):
    """
    A factory for creating a Capa problem with files attached.

    The problem has two responses: a coderesponse (response_num=2) taking
    file uploads routed through the xqueue, and a customresponse
    (response_num=3) taking a text answer.
    """
    sample_problem_xml = textwrap.dedent("""\
        <problem>
            <coderesponse queuename="BerkeleyX-cs188x">
                <!-- actual filenames here don't matter for server-side tests,
                     they are only acted upon in the browser. -->
                <filesubmission
                    points="25"
                    allowed_files="prog1.py prog2.py prog3.py"
                    required_files="prog1.py prog2.py prog3.py"
                />
                <codeparam>
                    <answer_display>
                        If you're having trouble with this Project,
                        please refer to the Lecture Slides and attend office hours.
                    </answer_display>
                    <grader_payload>{"project": "p3"}</grader_payload>
                </codeparam>
            </coderesponse>
            <customresponse>
                <text>
                    If you worked with a partner, enter their username or email address. If you
                    worked alone, enter None.
                </text>
                <textline points="0" size="40" correct_answer="Your partner's username or 'None'"/>
                <answer type="loncapa/python">
correct=['correct']
s = str(submission[0]).strip()
if submission[0] == '':
    correct[0] = 'incorrect'
                </answer>
            </customresponse>
        </problem>
    """)
@ddt.ddt
class CapaModuleTest(unittest.TestCase):
    def setUp(self):
        """Precompute date strings relative to now for due-date/grace tests."""
        super(CapaModuleTest, self).setUp()
        now = datetime.datetime.now(UTC)
        day_delta = datetime.timedelta(days=1)
        self.yesterday_str = str(now - day_delta)
        self.today_str = str(now)
        self.tomorrow_str = str(now + day_delta)
        # in the capa grace period format, not in time delta format
        self.two_day_delta_str = "2 days"
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
def test_correct(self):
"""
Check that the factory creates correct and incorrect problems properly.
"""
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create(correct=True)
self.assertEqual(other_module.get_score()['score'], 1)
def test_get_score(self):
"""
Do 1 test where the internals of get_score are properly set
@jbau Note: this obviously depends on a particular implementation of get_score, but I think this is actually
useful as unit-code coverage for this current implementation. I don't see a layer where LoncapaProblem
is tested directly
"""
from capa.correctmap import CorrectMap
student_answers = {'1_2_1': 'abcd'}
correct_map = CorrectMap(answer_id='1_2_1', correctness="correct", npoints=0.9)
module = CapaFactory.create(correct=True, override_get_score=False)
module.lcp.correct_map = correct_map
module.lcp.student_answers = student_answers
self.assertEqual(module.get_score()['score'], 0.9)
other_correct_map = CorrectMap(answer_id='1_2_1', correctness="incorrect", npoints=0.1)
other_module = CapaFactory.create(correct=False, override_get_score=False)
other_module.lcp.correct_map = other_correct_map
other_module.lcp.student_answers = student_answers
self.assertEqual(other_module.get_score()['score'], 0.1)
def test_showanswer_default(self):
"""
Make sure the show answer logic does the right thing.
"""
# default, no due date, showanswer 'closed', so problem is open, and show_answer
# not visible.
problem = CapaFactory.create()
self.assertFalse(problem.answer_available())
    def test_showanswer_attempted(self):
        """With showanswer='attempted', the answer appears only after an attempt."""
        problem = CapaFactory.create(showanswer='attempted')
        self.assertFalse(problem.answer_available())
        problem.attempts = 1
        self.assertTrue(problem.answer_available())
    def test_showanswer_closed(self):
        """With showanswer='closed', the answer appears once the problem closes."""
        # can see after attempts used up, even with due date in the future
        used_all_attempts = CapaFactory.create(showanswer='closed',
                                               max_attempts="1",
                                               attempts="1",
                                               due=self.tomorrow_str)
        self.assertTrue(used_all_attempts.answer_available())
        # can see after due date
        after_due_date = CapaFactory.create(showanswer='closed',
                                            max_attempts="1",
                                            attempts="0",
                                            due=self.yesterday_str)
        self.assertTrue(after_due_date.answer_available())
        # can't see because attempts left
        attempts_left_open = CapaFactory.create(showanswer='closed',
                                                max_attempts="1",
                                                attempts="0",
                                                due=self.tomorrow_str)
        self.assertFalse(attempts_left_open.answer_available())
        # Can't see because grace period hasn't expired
        still_in_grace = CapaFactory.create(showanswer='closed',
                                            max_attempts="1",
                                            attempts="0",
                                            due=self.yesterday_str,
                                            graceperiod=self.two_day_delta_str)
        self.assertFalse(still_in_grace.answer_available())
    def test_showanswer_correct_or_past_due(self):
        """
        With showanswer="correct_or_past_due" should show answer after the answer is correct
        or after the problem is closed for everyone--e.g. after due date + grace period.
        """
        # can see because answer is correct, even with due date in the future
        answer_correct = CapaFactory.create(showanswer='correct_or_past_due',
                                            max_attempts="1",
                                            attempts="0",
                                            due=self.tomorrow_str,
                                            correct=True)
        self.assertTrue(answer_correct.answer_available())
        # can see after due date, even when answer isn't correct
        past_due_date = CapaFactory.create(showanswer='correct_or_past_due',
                                           max_attempts="1",
                                           attempts="0",
                                           due=self.yesterday_str)
        self.assertTrue(past_due_date.answer_available())
        # can also see after due date when answer _is_ correct
        past_due_date_correct = CapaFactory.create(showanswer='correct_or_past_due',
                                                   max_attempts="1",
                                                   attempts="0",
                                                   due=self.yesterday_str,
                                                   correct=True)
        self.assertTrue(past_due_date_correct.answer_available())
        # Can't see because grace period hasn't expired and answer isn't correct
        still_in_grace = CapaFactory.create(showanswer='correct_or_past_due',
                                            max_attempts="1",
                                            attempts="1",
                                            due=self.yesterday_str,
                                            graceperiod=self.two_day_delta_str)
        self.assertFalse(still_in_grace.answer_available())
    def test_showanswer_past_due(self):
        """
        With showanswer="past_due" should only show answer after the problem is closed
        for everyone--e.g. after due date + grace period.
        """
        # can't see after attempts used up, even with due date in the future
        used_all_attempts = CapaFactory.create(showanswer='past_due',
                                               max_attempts="1",
                                               attempts="1",
                                               due=self.tomorrow_str)
        self.assertFalse(used_all_attempts.answer_available())
        # can see after due date
        past_due_date = CapaFactory.create(showanswer='past_due',
                                           max_attempts="1",
                                           attempts="0",
                                           due=self.yesterday_str)
        self.assertTrue(past_due_date.answer_available())
        # can't see because attempts left
        attempts_left_open = CapaFactory.create(showanswer='past_due',
                                                max_attempts="1",
                                                attempts="0",
                                                due=self.tomorrow_str)
        self.assertFalse(attempts_left_open.answer_available())
        # Can't see because grace period hasn't expired, even though have no more
        # attempts.
        still_in_grace = CapaFactory.create(showanswer='past_due',
                                            max_attempts="1",
                                            attempts="1",
                                            due=self.yesterday_str,
                                            graceperiod=self.two_day_delta_str)
        self.assertFalse(still_in_grace.answer_available())
    def test_showanswer_finished(self):
        """
        With showanswer="finished" should show answer after the problem is closed,
        or after the answer is correct.
        """
        # can see after attempts used up, even with due date in the future
        used_all_attempts = CapaFactory.create(showanswer='finished',
                                               max_attempts="1",
                                               attempts="1",
                                               due=self.tomorrow_str)
        self.assertTrue(used_all_attempts.answer_available())
        # can see after due date
        past_due_date = CapaFactory.create(showanswer='finished',
                                           max_attempts="1",
                                           attempts="0",
                                           due=self.yesterday_str)
        self.assertTrue(past_due_date.answer_available())
        # can't see because attempts left and wrong
        attempts_left_open = CapaFactory.create(showanswer='finished',
                                                max_attempts="1",
                                                attempts="0",
                                                due=self.tomorrow_str)
        self.assertFalse(attempts_left_open.answer_available())
        # _can_ see because attempts left and right
        correct_ans = CapaFactory.create(showanswer='finished',
                                         max_attempts="1",
                                         attempts="0",
                                         due=self.tomorrow_str,
                                         correct=True)
        self.assertTrue(correct_ans.answer_available())
        # Can see even though grace period hasn't expired, because have no more
        # attempts.
        still_in_grace = CapaFactory.create(showanswer='finished',
                                            max_attempts="1",
                                            attempts="1",
                                            due=self.yesterday_str,
                                            graceperiod=self.two_day_delta_str)
        self.assertTrue(still_in_grace.answer_available())
def test_closed(self):
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="1", attempts="0")
self.assertFalse(module.closed())
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="2", attempts="1")
self.assertFalse(module.closed())
# Attempts = Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="1")
self.assertTrue(module.closed())
# Attempts > Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="2")
self.assertTrue(module.closed())
# Max attempts = 0 --> closed
module = CapaFactory.create(max_attempts="0", attempts="2")
self.assertTrue(module.closed())
# Past due --> closed
module = CapaFactory.create(max_attempts="1", attempts="0",
due=self.yesterday_str)
self.assertTrue(module.closed())
    def test_parse_get_params(self):
        """make_dict_of_responses strips 'input_' prefixes and handles [] list keys."""
        # Valid GET param dict
        # 'input_5' intentionally left unset,
        valid_get_dict = MultiDict({
            'input_1': 'test',
            'input_1_2': 'test',
            'input_1_2_3': 'test',
            'input_[]_3': 'test',
            'input_4': None,
            'input_6': 5
        })
        result = CapaModule.make_dict_of_responses(valid_get_dict)
        # Expect that we get a dict with "input" stripped from key names
        # and that we get the same values back
        for key in result.keys():
            original_key = "input_" + key
            self.assertTrue(original_key in valid_get_dict,
                            "Output dict should have key %s" % original_key)
            self.assertEqual(valid_get_dict[original_key], result[key])
        # Valid GET param dict with list keys
        # Each tuple represents a single parameter in the query string
        valid_get_dict = MultiDict((('input_2[]', 'test1'), ('input_2[]', 'test2')))
        result = CapaModule.make_dict_of_responses(valid_get_dict)
        self.assertTrue('2' in result)
        self.assertEqual(['test1', 'test2'], result['2'])
        # If we use [] at the end of a key name, we should always
        # get a list, even if there's just one value
        valid_get_dict = MultiDict({'input_1[]': 'test'})
        result = CapaModule.make_dict_of_responses(valid_get_dict)
        self.assertEqual(result['1'], ['test'])
        # If we have no underscores in the name, then the key is invalid
        invalid_get_dict = MultiDict({'input': 'test'})
        with self.assertRaises(ValueError):
            result = CapaModule.make_dict_of_responses(invalid_get_dict)
        # Two equivalent names (one list, one non-list)
        # One of the values would overwrite the other, so detect this
        # and raise an exception
        invalid_get_dict = MultiDict({'input_1[]': 'test 1',
                                      'input_1': 'test 2'})
        with self.assertRaises(ValueError):
            result = CapaModule.make_dict_of_responses(invalid_get_dict)
    def test_check_problem_correct(self):
        """A correct submission returns 'correct', the HTML, and bumps attempts."""
        module = CapaFactory.create(attempts=1)
        # Simulate that all answers are marked correct, no matter
        # what the input is, by patching CorrectMap.is_correct()
        # Also simulate rendering the HTML
        # TODO: pep8 thinks the following line has invalid syntax
        with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct, \
                patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
            mock_is_correct.return_value = True
            mock_html.return_value = "Test HTML"
            # Check the problem
            get_request_dict = {CapaFactory.input_key(): '3.14'}
            result = module.check_problem(get_request_dict)
        # Expect that the problem is marked correct
        self.assertEqual(result['success'], 'correct')
        # Expect that we get the (mocked) HTML
        self.assertEqual(result['contents'], 'Test HTML')
        # Expect that the number of attempts is incremented by 1
        self.assertEqual(module.attempts, 2)
    def test_check_problem_incorrect(self):
        """An incorrect submission returns 'incorrect' and still bumps attempts."""
        module = CapaFactory.create(attempts=0)
        # Simulate marking the input incorrect
        with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct:
            mock_is_correct.return_value = False
            # Check the problem
            get_request_dict = {CapaFactory.input_key(): '0'}
            result = module.check_problem(get_request_dict)
        # Expect that the problem is marked incorrect
        # (comment fixed: it previously claimed "correct")
        self.assertEqual(result['success'], 'incorrect')
        # Expect that the number of attempts is incremented by 1
        self.assertEqual(module.attempts, 1)
    def test_check_problem_closed(self):
        """Submitting to a closed problem raises NotFoundError and keeps attempts."""
        module = CapaFactory.create(attempts=3)
        # Problem closed -- cannot submit
        # Simulate that CapaModule.closed() always returns True
        with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
            mock_closed.return_value = True
            with self.assertRaises(xmodule.exceptions.NotFoundError):
                get_request_dict = {CapaFactory.input_key(): '3.14'}
                module.check_problem(get_request_dict)
        # Expect that number of attempts NOT incremented
        self.assertEqual(module.attempts, 3)
    @ddt.data(
        RANDOMIZATION.ALWAYS,
        'true'
    )
    def test_check_problem_resubmitted_with_randomize(self, rerandomize):
        """With rerandomize enabled, a completed problem rejects resubmission."""
        # Randomize turned on
        module = CapaFactory.create(rerandomize=rerandomize, attempts=0)
        # Simulate that the problem is completed
        module.done = True
        # Expect that we cannot submit
        with self.assertRaises(xmodule.exceptions.NotFoundError):
            get_request_dict = {CapaFactory.input_key(): '3.14'}
            module.check_problem(get_request_dict)
        # Expect that number of attempts NOT incremented
        self.assertEqual(module.attempts, 0)
    @ddt.data(
        RANDOMIZATION.NEVER,
        'false',
        RANDOMIZATION.PER_STUDENT
    )
    def test_check_problem_resubmitted_no_randomize(self, rerandomize):
        """Without rerandomize, a completed problem accepts resubmission."""
        # Randomize turned off
        module = CapaFactory.create(rerandomize=rerandomize, attempts=0, done=True)
        # Expect that we can submit successfully
        get_request_dict = {CapaFactory.input_key(): '3.14'}
        result = module.check_problem(get_request_dict)
        self.assertEqual(result['success'], 'correct')
        # Expect that number of attempts IS incremented
        self.assertEqual(module.attempts, 1)
    def test_check_problem_queued(self):
        """A queued problem returns a 'wait' message and keeps attempts unchanged."""
        module = CapaFactory.create(attempts=1)
        # Simulate that the problem is queued
        multipatch = patch.multiple(
            'capa.capa_problem.LoncapaProblem',
            is_queued=DEFAULT,
            get_recentmost_queuetime=DEFAULT
        )
        with multipatch as values:
            values['is_queued'].return_value = True
            values['get_recentmost_queuetime'].return_value = datetime.datetime.now(UTC)
            get_request_dict = {CapaFactory.input_key(): '3.14'}
            result = module.check_problem(get_request_dict)
            # Expect an AJAX alert message in 'success'
            self.assertIn('You must wait', result['success'])
        # Expect that the number of attempts is NOT incremented
        self.assertEqual(module.attempts, 1)
    def test_check_problem_with_files(self):
        """File-upload submissions forward the files to the xqueue HTTP post."""
        # Check a problem with uploaded files, using the check_problem API.
        # pylint: disable=protected-access
        # The files we'll be uploading.
        fnames = ["prog1.py", "prog2.py", "prog3.py"]
        fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
        fileobjs = [open(fpath) for fpath in fpaths]
        for fileobj in fileobjs:
            self.addCleanup(fileobj.close)
        module = CapaFactoryWithFiles.create()
        # Mock the XQueueInterface.
        xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
        xqueue_interface._http_post = Mock(return_value=(0, "ok"))
        module.system.xqueue['interface'] = xqueue_interface
        # Create a request dictionary for check_problem.
        get_request_dict = {
            CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
            CapaFactoryWithFiles.input_key(response_num=3): 'None',
        }
        module.check_problem(get_request_dict)
        # _http_post is called like this:
        #   _http_post(
        #       'http://example.com/xqueue/xqueue/submit/',
        #       {
        #           'xqueue_header': '{"lms_key": "df34fb702620d7ae892866ba57572491", "lms_callback_url": "/", "queue_name": "BerkeleyX-cs188x"}',
        #           'xqueue_body': '{"student_info": "{\\"anonymous_student_id\\": \\"student\\", \\"submission_time\\": \\"20131117183318\\"}", "grader_payload": "{\\"project\\": \\"p3\\"}", "student_response": ""}',
        #       },
        #       files={
        #           path(u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html'):
        #               <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html', mode 'r' at 0x49c5f60>,
        #           path(u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg'):
        #               <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg', mode 'r' at 0x49c56f0>,
        #           path(u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf'):
        #               <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf', mode 'r' at 0x49c5a50>,
        #       },
        #   )
        self.assertEqual(xqueue_interface._http_post.call_count, 1)
        _, kwargs = xqueue_interface._http_post.call_args
        self.assertItemsEqual(fpaths, kwargs['files'].keys())
        for fpath, fileobj in kwargs['files'].iteritems():
            self.assertEqual(fpath, fileobj.name)
    def test_check_problem_with_files_as_xblock(self):
        """Same as above but driving the upload through the XBlock handler API."""
        # Check a problem with uploaded files, using the XBlock API.
        # pylint: disable=protected-access
        # The files we'll be uploading.
        fnames = ["prog1.py", "prog2.py", "prog3.py"]
        fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
        fileobjs = [open(fpath) for fpath in fpaths]
        for fileobj in fileobjs:
            self.addCleanup(fileobj.close)
        module = CapaFactoryWithFiles.create()
        # Mock the XQueueInterface.
        xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
        xqueue_interface._http_post = Mock(return_value=(0, "ok"))
        module.system.xqueue['interface'] = xqueue_interface
        # Create a webob Request with the files uploaded.
        post_data = []
        for fname, fileobj in zip(fnames, fileobjs):
            post_data.append((CapaFactoryWithFiles.input_key(response_num=2), (fname, fileobj)))
        post_data.append((CapaFactoryWithFiles.input_key(response_num=3), 'None'))
        request = webob.Request.blank("/some/fake/url", POST=post_data, content_type='multipart/form-data')
        module.handle('xmodule_handler', request, 'problem_check')
        self.assertEqual(xqueue_interface._http_post.call_count, 1)
        _, kwargs = xqueue_interface._http_post.call_args
        self.assertItemsEqual(fnames, kwargs['files'].keys())
        for fpath, fileobj in kwargs['files'].iteritems():
            self.assertEqual(fpath, fileobj.name)
    def test_check_problem_error(self):
        """Known grading exceptions surface as 'Error: ...' without using an attempt."""
        # Try each exception that capa_module should handle
        exception_classes = [StudentInputError,
                             LoncapaProblemError,
                             ResponseError]
        for exception_class in exception_classes:
            # Create the module
            module = CapaFactory.create(attempts=1)
            # Ensure that the user is NOT staff
            module.system.user_is_staff = False
            # Simulate answering a problem that raises the exception
            with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
                mock_grade.side_effect = exception_class('test error')
                get_request_dict = {CapaFactory.input_key(): '3.14'}
                result = module.check_problem(get_request_dict)
            # Expect an AJAX alert message in 'success'
            expected_msg = 'Error: test error'
            self.assertEqual(expected_msg, result['success'])
            # Expect that the number of attempts is NOT incremented
            self.assertEqual(module.attempts, 1)
    def test_check_problem_other_errors(self):
        """
        Test that errors other than the expected kinds give an appropriate message.

        See also `test_check_problem_error` for the "expected kinds" or errors.
        """
        # Create the module
        module = CapaFactory.create(attempts=1)
        # Ensure that the user is NOT staff
        module.system.user_is_staff = False
        # Ensure that DEBUG is on
        module.system.DEBUG = True
        # Simulate answering a problem that raises the exception
        with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
            # Non-ASCII message also verifies unicode-safe error reporting.
            error_msg = u"Superterrible error happened: ☠"
            mock_grade.side_effect = Exception(error_msg)
            get_request_dict = {CapaFactory.input_key(): '3.14'}
            result = module.check_problem(get_request_dict)
        # Expect an AJAX alert message in 'success'
        self.assertTrue(error_msg in result['success'])
    def test_check_problem_error_nonascii(self):
        """Known grading exceptions with non-ASCII text are reported verbatim."""
        # Try each exception that capa_module should handle
        exception_classes = [StudentInputError,
                             LoncapaProblemError,
                             ResponseError]
        for exception_class in exception_classes:
            # Create the module
            module = CapaFactory.create(attempts=1)
            # Ensure that the user is NOT staff
            module.system.user_is_staff = False
            # Simulate answering a problem that raises the exception
            with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
                mock_grade.side_effect = exception_class(u"ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ")
                get_request_dict = {CapaFactory.input_key(): '3.14'}
                result = module.check_problem(get_request_dict)
            # Expect an AJAX alert message in 'success'
            expected_msg = u'Error: ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ'
            self.assertEqual(expected_msg, result['success'])
            # Expect that the number of attempts is NOT incremented
            self.assertEqual(module.attempts, 1)
    def test_check_problem_error_with_staff_user(self):
        """Staff users additionally get a traceback in the error message."""
        # Try each exception that capa module should handle
        for exception_class in [StudentInputError,
                                LoncapaProblemError,
                                ResponseError]:
            # Create the module
            module = CapaFactory.create(attempts=1)
            # Ensure that the user IS staff
            module.system.user_is_staff = True
            # Simulate answering a problem that raises an exception
            with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
                mock_grade.side_effect = exception_class('test error')
                get_request_dict = {CapaFactory.input_key(): '3.14'}
                result = module.check_problem(get_request_dict)
            # Expect an AJAX alert message in 'success'
            self.assertTrue('test error' in result['success'])
            # We DO include traceback information for staff users
            self.assertTrue('Traceback' in result['success'])
            # Expect that the number of attempts is NOT incremented
            self.assertEqual(module.attempts, 1)
    def test_reset_problem(self):
        """Resetting a completed problem succeeds and rebuilds the LoncapaProblem."""
        module = CapaFactory.create(done=True)
        # Wrap (not replace) these so real behavior runs but calls are recorded.
        module.new_lcp = Mock(wraps=module.new_lcp)
        module.choose_new_seed = Mock(wraps=module.choose_new_seed)
        # Stub out HTML rendering
        with patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
            mock_html.return_value = "<div>Test HTML</div>"
            # Reset the problem
            get_request_dict = {}
            result = module.reset_problem(get_request_dict)
        # Expect that the request was successful
        self.assertTrue('success' in result and result['success'])
        # Expect that the problem HTML is retrieved
        self.assertTrue('html' in result)
        self.assertEqual(result['html'], "<div>Test HTML</div>")
        # Expect that the problem was reset
        module.new_lcp.assert_called_once_with(None)
    def test_reset_problem_closed(self):
        """Resetting a closed problem fails (success is False)."""
        # pre studio default
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS)
        # Simulate that the problem is closed
        with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
            mock_closed.return_value = True
            # Try to reset the problem
            get_request_dict = {}
            result = module.reset_problem(get_request_dict)
        # Expect that the problem was NOT reset
        self.assertTrue('success' in result and not result['success'])
def test_reset_problem_not_done(self):
# Simulate that the problem is NOT done
module = CapaFactory.create(done=False)
# Try to reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the problem was NOT reset
self.assertTrue('success' in result and not result['success'])
    def test_rescore_problem_correct(self):
        """
        Rescoring a submitted problem whose answers now evaluate as correct
        reports 'correct', returns no HTML, and leaves attempts unchanged.
        """
        module = CapaFactory.create(attempts=1, done=True)

        # Simulate that all answers are marked correct, no matter
        # what the input is, by patching LoncapaResponse.evaluate_answers()
        with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
            mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'correct')
            result = module.rescore_problem()

        # Expect that the problem is marked correct
        self.assertEqual(result['success'], 'correct')

        # Expect that we get no HTML
        self.assertFalse('contents' in result)

        # Expect that the number of attempts is not incremented
        self.assertEqual(module.attempts, 1)
    def test_rescore_problem_incorrect(self):
        """
        Rescoring a submitted problem whose answers now evaluate as incorrect
        reports 'incorrect' and leaves the attempt count unchanged.
        """
        # make sure it also works when attempts have been reset,
        # so add this to the test:
        module = CapaFactory.create(attempts=0, done=True)

        # Simulate that all answers are marked incorrect, no matter
        # what the input is, by patching LoncapaResponse.evaluate_answers()
        with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
            mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'incorrect')
            result = module.rescore_problem()

        # Expect that the problem is marked incorrect
        self.assertEqual(result['success'], 'incorrect')

        # Expect that the number of attempts is not incremented
        self.assertEqual(module.attempts, 0)
    def test_rescore_problem_not_done(self):
        """Rescoring a problem that was never submitted raises NotFoundError."""
        # Simulate that the problem is NOT done
        module = CapaFactory.create(done=False)

        # Try to rescore the problem, and get exception
        with self.assertRaises(xmodule.exceptions.NotFoundError):
            module.rescore_problem()
def test_rescore_problem_not_supported(self):
module = CapaFactory.create(done=True)
# Try to rescore the problem, and get exception
with patch('capa.capa_problem.LoncapaProblem.supports_rescoring') as mock_supports_rescoring:
mock_supports_rescoring.return_value = False
with self.assertRaises(NotImplementedError):
module.rescore_problem()
    def _rescore_problem_error_helper(self, exception_class):
        """
        Helper to allow testing all errors that rescoring might return.

        Patches rescoring to raise *exception_class* (with a non-ASCII
        message, to exercise unicode handling) and checks the AJAX response.
        """
        # Create the module
        module = CapaFactory.create(attempts=1, done=True)

        # Simulate answering a problem that raises the exception
        with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
            mock_rescore.side_effect = exception_class(u'test error \u03a9')
            result = module.rescore_problem()

        # Expect an AJAX alert message in 'success'
        expected_msg = u'Error: test error \u03a9'
        self.assertEqual(result['success'], expected_msg)

        # Expect that the number of attempts is NOT incremented
        self.assertEqual(module.attempts, 1)
    def test_rescore_problem_student_input_error(self):
        """A StudentInputError during rescoring is reported via the AJAX response."""
        self._rescore_problem_error_helper(StudentInputError)
    def test_rescore_problem_problem_error(self):
        """A LoncapaProblemError during rescoring is reported via the AJAX response."""
        self._rescore_problem_error_helper(LoncapaProblemError)
    def test_rescore_problem_response_error(self):
        """A ResponseError during rescoring is reported via the AJAX response."""
        self._rescore_problem_error_helper(ResponseError)
def test_save_problem(self):
module = CapaFactory.create(done=False)
# Save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that answers are saved to the problem
expected_answers = {CapaFactory.answer_key(): '3.14'}
self.assertEqual(module.lcp.student_answers, expected_answers)
# Expect that the result is success
self.assertTrue('success' in result and result['success'])
def test_save_problem_closed(self):
module = CapaFactory.create(done=False)
# Simulate that the problem is closed
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
# Try to save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that the result is failure
self.assertTrue('success' in result and not result['success'])
    @ddt.data(
        RANDOMIZATION.ALWAYS,
        'true'
    )
    def test_save_problem_submitted_with_randomize(self, rerandomize):
        """Saving after submission is refused when answers re-randomize."""
        # Capa XModule treats 'always' and 'true' equivalently
        module = CapaFactory.create(rerandomize=rerandomize, done=True)

        # Try to save
        get_request_dict = {CapaFactory.input_key(): '3.14'}
        result = module.save_problem(get_request_dict)

        # Expect that we cannot save
        self.assertTrue('success' in result and not result['success'])
    @ddt.data(
        RANDOMIZATION.NEVER,
        'false',
        RANDOMIZATION.PER_STUDENT
    )
    def test_save_problem_submitted_no_randomize(self, rerandomize):
        """Saving after submission succeeds when answers do not re-randomize."""
        # Capa XModule treats 'false' and 'per_student' equivalently
        module = CapaFactory.create(rerandomize=rerandomize, done=True)

        # Try to save
        get_request_dict = {CapaFactory.input_key(): '3.14'}
        result = module.save_problem(get_request_dict)

        # Expect that we succeed
        self.assertTrue('success' in result and result['success'])
    def test_check_button_name(self):
        """
        The check button reads "Final Check" on (or past) the last allowed
        attempt, and "Check" otherwise or when attempts are unlimited.
        """
        # If last attempt, button name changes to "Final Check"
        # Just in case, we also check what happens if we have
        # more attempts than allowed.
        attempts = random.randint(1, 10)
        module = CapaFactory.create(attempts=attempts - 1, max_attempts=attempts)
        self.assertEqual(module.check_button_name(), "Final Check")

        module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
        self.assertEqual(module.check_button_name(), "Final Check")

        module = CapaFactory.create(attempts=attempts + 1, max_attempts=attempts)
        self.assertEqual(module.check_button_name(), "Final Check")

        # Otherwise, button name is "Check"
        module = CapaFactory.create(attempts=attempts - 2, max_attempts=attempts)
        self.assertEqual(module.check_button_name(), "Check")

        module = CapaFactory.create(attempts=attempts - 3, max_attempts=attempts)
        self.assertEqual(module.check_button_name(), "Check")

        # If no limit on attempts, then always show "Check"
        module = CapaFactory.create(attempts=attempts - 3)
        self.assertEqual(module.check_button_name(), "Check")

        module = CapaFactory.create(attempts=0)
        self.assertEqual(module.check_button_name(), "Check")
def test_check_button_checking_name(self):
module = CapaFactory.create(attempts=1, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(attempts=10, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
def test_check_button_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Submit")
module = CapaFactory.create(attempts=9,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Final Submit")
def test_check_button_checking_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(
attempts=9,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
    def test_should_show_check_button(self):
        """
        The check button is hidden after the due date, when attempts are
        exhausted, for survey questions, and after submitting a
        re-randomized problem; otherwise it is shown.
        """
        attempts = random.randint(1, 10)

        # If we're after the deadline, do NOT show check button
        module = CapaFactory.create(due=self.yesterday_str)
        self.assertFalse(module.should_show_check_button())

        # If user is out of attempts, do NOT show the check button
        module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
        self.assertFalse(module.should_show_check_button())

        # If survey question (max_attempts = 0), do NOT show the check button
        module = CapaFactory.create(max_attempts=0)
        self.assertFalse(module.should_show_check_button())

        # If user submitted a problem but hasn't reset,
        # do NOT show the check button
        # Note: we can only reset when rerandomize="always" or "true"
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
        self.assertFalse(module.should_show_check_button())

        module = CapaFactory.create(rerandomize="true", done=True)
        self.assertFalse(module.should_show_check_button())

        # Otherwise, DO show the check button
        module = CapaFactory.create()
        self.assertTrue(module.should_show_check_button())

        # If the user has submitted the problem
        # and we do NOT have a reset button, then we can show the check button
        # Setting rerandomize to "never" or "false" ensures that the reset button
        # is not shown
        module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, done=True)
        self.assertTrue(module.should_show_check_button())

        module = CapaFactory.create(rerandomize="false", done=True)
        self.assertTrue(module.should_show_check_button())

        module = CapaFactory.create(rerandomize=RANDOMIZATION.PER_STUDENT, done=True)
        self.assertTrue(module.should_show_check_button())
    def test_should_show_reset_button(self):
        """
        The reset button is hidden after the due date or when attempts are
        exhausted; with re-randomization it is shown once the problem is done,
        even for correct answers and survey questions.
        """
        attempts = random.randint(1, 10)

        # If we're after the deadline, do NOT show the reset button
        module = CapaFactory.create(due=self.yesterday_str, done=True)
        self.assertFalse(module.should_show_reset_button())

        # If the user is out of attempts, do NOT show the reset button
        module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
        self.assertFalse(module.should_show_reset_button())

        # pre studio default value, DO show the reset button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
        self.assertTrue(module.should_show_reset_button())

        # If survey question for capa (max_attempts = 0),
        # DO show the reset button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True)
        self.assertTrue(module.should_show_reset_button())

        # If the question is not correct
        # DO show the reset button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True, correct=False)
        self.assertTrue(module.should_show_reset_button())

        # If the question is correct and randomization is never
        # DO not show the reset button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, max_attempts=0, done=True, correct=True)
        self.assertFalse(module.should_show_reset_button())

        # If the question is correct and randomization is always
        # Show the reset button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True, correct=True)
        self.assertTrue(module.should_show_reset_button())

        # Don't show reset button if randomization is turned on and the question is not done
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, show_reset_button=False, done=False)
        self.assertFalse(module.should_show_reset_button())

        # Show reset button if randomization is turned on and the problem is done
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, show_reset_button=False, done=True)
        self.assertTrue(module.should_show_reset_button())
    def test_should_show_save_button(self):
        """
        The save button is hidden after the due date, when attempts are
        exhausted, after submitting a re-randomized problem, and with
        unlimited attempts and no randomization; it is shown for the
        pre-studio default, limited-attempt non-random problems, and surveys.
        """
        attempts = random.randint(1, 10)

        # If we're after the deadline, do NOT show the save button
        module = CapaFactory.create(due=self.yesterday_str, done=True)
        self.assertFalse(module.should_show_save_button())

        # If the user is out of attempts, do NOT show the save button
        module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
        self.assertFalse(module.should_show_save_button())

        # If user submitted a problem but hasn't reset, do NOT show the save button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
        self.assertFalse(module.should_show_save_button())

        module = CapaFactory.create(rerandomize="true", done=True)
        self.assertFalse(module.should_show_save_button())

        # If the user has unlimited attempts and we are not randomizing,
        # then do NOT show a save button
        # because they can keep using "Check"
        module = CapaFactory.create(max_attempts=None, rerandomize=RANDOMIZATION.NEVER, done=False)
        self.assertFalse(module.should_show_save_button())

        module = CapaFactory.create(max_attempts=None, rerandomize="false", done=True)
        self.assertFalse(module.should_show_save_button())

        module = CapaFactory.create(max_attempts=None, rerandomize=RANDOMIZATION.PER_STUDENT, done=True)
        self.assertFalse(module.should_show_save_button())

        # pre-studio default, DO show the save button
        module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=False)
        self.assertTrue(module.should_show_save_button())

        # If we're not randomizing and we have limited attempts, then we can save
        module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, max_attempts=2, done=True)
        self.assertTrue(module.should_show_save_button())

        module = CapaFactory.create(rerandomize="false", max_attempts=2, done=True)
        self.assertTrue(module.should_show_save_button())

        module = CapaFactory.create(rerandomize=RANDOMIZATION.PER_STUDENT, max_attempts=2, done=True)
        self.assertTrue(module.should_show_save_button())

        # If survey question for capa (max_attempts = 0),
        # DO show the save button
        module = CapaFactory.create(max_attempts=0, done=False)
        self.assertTrue(module.should_show_save_button())
    def test_should_show_save_button_force_save_button(self):
        """
        force_save_button overrides the reset-first rule for re-randomized
        problems, but not the deadline or attempt-limit rules.
        """
        # If we're after the deadline, do NOT show the save button
        # even though we're forcing a save
        module = CapaFactory.create(due=self.yesterday_str,
                                    force_save_button="true",
                                    done=True)
        self.assertFalse(module.should_show_save_button())

        # If the user is out of attempts, do NOT show the save button
        attempts = random.randint(1, 10)
        module = CapaFactory.create(attempts=attempts,
                                    max_attempts=attempts,
                                    force_save_button="true",
                                    done=True)
        self.assertFalse(module.should_show_save_button())

        # Otherwise, if we force the save button,
        # then show it even if we would ordinarily
        # require a reset first
        module = CapaFactory.create(force_save_button="true",
                                    rerandomize=RANDOMIZATION.ALWAYS,
                                    done=True)
        self.assertTrue(module.should_show_save_button())

        module = CapaFactory.create(force_save_button="true",
                                    rerandomize="true",
                                    done=True)
        self.assertTrue(module.should_show_save_button())
def test_no_max_attempts(self):
module = CapaFactory.create(max_attempts='')
html = module.get_problem_html()
self.assertTrue(html is not None)
# assert that we got here without exploding
def test_get_problem_html(self):
module = CapaFactory.create()
# We've tested the show/hide button logic in other tests,
# so here we hard-wire the values
show_check_button = bool(random.randint(0, 1) % 2)
show_reset_button = bool(random.randint(0, 1) % 2)
show_save_button = bool(random.randint(0, 1) % 2)
module.should_show_check_button = Mock(return_value=show_check_button)
module.should_show_reset_button = Mock(return_value=show_reset_button)
module.should_show_save_button = Mock(return_value=show_save_button)
# Mock the system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Patch the capa problem's HTML rendering
with patch('capa.capa_problem.LoncapaProblem.get_html') as mock_html:
mock_html.return_value = "<div>Test Problem HTML</div>"
# Render the problem HTML
html = module.get_problem_html(encapsulate=False)
# Also render the problem encapsulated in a <div>
html_encapsulated = module.get_problem_html(encapsulate=True)
# Expect that we get the rendered template back
self.assertEqual(html, "<div>Test Template HTML</div>")
# Check the rendering context
render_args, _ = module.system.render_template.call_args
self.assertEqual(len(render_args), 2)
template_name = render_args[0]
self.assertEqual(template_name, "problem.html")
context = render_args[1]
self.assertEqual(context['problem']['html'], "<div>Test Problem HTML</div>")
self.assertEqual(bool(context['check_button']), show_check_button)
self.assertEqual(bool(context['reset_button']), show_reset_button)
self.assertEqual(bool(context['save_button']), show_save_button)
# Assert that the encapsulated html contains the original html
self.assertTrue(html in html_encapsulated)
demand_xml = """
<problem>
<p>That is the question</p>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">Alpha <choicehint>A hint</choicehint>
</choice>
<choice correct="true">Beta</choice>
</choicegroup>
</multiplechoiceresponse>
<demandhint>
<hint>Demand 1</hint>
<hint>Demand 2</hint>
</demandhint>
</problem>"""
    def test_demand_hint(self):
        """
        get_problem_html() flags demand hints as available, and
        get_demand_hint() returns each hint by index, wrapping past the
        last hint back to index 0.
        """
        # HTML generation is mocked out to be meaningless here, so instead we check
        # the context dict passed into HTML generation.
        module = CapaFactory.create(xml=self.demand_xml)
        module.get_problem_html()  # ignoring html result
        context = module.system.render_template.call_args[0][1]
        self.assertEqual(context['demand_hint_possible'], True)

        # Check the AJAX call that gets the hint by index
        result = module.get_demand_hint(0)
        self.assertEqual(result['contents'], u'Hint (1 of 2): Demand 1')
        self.assertEqual(result['hint_index'], 0)

        result = module.get_demand_hint(1)
        self.assertEqual(result['contents'], u'Hint (2 of 2): Demand 2')
        self.assertEqual(result['hint_index'], 1)

        result = module.get_demand_hint(2)  # here the server wraps around to index 0
        self.assertEqual(result['contents'], u'Hint (1 of 2): Demand 1')
        self.assertEqual(result['hint_index'], 0)
    def test_demand_hint_logging(self):
        """
        Requesting a demand hint emits an 'edx.problem.hint.demandhint_displayed'
        tracking event with the hint index, text, hint count, and module id.
        """
        module = CapaFactory.create(xml=self.demand_xml)
        # Re-mock the module_id to a fixed string, so we can check the logging
        module.location = Mock(module.location)
        module.location.to_deprecated_string.return_value = 'i4x://edX/capa_test/problem/meh'
        module.get_problem_html()
        module.get_demand_hint(0)
        module.runtime.track_function.assert_called_with(
            'edx.problem.hint.demandhint_displayed',
            {'hint_index': 0, 'module_id': u'i4x://edX/capa_test/problem/meh',
             'hint_text': 'Demand 1', 'hint_len': 2}
        )
    def test_input_state_consistency(self):
        """
        Each module's input_state keys match its problem's input keys, and
        two separately-created modules have disjoint input-state keys.
        """
        module1 = CapaFactory.create()
        module2 = CapaFactory.create()

        # check to make sure that the input_state and the keys have the same values
        module1.set_state_from_lcp()
        self.assertEqual(module1.lcp.inputs.keys(), module1.input_state.keys())

        module2.set_state_from_lcp()

        # Two distinct modules must not share any input keys
        intersection = set(module2.input_state.keys()).intersection(set(module1.input_state.keys()))
        self.assertEqual(len(intersection), 0)
def test_get_problem_html_error(self):
"""
In production, when an error occurs with the problem HTML
rendering, a "dummy" problem is created with an error
message to display to the user.
"""
module = CapaFactory.create()
# Save the original problem so we can compare it later
original_problem = module.lcp
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
module.lcp.get_html = Mock(side_effect=Exception("Test"))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Turn off DEBUG
module.system.DEBUG = False
# Try to render the module with DEBUG turned off
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue("error" in context['problem']['html'])
# Expect that the module has created a new dummy problem with the error
self.assertNotEqual(original_problem, module.lcp)
def test_get_problem_html_error_w_debug(self):
"""
Test the html response when an error occurs with DEBUG on
"""
module = CapaFactory.create()
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
error_msg = u"Superterrible error happened: ☠"
module.lcp.get_html = Mock(side_effect=Exception(error_msg))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Make sure DEBUG is on
module.system.DEBUG = True
# Try to render the module with DEBUG turned on
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue(error_msg in context['problem']['html'])
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_no_change(self, rerandomize):
# Run the test for each possible rerandomize value
module = CapaFactory.create(rerandomize=rerandomize)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# If we're not rerandomizing, the seed is always set
# to the same value (1)
if rerandomize == RANDOMIZATION.NEVER:
self.assertEqual(seed, 1,
msg="Seed should always be 1 when rerandomize='%s'" % rerandomize)
# Check the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
# Save the problem
module.save_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_with_reset(self, rerandomize):
"""
Run the test for each possible rerandomize value
"""
def _reset_and_get_seed(module):
"""
Reset the XModule and return the module's seed
"""
# Simulate submitting an attempt
# We need to do this, or reset_problem() will
# fail because it won't re-randomize until the problem has been submitted
# the problem yet.
module.done = True
# Reset the problem
module.reset_problem({})
# Return the seed
return module.seed
def _retry_and_check(num_tries, test_func):
'''
Returns True if *test_func* was successful
(returned True) within *num_tries* attempts
*test_func* must be a function
of the form test_func() -> bool
'''
success = False
for i in range(num_tries):
if test_func() is True:
success = True
break
return success
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# We do NOT want the seed to reset if rerandomize
# is set to 'never' -- it should still be 1
# The seed also stays the same if we're randomizing
# 'per_student': the same student should see the same problem
if rerandomize in [RANDOMIZATION.NEVER,
'false',
RANDOMIZATION.PER_STUDENT]:
self.assertEqual(seed, _reset_and_get_seed(module))
# Otherwise, we expect the seed to change
# to another valid seed
else:
# Since there's a small chance we might get the
# same seed again, give it 5 chances
# to generate a different seed
success = _retry_and_check(5, lambda: _reset_and_get_seed(module) != seed)
self.assertTrue(module.seed is not None)
msg = 'Could not get a new seed from reset after 5 tries'
self.assertTrue(success, msg)
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_with_reset_question_unsubmitted(self, rerandomize):
"""
Run the test for each possible rerandomize value
"""
def _reset_and_get_seed(module):
"""
Reset the XModule and return the module's seed
"""
# Reset the problem
# By default, the problem is instantiated as unsubmitted
module.reset_problem({})
# Return the seed
return module.seed
module = CapaFactory.create(rerandomize=rerandomize, done=False)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
#the seed should never change because the student hasn't finished the problem
self.assertEqual(seed, _reset_and_get_seed(module))
@ddt.data(
RANDOMIZATION.ALWAYS,
RANDOMIZATION.PER_STUDENT,
'true',
RANDOMIZATION.ONRESET
)
def test_random_seed_bins(self, rerandomize):
# Assert that we are limiting the number of possible seeds.
# Get a bunch of seeds, they should all be in 0-999.
i = 200
while i > 0:
module = CapaFactory.create(rerandomize=rerandomize)
assert 0 <= module.seed < 1000
i -= 1
    @patch('xmodule.capa_base.log')
    @patch('xmodule.capa_base.Progress')
    def test_get_progress_error(self, mock_progress, mock_log):
        """
        Check that an exception given in `Progress` produces a `log.exception` call.
        """
        error_types = [TypeError, ValueError]
        for error_type in error_types:
            mock_progress.side_effect = error_type
            module = CapaFactory.create()
            # get_progress() swallows the error and returns None instead
            self.assertIsNone(module.get_progress())
            mock_log.exception.assert_called_once_with('Got bad progress')
            # reset the call record so assert_called_once_with works next iteration
            mock_log.reset_mock()
@patch('xmodule.capa_base.Progress')
def test_get_progress_no_error_if_weight_zero(self, mock_progress):
"""
Check that if the weight is 0 get_progress does not try to create a Progress object.
"""
mock_progress.return_value = True
module = CapaFactory.create()
module.weight = 0
progress = module.get_progress()
self.assertIsNone(progress)
self.assertFalse(mock_progress.called)
@patch('xmodule.capa_base.Progress')
def test_get_progress_calculate_progress_fraction(self, mock_progress):
"""
Check that score and total are calculated correctly for the progress fraction.
"""
module = CapaFactory.create()
module.weight = 1
module.get_progress()
mock_progress.assert_called_with(0, 1)
other_module = CapaFactory.create(correct=True)
other_module.weight = 1
other_module.get_progress()
mock_progress.assert_called_with(1, 1)
    def test_get_html(self):
        """
        Check that get_html() calls get_progress() with no arguments.
        """
        module = CapaFactory.create()
        # Wrap (not replace) get_progress so real behavior is preserved
        # while the call is recorded
        module.get_progress = Mock(wraps=module.get_progress)
        module.get_html()
        module.get_progress.assert_called_once_with()
def test_get_problem(self):
"""
Check that get_problem() returns the expected dictionary.
"""
module = CapaFactory.create()
self.assertEquals(module.get_problem("data"), {'html': module.get_problem_html(encapsulate=False)})
# Standard question with shuffle="true" used by a few tests
common_shuffle_xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
def test_check_unmask(self):
"""
Check that shuffle unmasking is plumbed through: when check_problem is called,
unmasked names should appear in the track_function event_info.
"""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'choice_3'} # the correct choice
module.check_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_3')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('shuffle', ['choice_3', 'choice_1', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'correct')
@unittest.skip("masking temporarily disabled")
def test_save_unmask(self):
"""On problem save, unmasked data should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.save_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
@unittest.skip("masking temporarily disabled")
def test_reset_unmask(self):
"""On problem reset, unmask names should appear track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On reset, 'old_state' should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.reset_problem(None)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'reset_problem')
self.assertEquals(event_info['old_state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
@unittest.skip("masking temporarily disabled")
def test_rescore_unmask(self):
"""On problem rescore, unmasked names should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On rescore, state/student_answers should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.rescore_problem()
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'problem_rescore')
self.assertEquals(event_info['state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
def test_check_unmask_answerpool(self):
"""Check answer-pool question track_function uses unmasked names"""
xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="4">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
module = CapaFactory.create(xml=xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'choice_2'} # mask_X form when masking enabled
module.check_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('answerpool', ['choice_1', 'choice_3', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'incorrect')
@ddt.ddt
class CapaDescriptorTest(unittest.TestCase):
    """
    Tests for CapaDescriptor: response-type detection (``problem_types``) and
    search indexing (``index_dictionary``).

    The ``sample_*_xml`` class attributes below are representative problem
    definitions that the ``test_indexing_*`` methods parse and index.
    """

    # Checkbox (choiceresponse) problem.
    sample_checkbox_problem_xml = textwrap.dedent("""
        <problem>
        <p>Title</p>
        <p>Description</p>
        <p>Example</p>
        <p>The following languages are in the Indo-European family:</p>
        <choiceresponse>
        <checkboxgroup label="The following languages are in the Indo-European family:">
        <choice correct="true">Urdu</choice>
        <choice correct="false">Finnish</choice>
        <choice correct="true">Marathi</choice>
        <choice correct="true">French</choice>
        <choice correct="false">Hungarian</choice>
        </checkboxgroup>
        </choiceresponse>
        <p>Note: Make sure you select all of the correct options&#8212;there may be more than one!</p>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>Solution for CAPA problem</p>
        </div>
        </solution>
        </problem>
        """)

    # Dropdown (optionresponse) problem.
    sample_dropdown_problem_xml = textwrap.dedent("""
        <problem>
        <p>Dropdown problems allow learners to select only one option from a list of options.</p>
        <p>Description</p>
        <p>You can use the following example problem as a model.</p>
        <p> Which of the following countries celebrates its independence on August 15?</p>
        <optionresponse>
        <optioninput label="lbl" options="('India','Spain','China','Bermuda')" correct="India"></optioninput>
        </optionresponse>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p> India became an independent nation on August 15, 1947.</p>
        </div>
        </solution>
        </problem>
        """)

    # Multiple choice (multiplechoiceresponse) problem.
    sample_multichoice_problem_xml = textwrap.dedent("""
        <problem>
        <p>Multiple choice problems allow learners to select only one option.</p>
        <p>When you add the problem, be sure to select Settings to specify a Display Name and other values.</p>
        <p>You can use the following example problem as a model.</p>
        <p>Which of the following countries has the largest population?</p>
        <multiplechoiceresponse>
        <choicegroup label="Which of the following countries has the largest population?" type="MultipleChoice">
        <choice correct="false">Brazil
        <choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint>
        </choice>
        <choice correct="false">Germany</choice>
        <choice correct="true">Indonesia</choice>
        <choice correct="false">Russia</choice>
        </choicegroup>
        </multiplechoiceresponse>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>According to September 2014 estimates:</p>
        <p>The population of Indonesia is approximately 250 million.</p>
        <p>The population of Brazil is approximately 200 million.</p>
        <p>The population of Russia is approximately 146 million.</p>
        <p>The population of Germany is approximately 81 million.</p>
        </div>
        </solution>
        </problem>
        """)

    # Numerical input (numericalresponse) problem.
    sample_numerical_input_problem_xml = textwrap.dedent("""
        <problem>
        <p>In a numerical input problem, learners enter numbers or a specific and relatively simple mathematical
        expression. Learners enter the response in plain text, and the system then converts the text to a symbolic
        expression that learners can see below the response field.</p>
        <p>The system can handle several types of characters, including basic operators, fractions, exponents, and
        common constants such as "i". You can refer learners to "Entering Mathematical and Scientific Expressions"
        in the edX Guide for Students for more information.</p>
        <p>When you add the problem, be sure to select Settings to specify a Display Name and other values that
        apply.</p>
        <p>You can use the following example problems as models.</p>
        <p>How many miles away from Earth is the sun? Use scientific notation to answer.</p>
        <numericalresponse answer="9.3*10^7">
        <formulaequationinput label="How many miles away from Earth is the sun?
        Use scientific notation to answer." />
        </numericalresponse>
        <p>The square of what number is -100?</p>
        <numericalresponse answer="10*i">
        <formulaequationinput label="The square of what number is -100?" />
        </numericalresponse>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>The sun is 93,000,000, or 9.3*10^7, miles away from Earth.</p>
        <p>-100 is the square of 10 times the imaginary number, i.</p>
        </div>
        </solution>
        </problem>
        """)

    # Text input (stringresponse) problem.
    sample_text_input_problem_xml = textwrap.dedent("""
        <problem>
        <p>In text input problems, also known as "fill-in-the-blank" problems, learners enter text into a response
        field. The text can include letters and characters such as punctuation marks. The text that the learner
        enters must match your specified answer text exactly. You can specify more than one correct answer.
        Learners must enter a response that matches one of the correct answers exactly.</p>
        <p>When you add the problem, be sure to select Settings to specify a Display Name and other values that
        apply.</p>
        <p>You can use the following example problem as a model.</p>
        <p>What was the first post-secondary school in China to allow both male and female students?</p>
        <stringresponse answer="Nanjing Higher Normal Institute" type="ci" >
        <additional_answer answer="National Central University"></additional_answer>
        <additional_answer answer="Nanjing University"></additional_answer>
        <textline label="What was the first post-secondary school in China to allow both male and female
        students?" size="20"/>
        </stringresponse>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>Nanjing Higher Normal Institute first admitted female students in 1920.</p>
        </div>
        </solution>
        </problem>
        """)

    # Checkbox problem with per-choice/compound feedback and demand hints.
    sample_checkboxes_with_hints_and_feedback_problem_xml = textwrap.dedent("""
        <problem>
        <p>You can provide feedback for each option in a checkbox problem, with distinct feedback depending on
        whether or not the learner selects that option.</p>
        <p>You can also provide compound feedback for a specific combination of answers. For example, if you have
        three possible answers in the problem, you can configure specific feedback for when a learner selects each
        combination of possible answers.</p>
        <p>You can also add hints for learners.</p>
        <p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
        <p>Use the following example problem as a model.</p>
        <p>Which of the following is a fruit? Check all that apply.</p>
        <choiceresponse>
        <checkboxgroup label="Which of the following is a fruit? Check all that apply.">
        <choice correct="true">apple
        <choicehint selected="true">You are correct that an apple is a fruit because it is the fertilized
        ovary that comes from an apple tree and contains seeds.</choicehint>
        <choicehint selected="false">Remember that an apple is also a fruit.</choicehint></choice>
        <choice correct="true">pumpkin
        <choicehint selected="true">You are correct that a pumpkin is a fruit because it is the fertilized
        ovary of a squash plant and contains seeds.</choicehint>
        <choicehint selected="false">Remember that a pumpkin is also a fruit.</choicehint></choice>
        <choice correct="false">potato
        <choicehint selected="true">A potato is a vegetable, not a fruit, because it does not come from a
        flower and does not contain seeds.</choicehint>
        <choicehint selected="false">You are correct that a potato is a vegetable because it is an edible
        part of a plant in tuber form.</choicehint></choice>
        <choice correct="true">tomato
        <choicehint selected="true">You are correct that a tomato is a fruit because it is the fertilized
        ovary of a tomato plant and contains seeds.</choicehint>
        <choicehint selected="false">Many people mistakenly think a tomato is a vegetable. However, because
        a tomato is the fertilized ovary of a tomato plant and contains seeds, it is a fruit.</choicehint>
        </choice>
        <compoundhint value="A B D">An apple, pumpkin, and tomato are all fruits as they all are fertilized
        ovaries of a plant and contain seeds.</compoundhint>
        <compoundhint value="A B C D">You are correct that an apple, pumpkin, and tomato are all fruits as they
        all are fertilized ovaries of a plant and contain seeds. However, a potato is not a fruit as it is an
        edible part of a plant in tuber form and is a vegetable.</compoundhint>
        </checkboxgroup>
        </choiceresponse>
        <demandhint>
        <hint>A fruit is the fertilized ovary from a flower.</hint>
        <hint>A fruit contains seeds of the plant.</hint>
        </demandhint>
        </problem>
        """)

    # Dropdown problem with per-option feedback and demand hints.
    sample_dropdown_with_hints_and_feedback_problem_xml = textwrap.dedent("""
        <problem>
        <p>You can provide feedback for each available option in a dropdown problem.</p>
        <p>You can also add hints for learners.</p>
        <p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
        <p>Use the following example problem as a model.</p>
        <p> A/an ________ is a vegetable.</p>
        <optionresponse>
        <optioninput label=" A/an ________ is a vegetable.">
        <option correct="False">apple <optionhint>An apple is the fertilized ovary that comes from an apple
        tree and contains seeds, meaning it is a fruit.</optionhint></option>
        <option correct="False">pumpkin <optionhint>A pumpkin is the fertilized ovary of a squash plant and
        contains seeds, meaning it is a fruit.</optionhint></option>
        <option correct="True">potato <optionhint>A potato is an edible part of a plant in tuber form and is a
        vegetable.</optionhint></option>
        <option correct="False">tomato <optionhint>Many people mistakenly think a tomato is a vegetable.
        However, because a tomato is the fertilized ovary of a tomato plant and contains seeds, it is a fruit.
        </optionhint></option>
        </optioninput>
        </optionresponse>
        <demandhint>
        <hint>A fruit is the fertilized ovary from a flower.</hint>
        <hint>A fruit contains seeds of the plant.</hint>
        </demandhint>
        </problem>
        """)

    # Multiple choice problem with per-choice feedback and demand hints.
    sample_multichoice_with_hints_and_feedback_problem_xml = textwrap.dedent("""
        <problem>
        <p>You can provide feedback for each option in a multiple choice problem.</p>
        <p>You can also add hints for learners.</p>
        <p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
        <p>Use the following example problem as a model.</p>
        <p>Which of the following is a vegetable?</p>
        <multiplechoiceresponse>
        <choicegroup label="Which of the following is a vegetable?" type="MultipleChoice">
        <choice correct="false">apple <choicehint>An apple is the fertilized ovary that comes from an apple
        tree and contains seeds, meaning it is a fruit.</choicehint></choice>
        <choice correct="false">pumpkin <choicehint>A pumpkin is the fertilized ovary of a squash plant and
        contains seeds, meaning it is a fruit.</choicehint></choice>
        <choice correct="true">potato <choicehint>A potato is an edible part of a plant in tuber form and is a
        vegetable.</choicehint></choice>
        <choice correct="false">tomato <choicehint>Many people mistakenly think a tomato is a vegetable.
        However, because a tomato is the fertilized ovary of a tomato plant and contains seeds, it is a fruit.
        </choicehint></choice>
        </choicegroup>
        </multiplechoiceresponse>
        <demandhint>
        <hint>A fruit is the fertilized ovary from a flower.</hint>
        <hint>A fruit contains seeds of the plant.</hint>
        </demandhint>
        </problem>
        """)

    # Numerical input problem with correct-answer feedback and demand hints.
    sample_numerical_input_with_hints_and_feedback_problem_xml = textwrap.dedent("""
        <problem>
        <p>You can provide feedback for correct answers in numerical input problems. You cannot provide feedback
        for incorrect answers.</p>
        <p>Use feedback for the correct answer to reinforce the process for arriving at the numerical value.</p>
        <p>You can also add hints for learners.</p>
        <p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
        <p>Use the following example problem as a model.</p>
        <p>What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)</p>
        <numericalresponse answer="4">
        <formulaequationinput label="What is the arithmetic mean for the following set of numbers?
        (1, 5, 6, 3, 5)" />
        <correcthint>The mean for this set of numbers is 20 / 5, which equals 4.</correcthint>
        </numericalresponse>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>The mean is calculated by summing the set of numbers and dividing by n. In this case:
        (1 + 5 + 6 + 3 + 5) / 5 = 20 / 5 = 4.</p>
        </div>
        </solution>
        <demandhint>
        <hint>The mean is calculated by summing the set of numbers and dividing by n.</hint>
        <hint>n is the count of items in the set.</hint>
        </demandhint>
        </problem>
        """)

    # Text input problem with correct/incorrect-answer feedback and demand hints.
    sample_text_input_with_hints_and_feedback_problem_xml = textwrap.dedent("""
        <problem>
        <p>You can provide feedback for the correct answer in text input problems, as well as for specific
        incorrect answers.</p>
        <p>Use feedback on expected incorrect answers to address common misconceptions and to provide guidance on
        how to arrive at the correct answer.</p>
        <p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
        <p>Use the following example problem as a model.</p>
        <p>Which U.S. state has the largest land area?</p>
        <stringresponse answer="Alaska" type="ci" >
        <correcthint>Alaska is 576,400 square miles, more than double the land area of the second largest state,
        Texas.</correcthint>
        <stringequalhint answer="Texas">While many people think Texas is the largest state, it is actually the
        second largest, with 261,797 square miles.</stringequalhint>
        <stringequalhint answer="California">California is the third largest state, with 155,959 square miles.
        </stringequalhint>
        <textline label="Which U.S. state has the largest land area?" size="20"/>
        </stringresponse>
        <demandhint>
        <hint>Consider the square miles, not population.</hint>
        <hint>Consider all 50 states, not just the continental United States.</hint>
        </demandhint>
        </problem>
        """)
def _create_descriptor(self, xml, name=None):
    """Build a CapaDescriptor whose problem definition is *xml*.

    A truthy *name* is installed as the descriptor's display name.
    """
    built = CapaDescriptor(get_test_system(), scope_ids=1)
    built.data = xml
    if name:
        built.display_name = name
    return built
@ddt.data(*responsetypes.registry.registered_tags())
def test_all_response_types(self, response_tag):
    """ Tests that every registered response tag is correctly returned """
    xml = "<problem><{response_tag}></{response_tag}></problem>".format(response_tag=response_tag)
    name = "Some Capa Problem"
    descriptor = self._create_descriptor(xml, name=name)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(descriptor.problem_types, {response_tag})
    self.assertEqual(descriptor.index_dictionary(), {
        'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
        'problem_types': [response_tag],
        'content': {
            'display_name': name,
            'capa_content': ''
        }
    })
def test_response_types_ignores_non_response_tags(self):
    """Only response tags (not p/div etc.) contribute to problem_types."""
    xml = textwrap.dedent("""
        <problem>
        <p>Label</p>
        <div>Some comment</div>
        <multiplechoiceresponse>
          <choicegroup type="MultipleChoice" answer-pool="4">
            <choice correct="false">Apple</choice>
            <choice correct="false">Banana</choice>
            <choice correct="false">Chocolate</choice>
            <choice correct ="true">Donut</choice>
          </choicegroup>
        </multiplechoiceresponse>
        </problem>
        """)
    name = "Test Capa Problem"
    descriptor = self._create_descriptor(xml, name=name)
    self.assertEqual(descriptor.problem_types, {"multiplechoiceresponse"})
    self.assertEqual(descriptor.index_dictionary(), {
        'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
        'problem_types': ["multiplechoiceresponse"],
        'content': {
            'display_name': name,
            'capa_content': ' Label Some comment Apple Banana Chocolate Donut '
        }
    })
def test_response_types_multiple_tags(self):
    """problem_types contains every distinct response tag in the problem."""
    xml = textwrap.dedent("""
        <problem>
        <p>Label</p>
        <div>Some comment</div>
        <multiplechoiceresponse>
          <choicegroup type="MultipleChoice" answer-pool="1">
            <choice correct ="true">Donut</choice>
          </choicegroup>
        </multiplechoiceresponse>
        <multiplechoiceresponse>
          <choicegroup type="MultipleChoice" answer-pool="1">
            <choice correct ="true">Buggy</choice>
          </choicegroup>
        </multiplechoiceresponse>
        <optionresponse>
          <optioninput label="Option" options="('1','2')" correct="2"></optioninput>
        </optionresponse>
        </problem>
        """)
    name = "Other Test Capa Problem"
    descriptor = self._create_descriptor(xml, name=name)
    self.assertEqual(descriptor.problem_types, {"multiplechoiceresponse", "optionresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["optionresponse", "multiplechoiceresponse"],
            'content': {
                'display_name': name,
                'capa_content': ' Label Some comment Donut Buggy '
            }
        }
    )
def test_solutions_not_indexed(self):
    """Solution markup is excluded from the indexed capa_content."""
    xml = textwrap.dedent("""
        <problem>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>This is what the 1st solution.</p>
        </div>
        </solution>
        <solution>
        <div class="detailed-solution">
        <p>Explanation</p>
        <p>This is the 2nd solution.</p>
        </div>
        </solution>
        </problem>
        """)
    name = "Blank Common Capa Problem"
    descriptor = self._create_descriptor(xml, name=name)
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': [],
            'content': {
                'display_name': name,
                'capa_content': ' '
            }
        }
    )
def test_indexing_checkboxes(self):
    """Checkbox problems index prompt and choice text, not solutions."""
    name = "Checkboxes"
    descriptor = self._create_descriptor(self.sample_checkbox_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        Title
        Description
        Example
        The following languages are in the Indo-European family:
        Urdu
        Finnish
        Marathi
        French
        Hungarian
        Note: Make sure you select all of the correct options&#8212;there may be more than one!
        """)
    self.assertEqual(descriptor.problem_types, {"choiceresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["choiceresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_dropdown(self):
    """Dropdown problems index prompt text, not options or solutions."""
    name = "Dropdown"
    descriptor = self._create_descriptor(self.sample_dropdown_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        Dropdown problems allow learners to select only one option from a list of options.
        Description
        You can use the following example problem as a model.
        Which of the following countries celebrates its independence on August 15?
        """)
    self.assertEqual(descriptor.problem_types, {"optionresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["optionresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_multiple_choice(self):
    """Multiple choice problems index prompt and choice text."""
    name = "Multiple Choice"
    descriptor = self._create_descriptor(self.sample_multichoice_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        Multiple choice problems allow learners to select only one option.
        When you add the problem, be sure to select Settings to specify a Display Name and other values.
        You can use the following example problem as a model.
        Which of the following countries has the largest population?
        Brazil
        Germany
        Indonesia
        Russia
        """)
    self.assertEqual(descriptor.problem_types, {"multiplechoiceresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["multiplechoiceresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_numerical_input(self):
    """Numerical input problems index prompt text only."""
    name = "Numerical Input"
    descriptor = self._create_descriptor(self.sample_numerical_input_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        In a numerical input problem, learners enter numbers or a specific and relatively simple mathematical
        expression. Learners enter the response in plain text, and the system then converts the text to a symbolic
        expression that learners can see below the response field.
        The system can handle several types of characters, including basic operators, fractions, exponents, and
        common constants such as "i". You can refer learners to "Entering Mathematical and Scientific Expressions"
        in the edX Guide for Students for more information.
        When you add the problem, be sure to select Settings to specify a Display Name and other values that
        apply.
        You can use the following example problems as models.
        How many miles away from Earth is the sun? Use scientific notation to answer.
        The square of what number is -100?
        """)
    self.assertEqual(descriptor.problem_types, {"numericalresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["numericalresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_text_input(self):
    """Text input problems index prompt text only."""
    name = "Text Input"
    descriptor = self._create_descriptor(self.sample_text_input_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        In text input problems, also known as "fill-in-the-blank" problems, learners enter text into a response
        field. The text can include letters and characters such as punctuation marks. The text that the learner
        enters must match your specified answer text exactly. You can specify more than one correct answer.
        Learners must enter a response that matches one of the correct answers exactly.
        When you add the problem, be sure to select Settings to specify a Display Name and other values that
        apply.
        You can use the following example problem as a model.
        What was the first post-secondary school in China to allow both male and female students?
        """)
    self.assertEqual(descriptor.problem_types, {"stringresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["stringresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_checkboxes_with_hints_and_feedback(self):
    """Hints/feedback markup is excluded from the indexed content."""
    name = "Checkboxes with Hints and Feedback"
    descriptor = self._create_descriptor(self.sample_checkboxes_with_hints_and_feedback_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        You can provide feedback for each option in a checkbox problem, with distinct feedback depending on
        whether or not the learner selects that option.
        You can also provide compound feedback for a specific combination of answers. For example, if you have
        three possible answers in the problem, you can configure specific feedback for when a learner selects each
        combination of possible answers.
        You can also add hints for learners.
        Be sure to select Settings to specify a Display Name and other values that apply.
        Use the following example problem as a model.
        Which of the following is a fruit? Check all that apply.
        apple
        pumpkin
        potato
        tomato
        """)
    self.assertEqual(descriptor.problem_types, {"choiceresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["choiceresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_dropdown_with_hints_and_feedback(self):
    """Option hints are excluded; option labels are indexed."""
    name = "Dropdown with Hints and Feedback"
    descriptor = self._create_descriptor(self.sample_dropdown_with_hints_and_feedback_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        You can provide feedback for each available option in a dropdown problem.
        You can also add hints for learners.
        Be sure to select Settings to specify a Display Name and other values that apply.
        Use the following example problem as a model.
        A/an ________ is a vegetable.
        apple
        pumpkin
        potato
        tomato
        """)
    self.assertEqual(descriptor.problem_types, {"optionresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["optionresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_multiple_choice_with_hints_and_feedback(self):
    """Choice hints are excluded; choice labels are indexed."""
    name = "Multiple Choice with Hints and Feedback"
    descriptor = self._create_descriptor(self.sample_multichoice_with_hints_and_feedback_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        You can provide feedback for each option in a multiple choice problem.
        You can also add hints for learners.
        Be sure to select Settings to specify a Display Name and other values that apply.
        Use the following example problem as a model.
        Which of the following is a vegetable?
        apple
        pumpkin
        potato
        tomato
        """)
    self.assertEqual(descriptor.problem_types, {"multiplechoiceresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["multiplechoiceresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_numerical_input_with_hints_and_feedback(self):
    """Correct-answer hints and solutions are excluded from indexing."""
    name = "Numerical Input with Hints and Feedback"
    descriptor = self._create_descriptor(self.sample_numerical_input_with_hints_and_feedback_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        You can provide feedback for correct answers in numerical input problems. You cannot provide feedback
        for incorrect answers.
        Use feedback for the correct answer to reinforce the process for arriving at the numerical value.
        You can also add hints for learners.
        Be sure to select Settings to specify a Display Name and other values that apply.
        Use the following example problem as a model.
        What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)
        """)
    self.assertEqual(descriptor.problem_types, {"numericalresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["numericalresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_text_input_with_hints_and_feedback(self):
    """String-equality hints are excluded from indexing."""
    name = "Text Input with Hints and Feedback"
    descriptor = self._create_descriptor(self.sample_text_input_with_hints_and_feedback_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        You can provide feedback for the correct answer in text input problems, as well as for specific
        incorrect answers.
        Use feedback on expected incorrect answers to address common misconceptions and to provide guidance on
        how to arrive at the correct answer.
        Be sure to select Settings to specify a Display Name and other values that apply.
        Use the following example problem as a model.
        Which U.S. state has the largest land area?
        """)
    self.assertEqual(descriptor.problem_types, {"stringresponse"})
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': ["stringresponse"],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
def test_indexing_problem_with_html_tags(self):
    """style/script/comment/CDATA content is stripped from indexed text."""
    sample_problem_xml = textwrap.dedent("""
        <problem>
        <style>p {left: 10px;}</style>
        <!-- Beginning of the html -->
        <p>This has HTML comment in it.<!-- Commenting Content --></p>
        <!-- Here comes CDATA -->
        <![CDATA[This is just a CDATA!]]>
        <p>HTML end.</p>
        <!-- Script that makes everything alive! -->
        <script>
        var alive;
        </script>
        </problem>
        """)
    name = "Mixed business"
    descriptor = self._create_descriptor(sample_problem_xml, name=name)
    capa_content = textwrap.dedent("""
        This has HTML comment in it.
        HTML end.
        """)
    self.assertEqual(
        descriptor.index_dictionary(), {
            'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
            'problem_types': [],
            'content': {
                'display_name': name,
                'capa_content': capa_content.replace("\n", " ")
            }
        }
    )
class ComplexEncoderTest(unittest.TestCase):
    """Tests for the ComplexEncoder JSON serializer."""

    def test_default(self):
        """
        Check that complex numbers can be encoded into JSON.
        """
        value = complex(1, -1)
        encoded = json.dumps(value, cls=ComplexEncoder)
        # json.dumps wraps the encoded value in double quotes; drop them.
        self.assertEqual('1-1*j', encoded[1:-1])
class TestProblemCheckTracking(unittest.TestCase):
"""
Ensure correct tracking information is included in events emitted during problem checks.
"""
def setUp(self):
    """Enable full diff output for the large event dicts compared below."""
    super(TestProblemCheckTracking, self).setUp()
    # The expected 'submission' dicts are large; show complete diffs on failure.
    self.maxDiff = None
def test_choice_answer_text(self):
    """The tracked submission event reports human-readable answer text per input."""
    xml = """\
        <problem display_name="Multiple Choice Questions">
          <p>What color is the open ocean on a sunny day?</p>
          <optionresponse>
            <optioninput options="('yellow','blue','green')" correct="blue" label="What color is the open ocean on a sunny day?"/>
          </optionresponse>
          <p>Which piece of furniture is built for sitting?</p>
          <multiplechoiceresponse>
            <choicegroup type="MultipleChoice">
              <choice correct="false"><text>a table</text></choice>
              <choice correct="false"><text>a desk</text></choice>
              <choice correct="true"><text>a chair</text></choice>
              <choice correct="false"><text>a bookshelf</text></choice>
            </choicegroup>
          </multiplechoiceresponse>
          <p>Which of the following are musical instruments?</p>
          <choiceresponse>
            <checkboxgroup label="Which of the following are musical instruments?">
              <choice correct="true">a piano</choice>
              <choice correct="false">a tree</choice>
              <choice correct="true">a guitar</choice>
              <choice correct="false">a window</choice>
            </checkboxgroup>
          </choiceresponse>
        </problem>
        """
    # Whitespace screws up comparisons
    xml = ''.join(line.strip() for line in xml.split('\n'))
    factory = self.capa_factory_for_problem_xml(xml)
    module = factory.create()
    answer_input_dict = {
        factory.input_key(2): 'blue',
        factory.input_key(3): 'choice_0',
        factory.input_key(4): ['choice_0', 'choice_1'],
    }
    event = self.get_event_for_answers(module, answer_input_dict)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(event['submission'], {
        factory.answer_key(2): {
            'question': 'What color is the open ocean on a sunny day?',
            'answer': 'blue',
            'response_type': 'optionresponse',
            'input_type': 'optioninput',
            'correct': True,
            'variant': '',
        },
        factory.answer_key(3): {
            'question': '',
            'answer': u'<text>a table</text>',
            'response_type': 'multiplechoiceresponse',
            'input_type': 'choicegroup',
            'correct': False,
            'variant': '',
        },
        factory.answer_key(4): {
            'question': 'Which of the following are musical instruments?',
            'answer': [u'a piano', u'a tree'],
            'response_type': 'choiceresponse',
            'input_type': 'checkboxgroup',
            'correct': False,
            'variant': '',
        },
    })
def capa_factory_for_problem_xml(self, xml):
    """Return a CapaFactory subclass whose sample problem XML is *xml*."""
    class CustomCapaFactory(CapaFactory):
        """
        A factory for creating a Capa problem with arbitrary xml.
        """
        sample_problem_xml = textwrap.dedent(xml)

    return CustomCapaFactory
def get_event_for_answers(self, module, answer_input_dict):
    """Submit *answer_input_dict* to *module* and return the tracked check event payload."""
    with patch.object(module.runtime, 'track_function') as mock_track_function:
        module.check_problem(answer_input_dict)

        self.assertGreaterEqual(len(mock_track_function.mock_calls), 1)
        # There are potentially 2 track logs: answers and hint. [-1]=answers.
        mock_call = mock_track_function.mock_calls[-1]
        # mock_call[1] is the positional-args tuple: (event name, event payload)
        event = mock_call[1][1]

        return event
def test_numerical_textline(self):
    """A correct numerical textline answer is reported in the submission event."""
    factory = CapaFactory
    module = factory.create()

    answer_input_dict = {
        factory.input_key(2): '3.14'
    }

    event = self.get_event_for_answers(module, answer_input_dict)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(event['submission'], {
        factory.answer_key(2): {
            'question': '',
            'answer': '3.14',
            'response_type': 'numericalresponse',
            'input_type': 'textline',
            'correct': True,
            'variant': '',
        }
    })
def test_multiple_inputs(self):
    """Each input of a multi-input response gets its own submission entry."""
    factory = self.capa_factory_for_problem_xml("""\
        <problem display_name="Multiple Inputs">
          <p>Choose the correct color</p>
          <optionresponse>
            <p>What color is the sky?</p>
            <optioninput options="('yellow','blue','green')" correct="blue"/>
            <p>What color are pine needles?</p>
            <optioninput options="('yellow','blue','green')" correct="green"/>
          </optionresponse>
        </problem>
        """)
    module = factory.create()

    answer_input_dict = {
        factory.input_key(2, 1): 'blue',
        factory.input_key(2, 2): 'yellow',
    }

    event = self.get_event_for_answers(module, answer_input_dict)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(event['submission'], {
        factory.answer_key(2, 1): {
            'question': '',
            'answer': 'blue',
            'response_type': 'optionresponse',
            'input_type': 'optioninput',
            'correct': True,
            'variant': '',
        },
        factory.answer_key(2, 2): {
            'question': '',
            'answer': 'yellow',
            'response_type': 'optionresponse',
            'input_type': 'optioninput',
            'correct': False,
            'variant': '',
        },
    })
def test_optioninput_extended_xml(self):
    """Test the new XML form of writing with <option> tag instead of options= attribute."""
    factory = self.capa_factory_for_problem_xml("""\
        <problem display_name="Woo Hoo">
          <p>Are you the Gatekeeper?</p>
          <optionresponse>
            <optioninput>
              <option correct="True" label="Good Job">
                apple
                <optionhint>
                  banana
                </optionhint>
              </option>
              <option correct="False" label="blorp">
                cucumber
                <optionhint>
                  donut
                </optionhint>
              </option>
            </optioninput>
            <optioninput>
              <option correct="True">
                apple
                <optionhint>
                  banana
                </optionhint>
              </option>
              <option correct="False">
                cucumber
                <optionhint>
                  donut
                </optionhint>
              </option>
            </optioninput>
          </optionresponse>
        </problem>
        """)
    module = factory.create()

    answer_input_dict = {
        factory.input_key(2, 1): 'apple',
        factory.input_key(2, 2): 'cucumber',
    }

    event = self.get_event_for_answers(module, answer_input_dict)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(event['submission'], {
        factory.answer_key(2, 1): {
            'question': '',
            'answer': 'apple',
            'response_type': 'optionresponse',
            'input_type': 'optioninput',
            'correct': True,
            'variant': '',
        },
        factory.answer_key(2, 2): {
            'question': '',
            'answer': 'cucumber',
            'response_type': 'optionresponse',
            'input_type': 'optioninput',
            'correct': False,
            'variant': '',
        },
    })
def test_rerandomized_inputs(self):
factory = CapaFactory
module = factory.create(rerandomize=RANDOMIZATION.ALWAYS)
answer_input_dict = {
factory.input_key(2): '3.14'
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': '3.14',
'response_type': 'numericalresponse',
'input_type': 'textline',
'correct': True,
'variant': module.seed,
}
})
    def test_file_inputs(self):
        """File-submission answers must be logged as the list of submitted
        file paths; the xqueue HTTP layer is mocked so nothing is posted."""
        fnames = ["prog1.py", "prog2.py", "prog3.py"]
        fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
        fileobjs = [open(fpath) for fpath in fpaths]
        # Ensure the open file handles are closed even if the test fails.
        for fileobj in fileobjs:
            self.addCleanup(fileobj.close)
        factory = CapaFactoryWithFiles
        module = factory.create()
        # Mock the XQueueInterface.
        xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
        xqueue_interface._http_post = Mock(return_value=(0, "ok")) # pylint: disable=protected-access
        module.system.xqueue['interface'] = xqueue_interface
        # Response 2 takes the files; response 3 is an ordinary text answer.
        answer_input_dict = {
            CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
            CapaFactoryWithFiles.input_key(response_num=3): 'None',
        }
        event = self.get_event_for_answers(module, answer_input_dict)
        self.assertEquals(event['submission'], {
            factory.answer_key(2): {
                'question': '',
                'answer': fpaths,
                'response_type': 'coderesponse',
                'input_type': 'filesubmission',
                'correct': False,
                'variant': '',
            },
            factory.answer_key(3): {
                'answer': 'None',
                'correct': True,
                'question': '',
                'response_type': 'customresponse',
                'input_type': 'textline',
                'variant': ''
            }
        })
def test_get_answer_with_jump_to_id_urls(self):
"""
Make sure replace_jump_to_id_urls() is called in get_answer.
"""
problem_xml = textwrap.dedent("""
<problem>
<p>What is 1+4?</p>
<numericalresponse answer="5">
<formulaequationinput />
</numericalresponse>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
<a href="/jump_to_id/c0f8d54964bc44a4a1deb8ecce561ecd">here's the same link to the hint page.</a>
</div>
</solution>
</problem>
""")
data = dict()
problem = CapaFactory.create(showanswer='always', xml=problem_xml)
problem.runtime.replace_jump_to_id_urls = Mock()
problem.get_answer(data)
self.assertTrue(problem.runtime.replace_jump_to_id_urls.called)
|
vivekmishra1991/scikit-learn | refs/heads/master | sklearn/cluster/dbscan_.py | 92 | # -*- coding: utf-8 -*-
"""
DBSCAN: Density-Based Spatial Clustering of Applications with Noise
"""
# Author: Robert Layton <robertlayton@gmail.com>
# Joel Nothman <joel.nothman@gmail.com>
# Lars Buitinck
#
# License: BSD 3 clause
import warnings
import numpy as np
from scipy import sparse
from ..base import BaseEstimator, ClusterMixin
from ..metrics import pairwise_distances
from ..utils import check_array, check_consistent_length
from ..utils.fixes import astype
from ..neighbors import NearestNeighbors
from ._dbscan_inner import dbscan_inner
def dbscan(X, eps=0.5, min_samples=5, metric='minkowski',
           algorithm='auto', leaf_size=30, p=2, sample_weight=None,
           random_state=None):
    """Perform DBSCAN clustering from vector array or distance matrix.
    Read more in the :ref:`User Guide <dbscan>`.
    Parameters
    ----------
    X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
            array of shape (n_samples, n_samples)
        A feature array, or array of distances between samples if
        ``metric='precomputed'``.
    eps : float, optional
        The maximum distance between two samples for them to be considered
        as in the same neighborhood.
    min_samples : int, optional
        The number of samples (or total weight) in a neighborhood for a point
        to be considered as a core point. This includes the point itself.
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string or callable, it must be one of
        the options allowed by metrics.pairwise.pairwise_distances for its
        metric parameter.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square. X may be a sparse matrix, in which case only "nonzero"
        elements may be considered neighbors for DBSCAN.
    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
        The algorithm to be used by the NearestNeighbors module
        to compute pointwise distances and find nearest neighbors.
        See NearestNeighbors module documentation for details.
    leaf_size : int, optional (default = 30)
        Leaf size passed to BallTree or cKDTree. This can affect the speed
        of the construction and query, as well as the memory required
        to store the tree. The optimal value depends
        on the nature of the problem.
    p : float, optional
        The power of the Minkowski metric to be used to calculate distance
        between points.
    sample_weight : array, shape (n_samples,), optional
        Weight of each sample, such that a sample with a weight of at least
        ``min_samples`` is by itself a core sample; a sample with negative
        weight may inhibit its eps-neighbor from being core.
        Note that weights are absolute, and default to 1.
    random_state: numpy.RandomState, optional
        Deprecated and ignored as of version 0.16, will be removed in version
        0.18. DBSCAN does not use random initialization.
    Returns
    -------
    core_samples : array [n_core_samples]
        Indices of core samples.
    labels : array [n_samples]
        Cluster labels for each point. Noisy samples are given the label -1.
    Notes
    -----
    See examples/cluster/plot_dbscan.py for an example.
    This implementation bulk-computes all neighborhood queries, which increases
    the memory complexity to O(n.d) where d is the average number of neighbors,
    while original DBSCAN had memory complexity O(n).
    Sparse neighborhoods can be precomputed using
    :func:`NearestNeighbors.radius_neighbors_graph
    <sklearn.neighbors.NearestNeighbors.radius_neighbors_graph>`
    with ``mode='distance'``.
    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, "A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with Noise".
    In: Proceedings of the 2nd International Conference on Knowledge Discovery
    and Data Mining, Portland, OR, AAAI Press, pp. 226-231. 1996
    """
    if not eps > 0.0:
        raise ValueError("eps must be positive.")
    if random_state is not None:
        warnings.warn("The parameter random_state is deprecated in 0.16 "
                      "and will be removed in version 0.18. "
                      "DBSCAN is deterministic except for rare border cases.",
                      category=DeprecationWarning)
    X = check_array(X, accept_sparse='csr')
    if sample_weight is not None:
        sample_weight = np.asarray(sample_weight)
        check_consistent_length(X, sample_weight)
    # Calculate neighborhood for all samples. This leaves the original point
    # in, which needs to be considered later (i.e. point i is in the
    # neighborhood of point i. While True, its useless information)
    if metric == 'precomputed' and sparse.issparse(X):
        neighborhoods = np.empty(X.shape[0], dtype=object)
        X.sum_duplicates()  # XXX: modifies X's internals in-place
        # Keep only stored distances within eps; entries not stored in the
        # sparse matrix are treated as non-neighbors.
        X_mask = X.data <= eps
        masked_indices = astype(X.indices, np.intp, copy=False)[X_mask]
        # Cumulative count of surviving entries, sampled at each row end,
        # gives the per-row offsets into masked_indices.
        masked_indptr = np.cumsum(X_mask)[X.indptr[1:] - 1]
        # insert the diagonal: a point is its own neighbor, but 0 distance
        # means absence from sparse matrix data
        masked_indices = np.insert(masked_indices, masked_indptr,
                                   np.arange(X.shape[0]))
        # Shift offsets to account for the inserted diagonal entries and
        # drop the final (total-count) offset.
        masked_indptr = masked_indptr[:-1] + np.arange(1, X.shape[0])
        # split into rows
        neighborhoods[:] = np.split(masked_indices, masked_indptr)
    else:
        neighbors_model = NearestNeighbors(radius=eps, algorithm=algorithm,
                                           leaf_size=leaf_size,
                                           metric=metric, p=p)
        neighbors_model.fit(X)
        # This has worst case O(n^2) memory complexity
        neighborhoods = neighbors_model.radius_neighbors(X, eps,
                                                         return_distance=False)
    # Neighborhood size (or total neighbor weight) decides core-ness below.
    if sample_weight is None:
        n_neighbors = np.array([len(neighbors)
                                for neighbors in neighborhoods])
    else:
        n_neighbors = np.array([np.sum(sample_weight[neighbors])
                                for neighbors in neighborhoods])
    # Initially, all samples are noise.
    labels = -np.ones(X.shape[0], dtype=np.intp)
    # A list of all core samples found.
    core_samples = np.asarray(n_neighbors >= min_samples, dtype=np.uint8)
    dbscan_inner(core_samples, neighborhoods, labels)
    return np.where(core_samples)[0], labels
class DBSCAN(BaseEstimator, ClusterMixin):
    """Perform DBSCAN clustering from vector array or distance matrix.
    DBSCAN - Density-Based Spatial Clustering of Applications with Noise.
    Finds core samples of high density and expands clusters from them.
    Good for data which contains clusters of similar density.
    Read more in the :ref:`User Guide <dbscan>`.
    Parameters
    ----------
    eps : float, optional
        The maximum distance between two samples for them to be considered
        as in the same neighborhood.
    min_samples : int, optional
        The number of samples (or total weight) in a neighborhood for a point
        to be considered as a core point. This includes the point itself.
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string or callable, it must be one of
        the options allowed by metrics.pairwise.calculate_distance for its
        metric parameter.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square. X may be a sparse matrix, in which case only "nonzero"
        elements may be considered neighbors for DBSCAN.
    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
        The algorithm to be used by the NearestNeighbors module
        to compute pointwise distances and find nearest neighbors.
        See NearestNeighbors module documentation for details.
    leaf_size : int, optional (default = 30)
        Leaf size passed to BallTree or cKDTree. This can affect the speed
        of the construction and query, as well as the memory required
        to store the tree. The optimal value depends
        on the nature of the problem.
    random_state: numpy.RandomState, optional
        Deprecated and ignored as of version 0.16, will be removed in version
        0.18. DBSCAN does not use random initialization.
    Attributes
    ----------
    core_sample_indices_ : array, shape = [n_core_samples]
        Indices of core samples.
    components_ : array, shape = [n_core_samples, n_features]
        Copy of each core sample found by training.
    labels_ : array, shape = [n_samples]
        Cluster labels for each point in the dataset given to fit().
        Noisy samples are given the label -1.
    Notes
    -----
    See examples/cluster/plot_dbscan.py for an example.
    This implementation bulk-computes all neighborhood queries, which increases
    the memory complexity to O(n.d) where d is the average number of neighbors,
    while original DBSCAN had memory complexity O(n).
    Sparse neighborhoods can be precomputed using
    :func:`NearestNeighbors.radius_neighbors_graph
    <sklearn.neighbors.NearestNeighbors.radius_neighbors_graph>`
    with ``mode='distance'``.
    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, "A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with Noise".
    In: Proceedings of the 2nd International Conference on Knowledge Discovery
    and Data Mining, Portland, OR, AAAI Press, pp. 226-231. 1996
    """
    def __init__(self, eps=0.5, min_samples=5, metric='euclidean',
                 algorithm='auto', leaf_size=30, p=None, random_state=None):
        # Parameters are stored verbatim (sklearn convention); all actual
        # work happens in fit(), which forwards them to dbscan().
        self.eps = eps
        self.min_samples = min_samples
        self.metric = metric
        self.algorithm = algorithm
        self.leaf_size = leaf_size
        self.p = p
        self.random_state = random_state
    def fit(self, X, y=None, sample_weight=None):
        """Perform DBSCAN clustering from features or distance matrix.
        Parameters
        ----------
        X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
                array of shape (n_samples, n_samples)
            A feature array, or array of distances between samples if
            ``metric='precomputed'``.
        sample_weight : array, shape (n_samples,), optional
            Weight of each sample, such that a sample with a weight of at least
            ``min_samples`` is by itself a core sample; a sample with negative
            weight may inhibit its eps-neighbor from being core.
            Note that weights are absolute, and default to 1.
        """
        X = check_array(X, accept_sparse='csr')
        # get_params() forwards every constructor argument to the function.
        clust = dbscan(X, sample_weight=sample_weight, **self.get_params())
        self.core_sample_indices_, self.labels_ = clust
        if len(self.core_sample_indices_):
            # fix for scipy sparse indexing issue
            self.components_ = X[self.core_sample_indices_].copy()
        else:
            # no core samples
            self.components_ = np.empty((0, X.shape[1]))
        return self
    def fit_predict(self, X, y=None, sample_weight=None):
        """Performs clustering on X and returns cluster labels.
        Parameters
        ----------
        X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
                array of shape (n_samples, n_samples)
            A feature array, or array of distances between samples if
            ``metric='precomputed'``.
        sample_weight : array, shape (n_samples,), optional
            Weight of each sample, such that a sample with a weight of at least
            ``min_samples`` is by itself a core sample; a sample with negative
            weight may inhibit its eps-neighbor from being core.
            Note that weights are absolute, and default to 1.
        Returns
        -------
        y : ndarray, shape (n_samples,)
            cluster labels
        """
        self.fit(X, sample_weight=sample_weight)
        return self.labels_
|
pilou-/ansible | refs/heads/devel | lib/ansible/modules/packaging/os/macports.py | 59 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Jimmy Tang <jcftang@gmail.com>
# Based on okpg (Patrick Pelletier <pp.pelletier@gmail.com>), pacman
# (Afterburn) and pkgin (Shaun Zinck) modules
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: macports
author: "Jimmy Tang (@jcftang)"
short_description: Package manager for MacPorts
description:
- Manages MacPorts packages (ports)
version_added: "1.1"
options:
name:
description:
- A list of port names.
aliases: ['port']
selfupdate:
description:
- Update Macports and the ports tree, either prior to installing ports or as a separate step.
- Equivalent to running C(port selfupdate).
aliases: ['update_cache', 'update_ports']
default: "no"
type: bool
state:
description:
- Indicates the desired state of the port.
choices: [ 'present', 'absent', 'active', 'inactive' ]
default: present
upgrade:
description:
- Upgrade all outdated ports, either prior to installing ports or as a separate step.
- Equivalent to running C(port upgrade outdated).
default: "no"
type: bool
version_added: "2.8"
variant:
description:
- A port variant specification.
- 'C(variant) is only supported with state: I(installed)/I(present).'
aliases: ['variants']
version_added: "2.7"
'''
EXAMPLES = '''
- name: Install the foo port
macports:
name: foo
- name: Install the universal, x11 variant of the foo port
macports:
name: foo
variant: +universal+x11
- name: Install a list of ports
macports:
name: "{{ ports }}"
vars:
ports:
- foo
- foo-tools
- name: Update Macports and the ports tree, then upgrade all outdated ports
macports:
selfupdate: yes
upgrade: yes
- name: Update Macports and the ports tree, then install the foo port
macports:
name: foo
selfupdate: yes
- name: Remove the foo port
macports:
name: foo
state: absent
- name: Activate the foo port
macports:
name: foo
state: active
- name: Deactivate the foo port
macports:
name: foo
state: inactive
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import shlex_quote
def selfupdate(module, port_path):
    """Run ``port -v selfupdate`` to refresh MacPorts and the ports tree.

    Returns a ``(changed, msg)`` tuple on success; calls ``fail_json``
    when the command exits non-zero.
    """
    rc, out, err = module.run_command("%s -v selfupdate" % port_path)
    if rc != 0:
        module.fail_json(msg="Failed to update Macports", stdout=out, stderr=err)
    # The verbose output is the only signal of what actually changed:
    # either ports were (re)parsed into the tree, or MacPorts itself was
    # upgraded to a new release.
    for line in out.split('\n'):
        if not line:
            continue
        stripped = line.strip()
        if (re.search(r'Total number of ports parsed:\s+[^0]', stripped) or
                re.search(r'Installing new Macports release', stripped)):
            return (True, "Macports updated successfully")
    return (False, "Macports already up-to-date")
def upgrade(module, port_path):
    """Run ``port upgrade outdated``.

    Returns a ``(changed, msg)`` tuple; calls ``fail_json`` on a genuine
    failure.
    """
    rc, out, err = module.run_command("%s upgrade outdated" % port_path)
    # ``port`` exits 1 when there is nothing to upgrade, so the stdout
    # message must be checked before the return code.
    if out.strip() == "Nothing to upgrade.":
        return (False, "Ports already upgraded")
    if rc == 0:
        return (True, "Outdated ports upgraded successfully")
    module.fail_json(msg="Failed to upgrade outdated ports", stdout=out, stderr=err)
def query_port(module, port_path, name, state="present"):
    """ Returns whether a port is installed or not. """
    # NOTE(review): both branches grep for ``name`` as an unanchored
    # substring, so e.g. "foo" would also match an installed "foo-tools" —
    # confirm whether anchored/whole-word matching is intended.
    if state == "present":
        rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)
        if rc == 0:
            return True
        return False
    elif state == "active":
        rc, out, err = module.run_command("%s installed %s | grep -q active" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)
        if rc == 0:
            return True
        return False
    # Any other ``state`` falls through and implicitly returns None (falsy);
    # callers only pass "present" or "active".
def remove_ports(module, port_path, ports):
    """Uninstall every port in ``ports`` that is currently installed.

    Exits the module via ``exit_json`` reporting how many ports were
    removed; fails on the first port that cannot be uninstalled.
    """
    removed = 0
    # One port at a time so a failure message can name the culprit.
    for name in ports:
        if not query_port(module, port_path, name):
            # Already absent: nothing to do for this port.
            continue
        rc, out, err = module.run_command("%s uninstall %s" % (port_path, name))
        # Re-query rather than trusting rc: removal succeeded only if the
        # port is actually gone.
        if query_port(module, port_path, name):
            module.fail_json(msg="Failed to remove %s: %s" % (name, err))
        removed += 1
    if removed > 0:
        module.exit_json(changed=True, msg="Removed %s port(s)" % removed)
    module.exit_json(changed=False, msg="Port(s) already absent")
def install_ports(module, port_path, ports, variant):
    """Install every port in ``ports`` (with ``variant``) that is missing.

    Exits the module via ``exit_json`` reporting how many ports were
    installed; fails on the first port still absent after installation.
    """
    installed = 0
    for name in ports:
        if query_port(module, port_path, name):
            # Already present: skip.
            continue
        rc, out, err = module.run_command("%s install %s %s" % (port_path, name, variant))
        # Verify by re-querying instead of trusting the return code.
        if not query_port(module, port_path, name):
            module.fail_json(msg="Failed to install %s: %s" % (name, err))
        installed += 1
    if installed > 0:
        module.exit_json(changed=True, msg="Installed %s port(s)" % (installed))
    module.exit_json(changed=False, msg="Port(s) already present")
def activate_ports(module, port_path, ports):
    """Activate each installed-but-inactive port.

    Fails if a port is not installed at all; exits via ``exit_json`` with
    the number of ports activated.
    """
    activated = 0
    for name in ports:
        # A port must be installed before it can be activated.
        if not query_port(module, port_path, name):
            module.fail_json(msg="Failed to activate %s, port(s) not present" % (name))
        if query_port(module, port_path, name, state="active"):
            # Already active: skip.
            continue
        rc, out, err = module.run_command("%s activate %s" % (port_path, name))
        if not query_port(module, port_path, name, state="active"):
            module.fail_json(msg="Failed to activate %s: %s" % (name, err))
        activated += 1
    if activated > 0:
        module.exit_json(changed=True, msg="Activated %s port(s)" % (activated))
    module.exit_json(changed=False, msg="Port(s) already active")
def deactivate_ports(module, port_path, ports):
    """Deactivate each currently-active port.

    Fails if a port is not installed at all; exits via ``exit_json`` with
    the number of ports deactivated.
    """
    deactivated = 0
    for name in ports:
        # A port must be installed before it can be deactivated.
        if not query_port(module, port_path, name):
            module.fail_json(msg="Failed to deactivate %s, port(s) not present" % (name))
        if not query_port(module, port_path, name, state="active"):
            # Already inactive: skip.
            continue
        rc, out, err = module.run_command("%s deactivate %s" % (port_path, name))
        if query_port(module, port_path, name, state="active"):
            module.fail_json(msg="Failed to deactivate %s: %s" % (name, err))
        deactivated += 1
    if deactivated > 0:
        module.exit_json(changed=True, msg="Deactivated %s port(s)" % (deactivated))
    module.exit_json(changed=False, msg="Port(s) already inactive")
def main():
    """Module entry point: parse parameters, optionally selfupdate/upgrade,
    then apply the requested port state. All paths exit via the module's
    exit_json/fail_json helpers."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=["port"], type='list'),
            selfupdate=dict(aliases=["update_cache", "update_ports"], default=False, type='bool'),
            state=dict(default="present", choices=["present", "installed", "absent", "removed", "active", "inactive"]),
            upgrade=dict(default=False, type='bool'),
            variant=dict(aliases=["variants"], default=None, type='str')
        )
    )
    # Second argument True makes get_bin_path fail the module if 'port'
    # is not found; /opt/local/bin is the default MacPorts prefix.
    port_path = module.get_bin_path('port', True, ['/opt/local/bin'])
    p = module.params
    if p["selfupdate"]:
        (changed, msg) = selfupdate(module, port_path)
        # selfupdate may be the sole requested action.
        if not (p["name"] or p["upgrade"]):
            module.exit_json(changed=changed, msg=msg)
    if p["upgrade"]:
        (changed, msg) = upgrade(module, port_path)
        # upgrade may also be the sole requested action.
        if not p["name"]:
            module.exit_json(changed=changed, msg=msg)
    pkgs = p["name"]
    variant = p["variant"]
    # Each helper below exits the module itself via exit_json/fail_json.
    if p["state"] in ["present", "installed"]:
        install_ports(module, port_path, pkgs, variant)
    elif p["state"] in ["absent", "removed"]:
        remove_ports(module, port_path, pkgs)
    elif p["state"] == "active":
        activate_ports(module, port_path, pkgs)
    elif p["state"] == "inactive":
        deactivate_ports(module, port_path, pkgs)
if __name__ == '__main__':
main()
|
allenp/odoo | refs/heads/9.0 | openerp/addons/test_uninstall/__init__.py | 2355 | # -*- coding: utf-8 -*-
import models
|
mbauskar/alec_frappe5_erpnext | refs/heads/develop | erpnext/selling/doctype/customer/test_customer.py | 5 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.test_runner import make_test_records
from erpnext.exceptions import CustomerFrozen
test_ignore = ["Price List"]
test_records = frappe.get_test_records('Customer')
class TestCustomer(unittest.TestCase):
    """Integration tests for the Customer doctype (Python 2 / frappe era:
    note the use of dict.iteritems below)."""
    def test_party_details(self):
        """get_party_details for the primary contact must return the
        expected address, contact and territory fields."""
        from erpnext.accounts.party import get_party_details
        to_check = {
            'selling_price_list': None,
            'customer_group': '_Test Customer Group',
            'contact_designation': None,
            'customer_address': '_Test Address-Office',
            'contact_department': None,
            'contact_email': 'test_contact_customer@example.com',
            'contact_mobile': None,
            'sales_team': [],
            'contact_display': '_Test Contact For _Test Customer',
            'contact_person': '_Test Contact For _Test Customer-_Test Customer',
            'territory': u'_Test Territory',
            'contact_phone': '+91 0000000000',
            'customer_name': '_Test Customer'
        }
        make_test_records("Address")
        make_test_records("Contact")
        # get_party_details only picks up the primary contact.
        frappe.db.set_value("Contact", "_Test Contact For _Test Customer-_Test Customer",
            "is_primary_contact", 1)
        details = get_party_details("_Test Customer")
        for key, value in to_check.iteritems():
            self.assertEquals(value, details.get(key))
    def test_rename(self):
        """Renaming a Customer must carry its comments along and rename back
        cleanly."""
        # Start from a clean slate for both the old and the new name.
        for name in ("_Test Customer 1", "_Test Customer 1 Renamed"):
            frappe.db.sql("""delete from `tabComment` where comment_doctype=%s and comment_docname=%s""",
                ("Customer", name))
        comment = frappe.new_doc("Comment")
        comment.update({
            "comment": "Test Comment for Rename",
            "comment_doctype": "Customer",
            "comment_docname": "_Test Customer 1"
        })
        comment.insert()
        frappe.rename_doc("Customer", "_Test Customer 1", "_Test Customer 1 Renamed")
        self.assertTrue(frappe.db.exists("Customer", "_Test Customer 1 Renamed"))
        self.assertFalse(frappe.db.exists("Customer", "_Test Customer 1"))
        # test that comment gets renamed
        self.assertEquals(frappe.db.get_value("Comment",
            {"comment_doctype": "Customer", "comment_docname": "_Test Customer 1 Renamed"}), comment.name)
        # Restore the original name so other tests are unaffected.
        frappe.rename_doc("Customer", "_Test Customer 1 Renamed", "_Test Customer 1")
    def test_freezed_customer(self):
        """Saving a Sales Order against a frozen customer must raise
        CustomerFrozen; unfreezing must allow the save."""
        frappe.db.set_value("Customer", "_Test Customer", "is_frozen", 1)
        from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
        so = make_sales_order(do_not_save= True)
        self.assertRaises(CustomerFrozen, so.save)
        # Unfreeze and verify the same document now saves.
        frappe.db.set_value("Customer", "_Test Customer", "is_frozen", 0)
        so.save()
|
izgzhen/servo | refs/heads/master | components/script/dom/bindings/codegen/parser/WebIDL.py | 5 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""" A WebIDL parser. """
from ply import lex, yacc
import re
import os
import traceback
import math
from collections import defaultdict
# Machinery
def parseInt(literal):
    """Parse a WebIDL integer literal — decimal, ``0x``/``0X`` hex, or
    leading-zero octal — honoring an optional leading minus sign."""
    string = literal
    if string.startswith('-'):
        sign = -1
        string = string[1:]
    else:
        sign = 1
    # A lone "0" is decimal; "0x.."/"0X.." is hex; any other "0..." is octal.
    if len(string) > 1 and string[0] == '0':
        if string[1] in 'xX':
            base = 16
            string = string[2:]
        else:
            base = 8
            string = string[1:]
    else:
        base = 10
    return sign * int(string, base)
# Magic for creating enums
def M_add_class_attribs(attribs, start):
    """Build a metaclass callable that injects each name in ``attribs`` as a
    class attribute with consecutive integer values beginning at ``start``,
    plus a ``length`` attribute one past the last value."""
    def make_class(name, bases, dict_):
        for offset, attrib in enumerate(attribs):
            dict_[attrib] = start + offset
        # 'length' is reserved for the generated sentinel value.
        assert 'length' not in dict_
        dict_['length'] = start + len(attribs)
        return type(name, bases, dict_)
    return make_class
def enum(*names, **kw):
    """Create a read-only enum instance whose attributes are the given names
    with consecutive integer values. Pass ``base=<existing enum>`` to extend
    it, continuing numbering after its ``length``."""
    if len(kw) == 1:
        base = kw['base'].__class__
        start = base.length
    else:
        # Only 'base' is an accepted keyword.
        assert len(kw) == 0
        base = object
        start = 0
    class Foo(base):
        # Python 2 metaclass declaration: attribute values are injected by
        # M_add_class_attribs. (This syntax is ignored on Python 3.)
        __metaclass__ = M_add_class_attribs(names, start)
        def __setattr__(self, name, value):  # this makes it read-only
            raise NotImplementedError
    return Foo()
class WebIDLError(Exception):
    """A parse/validation error (or warning) with source locations attached."""

    def __init__(self, message, locations, warning=False):
        self.message = message
        # Stringify locations eagerly so they stay printable later.
        self.locations = [str(loc) for loc in locations]
        self.warning = warning

    def __str__(self):
        severity = 'warning' if self.warning else 'error'
        separator = ", " if self.locations else ""
        return "%s: %s%s%s" % (severity, self.message, separator,
                               "\n".join(self.locations))
class Location(object):
    """A position inside a lexed IDL file. Line/column and the source line
    text are computed lazily by resolve() from the raw lexer data."""
    def __init__(self, lexer, lineno, lexpos, filename):
        self._line = None  # cached source-line text; filled in by resolve()
        self._lineno = lineno
        self._lexpos = lexpos
        self._lexdata = lexer.lexdata
        self._file = filename if filename else "<unknown>"
    def __eq__(self, other):
        # Equal when they point at the same offset of the same file; the
        # derived line/column need not be compared.
        return (self._lexpos == other._lexpos and
                self._file == other._file)
    def filename(self):
        return self._file
    def resolve(self):
        # Idempotent: the cached line text doubles as the resolved flag,
        # which matters because the lineno adjustment below must run once.
        if self._line:
            return
        startofline = self._lexdata.rfind('\n', 0, self._lexpos) + 1
        # Cap the displayed line at 80 characters past the position.
        endofline = self._lexdata.find('\n', self._lexpos, self._lexpos + 80)
        if endofline != -1:
            self._line = self._lexdata[startofline:endofline]
        else:
            self._line = self._lexdata[startofline:]
        self._colno = self._lexpos - startofline
        # Our line number seems to point to the start of self._lexdata
        self._lineno += self._lexdata.count('\n', 0, startofline)
    def get(self):
        """Short one-line description: 'file line L:C'."""
        self.resolve()
        return "%s line %s:%s" % (self._file, self._lineno, self._colno)
    def _pointerline(self):
        # Caret under the offending column for error display.
        return " " * self._colno + "^"
    def __str__(self):
        self.resolve()
        return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno,
                                          self._line, self._pointerline())
class BuiltinLocation(object):
    """Stand-in Location for objects with no IDL source, only a message."""

    def __init__(self, text):
        self.msg = text + "\n"

    def __eq__(self, other):
        return isinstance(other, BuiltinLocation) and other.msg == self.msg

    def filename(self):
        return '<builtin>'

    def resolve(self):
        # Nothing to resolve: the message is already final.
        pass

    def get(self):
        return self.msg

    def __str__(self):
        return self.get()
# Data Model
class IDLObject(object):
    """Base class for everything the parser produces: carries a source
    location and a free-form per-object user-data dict."""
    def __init__(self, location):
        self.location = location
        self.userData = dict()
    def filename(self):
        return self.location.filename()
    # Type-test predicates: subclasses override the ones that apply to them.
    def isInterface(self):
        return False
    def isNamespace(self):
        return False
    def isEnum(self):
        return False
    def isCallback(self):
        return False
    def isType(self):
        return False
    def isDictionary(self):
        return False
    def isUnion(self):
        return False
    def isTypedef(self):
        return False
    def getUserData(self, key, default):
        return self.userData.get(key, default)
    def setUserData(self, key, value):
        self.userData[key] = value
    def addExtendedAttributes(self, attrs):
        assert False  # Override me!
    def handleExtendedAttribute(self, attr):
        assert False  # Override me!
    def _getDependentObjects(self):
        # Subclasses return the objects this one references.
        assert False  # Override me!
    def getDeps(self, visited=None):
        """ Return a set of files that this object depends on.  If any of
            these files are changed the parser needs to be rerun to regenerate
            a new IDLObject.
            The visited argument is a set of all the objects already visited.
            We must test to see if we are in it, and if so, do nothing.  This
            prevents infinite recursion."""
        # NB: We can't use visited=set() above because the default value is
        # evaluated when the def statement is evaluated, not when the function
        # is executed, so there would be one set for all invocations.
        if visited is None:
            visited = set()
        if self in visited:
            return set()
        visited.add(self)
        deps = set()
        # Builtin objects have no file of their own to depend on.
        if self.filename() != "<builtin>":
            deps.add(self.filename())
        for d in self._getDependentObjects():
            deps.update(d.getDeps(visited))
        return deps
class IDLScope(IDLObject):
    """A naming scope: maps identifier names to the objects declared in it
    and enforces uniqueness, resolving or rejecting collisions."""
    def __init__(self, location, parentScope, identifier):
        IDLObject.__init__(self, location)
        self.parentScope = parentScope
        if identifier:
            assert isinstance(identifier, IDLIdentifier)
            self._name = identifier
        else:
            # The global (root) scope has no name of its own.
            self._name = None
        self._dict = {}
        self.globalNames = set()
        # A mapping from global name to the set of global interfaces
        # that have that global name.
        self.globalNameMapping = defaultdict(set)
        self.primaryGlobalAttr = None
        self.primaryGlobalName = None
    def __str__(self):
        return self.QName()
    def QName(self):
        # Scope qualified names always end with "::".
        if self._name:
            return self._name.QName() + "::"
        return "::"
    def ensureUnique(self, identifier, object):
        """
            Ensure that there is at most one 'identifier' in scope ('self').
            Note that object can be None.  This occurs if we end up here for an
            interface type we haven't seen yet.
        """
        assert isinstance(identifier, IDLUnresolvedIdentifier)
        assert not object or isinstance(object, IDLObjectWithIdentifier)
        assert not object or object.identifier == identifier
        if identifier.name in self._dict:
            if not object:
                return
            # ensureUnique twice with the same object is not allowed
            assert id(object) != id(self._dict[identifier.name])
            replacement = self.resolveIdentifierConflict(self, identifier,
                                                         self._dict[identifier.name],
                                                         object)
            # The conflict resolver decides which object the name now maps to.
            self._dict[identifier.name] = replacement
            return
        assert object
        self._dict[identifier.name] = object
    def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
        """Decide what happens when two objects claim the same name; returns
        the object that wins, or raises WebIDLError."""
        # Duplicate forward declarations of the same external interface are
        # harmless: keep the first.
        if (isinstance(originalObject, IDLExternalInterface) and
            isinstance(newObject, IDLExternalInterface) and
            originalObject.identifier.name == newObject.identifier.name):
            return originalObject
        if (isinstance(originalObject, IDLExternalInterface) or
            isinstance(newObject, IDLExternalInterface)):
            raise WebIDLError(
                "Name collision between "
                "interface declarations for identifier '%s' at '%s' and '%s'"
                % (identifier.name,
                    originalObject.location, newObject.location), [])
        if (isinstance(originalObject, IDLDictionary) or
            isinstance(newObject, IDLDictionary)):
            raise WebIDLError(
                "Name collision between dictionary declarations for "
                "identifier '%s'.\n%s\n%s"
                % (identifier.name,
                    originalObject.location, newObject.location), [])
        # We do the merging of overloads here as opposed to in IDLInterface
        # because we need to merge overloads of NamedConstructors and we need to
        # detect conflicts in those across interfaces. See also the comment in
        # IDLInterface.addExtendedAttributes for "NamedConstructor".
        if (originalObject.tag == IDLInterfaceMember.Tags.Method and
           newObject.tag == IDLInterfaceMember.Tags.Method):
            return originalObject.addOverload(newObject)
        # Default to throwing, derived classes can override.
        conflictdesc = "\n\t%s at %s\n\t%s at %s" % (originalObject,
                                                     originalObject.location,
                                                     newObject,
                                                     newObject.location)
        raise WebIDLError(
            "Multiple unresolvable definitions of identifier '%s' in scope '%s%s"
            % (identifier.name, str(self), conflictdesc), [])
    def _lookupIdentifier(self, identifier):
        return self._dict[identifier.name]
    def lookupIdentifier(self, identifier):
        # Only resolved identifiers belonging to this scope may be looked up.
        assert isinstance(identifier, IDLIdentifier)
        assert identifier.scope == self
        return self._lookupIdentifier(identifier)
class IDLIdentifier(IDLObject):
    """An identifier that has been resolved into a particular IDLScope."""
    def __init__(self, location, scope, name):
        IDLObject.__init__(self, location)
        self.name = name
        assert isinstance(scope, IDLScope)
        self.scope = scope
    def __str__(self):
        return self.QName()
    def QName(self):
        # Fully qualified name: scope prefix (ending in "::") plus bare name.
        return self.scope.QName() + self.name
    def __hash__(self):
        return self.QName().__hash__()
    def __eq__(self, other):
        # Identifiers compare equal iff their fully qualified names match.
        return self.QName() == other.QName()
    def object(self):
        # The declaration this identifier names, looked up in its scope.
        return self.scope.lookupIdentifier(self)
class IDLUnresolvedIdentifier(IDLObject):
    """An identifier as spelled in source, not yet bound to a scope.
    Validates reserved-name rules on construction; resolve() binds it."""
    def __init__(self, location, name, allowDoubleUnderscore=False,
                 allowForbidden=False):
        IDLObject.__init__(self, location)
        assert len(name) > 0
        if name == "__noSuchMethod__":
            raise WebIDLError("__noSuchMethod__ is deprecated", [location])
        # "__content" is a sanctioned exception to the double-underscore rule.
        if name[:2] == "__" and name != "__content" and not allowDoubleUnderscore:
            raise WebIDLError("Identifiers beginning with __ are reserved",
                              [location])
        # A single leading underscore is an escape for reserved words; strip it.
        if name[0] == '_' and not allowDoubleUnderscore:
            name = name[1:]
        # TODO: Bug 872377, Restore "toJSON" to below list.
        # We sometimes need custom serialization, so allow toJSON for now.
        if (name in ["constructor", "toString"] and
            not allowForbidden):
            raise WebIDLError("Cannot use reserved identifier '%s'" % (name),
                              [location])
        self.name = name
    def __str__(self):
        return self.QName()
    def QName(self):
        return "<unresolved scope>::" + self.name
    def resolve(self, scope, object):
        """Bind this identifier (and optionally ``object``) into ``scope``,
        returning the resulting IDLIdentifier."""
        assert isinstance(scope, IDLScope)
        assert not object or isinstance(object, IDLObjectWithIdentifier)
        assert not object or object.identifier == self
        scope.ensureUnique(self, object)
        identifier = IDLIdentifier(self.location, scope, self.name)
        if object:
            object.identifier = identifier
        return identifier
    def finish(self):
        assert False  # Should replace with a resolved identifier first.
class IDLObjectWithIdentifier(IDLObject):
    """An IDLObject carrying an (initially unresolved) identifier.

    When a parent scope is supplied at construction, the identifier is
    resolved into that scope immediately.
    """
    def __init__(self, location, parentScope, identifier):
        IDLObject.__init__(self, location)

        assert isinstance(identifier, IDLUnresolvedIdentifier)

        self.identifier = identifier

        if parentScope:
            self.resolve(parentScope)

        # Null-handling mode for string types; may be overridden by the
        # [TreatNullAs] extended attribute below.
        self.treatNullAs = "Default"

    def resolve(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(self.identifier, IDLUnresolvedIdentifier)
        self.identifier.resolve(parentScope, self)

    def checkForStringHandlingExtendedAttributes(self, attrs,
                                                 isDictionaryMember=False,
                                                 isOptional=False):
        """
        A helper function to deal with TreatNullAs.  Returns the list
        of attrs it didn't handle itself.
        """
        assert isinstance(self, IDLArgument) or isinstance(self, IDLAttribute)
        unhandledAttrs = []
        for attr in attrs:
            if not attr.hasValue():
                unhandledAttrs.append(attr)
                continue

            if attr.identifier() != "TreatNullAs":
                unhandledAttrs.append(attr)
                continue

            value = attr.value()
            if not self.type.isDOMString() or self.type.nullable():
                raise WebIDLError("[TreatNullAs] is only allowed on "
                                  "arguments or attributes whose type is "
                                  "DOMString",
                                  [self.location])
            if isDictionaryMember:
                raise WebIDLError("[TreatNullAs] is not allowed for "
                                  "dictionary members", [self.location])
            if value != 'EmptyString':
                raise WebIDLError("[TreatNullAs] must take the identifier "
                                  "'EmptyString', not '%s'" % value,
                                  [self.location])
            self.treatNullAs = value

        return unhandledAttrs
class IDLObjectWithScope(IDLObjectWithIdentifier, IDLScope):
    """An identified object that is itself a scope (e.g. an interface)."""
    def __init__(self, location, parentScope, identifier):
        assert isinstance(identifier, IDLUnresolvedIdentifier)

        # Resolve our identifier into the parent scope first; the scope
        # machinery then needs the *resolved* identifier.
        IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
        IDLScope.__init__(self, location, parentScope, self.identifier)
class IDLIdentifierPlaceholder(IDLObjectWithIdentifier):
    """A forward reference to a name that gets resolved during finish()."""
    def __init__(self, location, identifier):
        assert isinstance(identifier, IDLUnresolvedIdentifier)
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)

    def finish(self, scope):
        """Resolve the placeholder in *scope*; raise WebIDLError if unknown."""
        # Probe the scope first so we can report a clean "unresolved type"
        # error instead of failing inside resolve().
        try:
            scope._lookupIdentifier(self.identifier)
        except KeyError:
            # Narrowed from a bare "except:": _lookupIdentifier signals an
            # unknown name via KeyError from the scope's name table; a bare
            # except would also swallow KeyboardInterrupt and genuine bugs.
            raise WebIDLError("Unresolved type '%s'." % self.identifier,
                              [self.location])

        obj = self.identifier.resolve(scope, None)
        return scope.lookupIdentifier(obj)
class IDLExposureMixins():
    """Mixin tracking which global scopes an IDL construct is exposed in."""
    def __init__(self, location):
        # _exposureGlobalNames are the global names listed in our [Exposed]
        # extended attribute.  exposureSet is the exposure set as defined in
        # the Web IDL spec: it contains interface names.
        self._exposureGlobalNames = set()
        self.exposureSet = set()
        self._location = location
        self._globalScope = None

    def finish(self, scope):
        assert scope.parentScope is None
        self._globalScope = scope

        # Verify that our [Exposed] value, if any, makes sense.
        for globalName in self._exposureGlobalNames:
            if globalName not in scope.globalNames:
                raise WebIDLError("Unknown [Exposed] value %s" % globalName,
                                  [self._location])

        # With no explicit [Exposed], default to the primary global.
        if not self._exposureGlobalNames:
            self._exposureGlobalNames.add(scope.primaryGlobalName)

        globalNameSetToExposureSet(scope, self._exposureGlobalNames,
                                   self.exposureSet)

    def isExposedInWindow(self):
        return 'Window' in self.exposureSet

    def isExposedOnMainThread(self):
        return self.isExposedInWindow() or self.isExposedInSystemGlobals()

    def isExposedInAnyWorker(self):
        return len(self.getWorkerExposureSet()) > 0

    def isExposedInWorkerDebugger(self):
        return len(self.getWorkerDebuggerExposureSet()) > 0

    def isExposedInSystemGlobals(self):
        return 'BackstagePass' in self.exposureSet

    def isExposedInSomeButNotAllWorkers(self):
        """
        Returns true if the Exposed extended attribute for this interface
        exposes it in some worker globals but not others.  The return value
        does not depend on whether the interface is exposed in Window or
        System globals.
        """
        if not self.isExposedInAnyWorker():
            return False
        # NOTE(review): this reads self.parentScope, unlike the accessors
        # below which use self._globalScope — presumably equivalent because
        # finish() asserts the scope has no parent; confirm before changing.
        workerScopes = self.parentScope.globalNameMapping["Worker"]
        return len(workerScopes.difference(self.exposureSet)) > 0

    def getWorkerExposureSet(self):
        workerScopes = self._globalScope.globalNameMapping["Worker"]
        return workerScopes.intersection(self.exposureSet)

    def getWorkerDebuggerExposureSet(self):
        debuggerScopes = self._globalScope.globalNameMapping["WorkerDebugger"]
        return debuggerScopes.intersection(self.exposureSet)
class IDLExternalInterface(IDLObjectWithIdentifier, IDLExposureMixins):
    """An interface declared elsewhere ("interface Foo;") with no body here."""
    def __init__(self, location, parentScope, identifier):
        assert isinstance(identifier, IDLUnresolvedIdentifier)
        assert isinstance(parentScope, IDLScope)
        self.parent = None
        IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
        IDLExposureMixins.__init__(self, location)
        IDLObjectWithIdentifier.resolve(self, parentScope)

    def finish(self, scope):
        IDLExposureMixins.finish(self, scope)

    def validate(self):
        pass

    def isIteratorInterface(self):
        return False

    def isExternal(self):
        return True

    def isInterface(self):
        return True

    def isConsequential(self):
        return False

    def addExtendedAttributes(self, attrs):
        # External interfaces accept no extended attributes.
        assert len(attrs) == 0

    def resolve(self, parentScope):
        # Already resolved in __init__; later resolve() calls are no-ops.
        pass

    def getJSImplementation(self):
        return None

    def isJSImplemented(self):
        return False

    def isProbablyShortLivingObject(self):
        return False

    def isNavigatorProperty(self):
        return False

    def _getDependentObjects(self):
        return set()
class IDLPartialInterfaceOrNamespace(IDLObject):
    """A "partial interface" / "partial namespace" declaration.

    Holds its members until finish(), at which point the non-partial
    declaration merges them in; selected extended attributes are either
    propagated to the non-partial declaration or pushed down onto members.
    """
    def __init__(self, location, name, members, nonPartialInterfaceOrNamespace):
        assert isinstance(name, IDLUnresolvedIdentifier)

        IDLObject.__init__(self, location)
        self.identifier = name
        self.members = members
        # propagatedExtendedAttrs are the ones that should get
        # propagated to our non-partial interface.
        self.propagatedExtendedAttrs = []
        self._haveSecureContextExtendedAttribute = False
        self._nonPartialInterfaceOrNamespace = nonPartialInterfaceOrNamespace
        self._finished = False
        # Register ourselves so the non-partial declaration can merge us in.
        nonPartialInterfaceOrNamespace.addPartialInterface(self)

    def addExtendedAttributes(self, attrs):
        # Only Constructor/NamedConstructor (propagated to the non-partial
        # declaration) and SecureContext/Exposed (pushed down to members)
        # are legal on a partial declaration.
        for attr in attrs:
            identifier = attr.identifier()

            if identifier in ["Constructor", "NamedConstructor"]:
                self.propagatedExtendedAttrs.append(attr)
            elif identifier == "SecureContext":
                self._haveSecureContextExtendedAttribute = True
                # This gets propagated to all our members.
                for member in self.members:
                    if member.getExtendedAttribute("SecureContext"):
                        raise WebIDLError("[SecureContext] specified on both a "
                                          "partial interface member and on the "
                                          "partial interface itself",
                                          [member.location, attr.location])
                    member.addExtendedAttributes([attr])
            elif identifier == "Exposed":
                # This just gets propagated to all our members.
                for member in self.members:
                    if len(member._exposureGlobalNames) != 0:
                        raise WebIDLError("[Exposed] specified on both a "
                                          "partial interface member and on the "
                                          "partial interface itself",
                                          [member.location, attr.location])
                    member.addExtendedAttributes([attr])
            else:
                raise WebIDLError("Unknown extended attribute %s on partial "
                                  "interface" % identifier,
                                  [attr.location])

    def finish(self, scope):
        if self._finished:
            return
        self._finished = True
        if (not self._haveSecureContextExtendedAttribute and
            self._nonPartialInterfaceOrNamespace.getExtendedAttribute("SecureContext")):
            # The non-partial declaration is [SecureContext] but we are not:
            # propagate its [SecureContext] to each of our members.
            for member in self.members:
                if member.getExtendedAttribute("SecureContext"):
                    raise WebIDLError("[SecureContext] specified on both a "
                                      "partial interface member and on the "
                                      "non-partial interface",
                                      [member.location,
                                       self._nonPartialInterfaceOrNamespace.location])
                member.addExtendedAttributes(
                    [IDLExtendedAttribute(self._nonPartialInterfaceOrNamespace.location,
                                          ("SecureContext",))])
        # Need to make sure our non-partial interface or namespace gets
        # finished so it can report cases when we only have partial
        # interfaces/namespaces.
        self._nonPartialInterfaceOrNamespace.finish(scope)

    def validate(self):
        # All real validation happens on the non-partial declaration.
        pass
def convertExposedAttrToGlobalNameSet(exposedAttr, targetSet):
    """Populate *targetSet* with the global name(s) from an [Exposed] attr.

    The attribute carries either a single value (Exposed=Window) or an
    argument list (Exposed=(Window,Worker)); targetSet must start empty.
    """
    assert not targetSet
    if not exposedAttr.hasValue():
        # Multi-global form: Exposed=(A,B,...)
        assert exposedAttr.hasArgs()
        targetSet.update(exposedAttr.args())
    else:
        # Single-global form: Exposed=A
        targetSet.add(exposedAttr.value())
def globalNameSetToExposureSet(globalScope, nameSet, exposureSet):
    """Expand global *names* into the interface names they denote.

    Each name in nameSet maps (via globalScope.globalNameMapping) to a set
    of global interface names; their union is added to exposureSet in place.
    """
    mapping = globalScope.globalNameMapping
    for globalName in nameSet:
        exposureSet.update(mapping[globalName])
class IDLInterfaceOrNamespace(IDLObjectWithScope, IDLExposureMixins):
    def __init__(self, location, parentScope, name, parent, members,
                 isKnownNonPartial):
        """Create an interface or namespace.

        parent is an IDLIdentifierPlaceholder for the inherited interface (or
        None).  isKnownNonPartial is False while only forward/partial
        declarations of this name have been seen; parent and members are then
        required to be empty and get supplied later via setNonPartial().
        """
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)
        assert isKnownNonPartial or not parent
        assert isKnownNonPartial or len(members) == 0
        self.parent = None
        self._callback = False
        self._finished = False
        self.members = []
        # The single maplike/setlike/iterable member, if any (set in finish()).
        self.maplikeOrSetlikeOrIterable = None
        self._partialInterfaces = []
        self._extendedAttrDict = {}
        # namedConstructors needs deterministic ordering because bindings code
        # outputs the constructs in the order that namedConstructors enumerates
        # them.
        self.namedConstructors = list()
        self.implementedInterfaces = set()
        self._consequential = False
        self._isKnownNonPartial = False
        # self.interfacesBasedOnSelf is the set of interfaces that inherit from
        # self or have self as a consequential interface, including self itself.
        # Used for distinguishability checking.
        self.interfacesBasedOnSelf = set([self])
        # self.interfacesImplementingSelf is the set of interfaces that directly
        # have self as a consequential interface
        self.interfacesImplementingSelf = set()
        self._hasChildInterfaces = False
        self._isOnGlobalProtoChain = False
        # Tracking of the number of reserved slots we need for our
        # members and those of ancestor interfaces.
        self.totalMembersInSlots = 0
        # Tracking of the number of own own members we have in slots
        self._ownMembersInSlots = 0
        # If this is an iterator interface, we need to know what iterable
        # interface we're iterating for in order to get its nativeType.
        self.iterableInterface = None
        IDLObjectWithScope.__init__(self, location, parentScope, name)
        IDLExposureMixins.__init__(self, location)
        if isKnownNonPartial:
            self.setNonPartial(location, parent, members)
def ctor(self):
identifier = IDLUnresolvedIdentifier(self.location, "constructor",
allowForbidden=True)
try:
return self._lookupIdentifier(identifier)
except:
return None
def isIterable(self):
return (self.maplikeOrSetlikeOrIterable and
self.maplikeOrSetlikeOrIterable.isIterable())
def isIteratorInterface(self):
return self.iterableInterface is not None
def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
assert isinstance(scope, IDLScope)
assert isinstance(originalObject, IDLInterfaceMember)
assert isinstance(newObject, IDLInterfaceMember)
retval = IDLScope.resolveIdentifierConflict(self, scope, identifier,
originalObject, newObject)
# Might be a ctor, which isn't in self.members
if newObject in self.members:
self.members.remove(newObject)
return retval
def finish(self, scope):
if self._finished:
return
self._finished = True
if not self._isKnownNonPartial:
raise WebIDLError("Interface %s does not have a non-partial "
"declaration" % self.identifier.name,
[self.location])
IDLExposureMixins.finish(self, scope)
# Now go ahead and merge in our partial interfaces.
for partial in self._partialInterfaces:
partial.finish(scope)
self.addExtendedAttributes(partial.propagatedExtendedAttrs)
self.members.extend(partial.members)
# Generate maplike/setlike interface members. Since generated members
# need to be treated like regular interface members, do this before
# things like exposure setting.
for member in self.members:
if member.isMaplikeOrSetlikeOrIterable():
# Check that we only have one interface declaration (currently
# there can only be one maplike/setlike declaration per
# interface)
if self.maplikeOrSetlikeOrIterable:
raise WebIDLError("%s declaration used on "
"interface that already has %s "
"declaration" %
(member.maplikeOrSetlikeOrIterableType,
self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType),
[self.maplikeOrSetlikeOrIterable.location,
member.location])
self.maplikeOrSetlikeOrIterable = member
# If we've got a maplike or setlike declaration, we'll be building all of
# our required methods in Codegen. Generate members now.
self.maplikeOrSetlikeOrIterable.expand(self.members, self.isJSImplemented())
# Now that we've merged in our partial interfaces, set the
# _exposureGlobalNames on any members that don't have it set yet. Note
# that any partial interfaces that had [Exposed] set have already set up
# _exposureGlobalNames on all the members coming from them, so this is
# just implementing the "members default to interface that defined them"
# and "partial interfaces default to interface they're a partial for"
# rules from the spec.
for m in self.members:
# If m, or the partial interface m came from, had [Exposed]
# specified, it already has a nonempty exposure global names set.
if len(m._exposureGlobalNames) == 0:
m._exposureGlobalNames.update(self._exposureGlobalNames)
assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder)
parent = self.parent.finish(scope) if self.parent else None
if parent and isinstance(parent, IDLExternalInterface):
raise WebIDLError("%s inherits from %s which does not have "
"a definition" %
(self.identifier.name,
self.parent.identifier.name),
[self.location])
assert not parent or isinstance(parent, IDLInterface)
self.parent = parent
assert iter(self.members)
if self.isNamespace():
assert not self.parent
for m in self.members:
if m.isAttr() or m.isMethod():
if m.isStatic():
raise WebIDLError("Don't mark things explicitly static "
"in namespaces",
[self.location, m.location])
# Just mark all our methods/attributes as static. The other
# option is to duplicate the relevant InterfaceMembers
# production bits but modified to produce static stuff to
# start with, but that sounds annoying.
m.forceStatic()
if self.parent:
self.parent.finish(scope)
self.parent._hasChildInterfaces = True
self.totalMembersInSlots = self.parent.totalMembersInSlots
# Interfaces with [Global] or [PrimaryGlobal] must not
# have anything inherit from them
if (self.parent.getExtendedAttribute("Global") or
self.parent.getExtendedAttribute("PrimaryGlobal")):
# Note: This is not a self.parent.isOnGlobalProtoChain() check
# because ancestors of a [Global] interface can have other
# descendants.
raise WebIDLError("[Global] interface has another interface "
"inheriting from it",
[self.location, self.parent.location])
# Make sure that we're not exposed in places where our parent is not
if not self.exposureSet.issubset(self.parent.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"parent interface %s is not exposed." %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Callbacks must not inherit from non-callbacks or inherit from
# anything that has consequential interfaces.
# XXXbz Can non-callbacks inherit from callbacks? Spec issue pending.
# XXXbz Can callbacks have consequential interfaces? Spec issue pending
if self.isCallback():
if not self.parent.isCallback():
raise WebIDLError("Callback interface %s inheriting from "
"non-callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
elif self.parent.isCallback():
raise WebIDLError("Non-callback interface %s inheriting from "
"callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Interfaces which have interface objects can't inherit
# from [NoInterfaceObject] interfaces.
if (self.parent.getExtendedAttribute("NoInterfaceObject") and
not self.getExtendedAttribute("NoInterfaceObject")):
raise WebIDLError("Interface %s does not have "
"[NoInterfaceObject] but inherits from "
"interface %s which does" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Interfaces that are not [SecureContext] can't inherit
# from [SecureContext] interfaces.
if (self.parent.getExtendedAttribute("SecureContext") and
not self.getExtendedAttribute("SecureContext")):
raise WebIDLError("Interface %s does not have "
"[SecureContext] but inherits from "
"interface %s which does" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
for iface in self.implementedInterfaces:
iface.finish(scope)
cycleInGraph = self.findInterfaceLoopPoint(self)
if cycleInGraph:
raise WebIDLError("Interface %s has itself as ancestor or "
"implemented interface" % self.identifier.name,
[self.location, cycleInGraph.location])
if self.isCallback():
# "implements" should have made sure we have no
# consequential interfaces.
assert len(self.getConsequentialInterfaces()) == 0
# And that we're not consequential.
assert not self.isConsequential()
# Now resolve() and finish() our members before importing the
# ones from our implemented interfaces.
# resolve() will modify self.members, so we need to iterate
# over a copy of the member list here.
for member in list(self.members):
member.resolve(self)
for member in self.members:
member.finish(scope)
# Now that we've finished our members, which has updated their exposure
# sets, make sure they aren't exposed in places where we are not.
for member in self.members:
if not member.exposureSet.issubset(self.exposureSet):
raise WebIDLError("Interface member has larger exposure set "
"than the interface itself",
[member.location, self.location])
ctor = self.ctor()
if ctor is not None:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
for ctor in self.namedConstructors:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
# Make a copy of our member list, so things that implement us
# can get those without all the stuff we implement ourselves
# admixed.
self.originalMembers = list(self.members)
# Import everything from our consequential interfaces into
# self.members. Sort our consequential interfaces by name
# just so we have a consistent order.
for iface in sorted(self.getConsequentialInterfaces(),
cmp=cmp,
key=lambda x: x.identifier.name):
# Flag the interface as being someone's consequential interface
iface.setIsConsequentialInterfaceOf(self)
# Verify that we're not exposed somewhere where iface is not exposed
if not self.exposureSet.issubset(iface.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"consequential interface %s is not exposed." %
(self.identifier.name, iface.identifier.name),
[self.location, iface.location])
# If we have a maplike or setlike, and the consequential interface
# also does, throw an error.
if iface.maplikeOrSetlikeOrIterable and self.maplikeOrSetlikeOrIterable:
raise WebIDLError("Maplike/setlike/iterable interface %s cannot have "
"maplike/setlike/iterable interface %s as a "
"consequential interface" %
(self.identifier.name,
iface.identifier.name),
[self.maplikeOrSetlikeOrIterable.location,
iface.maplikeOrSetlikeOrIterable.location])
additionalMembers = iface.originalMembers
for additionalMember in additionalMembers:
for member in self.members:
if additionalMember.identifier.name == member.identifier.name:
raise WebIDLError(
"Multiple definitions of %s on %s coming from 'implements' statements" %
(member.identifier.name, self),
[additionalMember.location, member.location])
self.members.extend(additionalMembers)
iface.interfacesImplementingSelf.add(self)
for ancestor in self.getInheritedInterfaces():
ancestor.interfacesBasedOnSelf.add(self)
if (ancestor.maplikeOrSetlikeOrIterable is not None and
self.maplikeOrSetlikeOrIterable is not None):
raise WebIDLError("Cannot have maplike/setlike on %s that "
"inherits %s, which is already "
"maplike/setlike" %
(self.identifier.name,
ancestor.identifier.name),
[self.maplikeOrSetlikeOrIterable.location,
ancestor.maplikeOrSetlikeOrIterable.location])
for ancestorConsequential in ancestor.getConsequentialInterfaces():
ancestorConsequential.interfacesBasedOnSelf.add(self)
# Deal with interfaces marked [Unforgeable], now that we have our full
# member list, except unforgeables pulled in from parents. We want to
# do this before we set "originatingInterface" on our unforgeable
# members.
if self.getExtendedAttribute("Unforgeable"):
# Check that the interface already has all the things the
# spec would otherwise require us to synthesize and is
# missing the ones we plan to synthesize.
if not any(m.isMethod() and m.isStringifier() for m in self.members):
raise WebIDLError("Unforgeable interface %s does not have a "
"stringifier" % self.identifier.name,
[self.location])
for m in self.members:
if ((m.isMethod() and m.isJsonifier()) or
m.identifier.name == "toJSON"):
raise WebIDLError("Unforgeable interface %s has a "
"jsonifier so we won't be able to add "
"one ourselves" % self.identifier.name,
[self.location, m.location])
if m.identifier.name == "valueOf" and not m.isStatic():
raise WebIDLError("Unforgeable interface %s has a valueOf "
"member so we won't be able to add one "
"ourselves" % self.identifier.name,
[self.location, m.location])
for member in self.members:
if ((member.isAttr() or member.isMethod()) and
member.isUnforgeable() and
not hasattr(member, "originatingInterface")):
member.originatingInterface = self
# Compute slot indices for our members before we pull in unforgeable
# members from our parent. Also, maplike/setlike declarations get a
# slot to hold their backing object.
for member in self.members:
if ((member.isAttr() and
(member.getExtendedAttribute("StoreInSlot") or
member.getExtendedAttribute("Cached"))) or
member.isMaplikeOrSetlike()):
if member.slotIndices is None:
member.slotIndices = dict()
member.slotIndices[self.identifier.name] = self.totalMembersInSlots
self.totalMembersInSlots += 1
if member.getExtendedAttribute("StoreInSlot"):
self._ownMembersInSlots += 1
if self.parent:
# Make sure we don't shadow any of the [Unforgeable] attributes on
# our ancestor interfaces. We don't have to worry about
# consequential interfaces here, because those have already been
# imported into the relevant .members lists. And we don't have to
# worry about anything other than our parent, because it has already
# imported its ancestors unforgeable attributes into its member
# list.
for unforgeableMember in (member for member in self.parent.members if
(member.isAttr() or member.isMethod()) and
member.isUnforgeable()):
shadows = [m for m in self.members if
(m.isAttr() or m.isMethod()) and
not m.isStatic() and
m.identifier.name == unforgeableMember.identifier.name]
if len(shadows) != 0:
locs = [unforgeableMember.location] + [s.location for s
in shadows]
raise WebIDLError("Interface %s shadows [Unforgeable] "
"members of %s" %
(self.identifier.name,
ancestor.identifier.name),
locs)
# And now just stick it in our members, since we won't be
# inheriting this down the proto chain. If we really cared we
# could try to do something where we set up the unforgeable
# attributes/methods of ancestor interfaces, with their
# corresponding getters, on our interface, but that gets pretty
# complicated and seems unnecessary.
self.members.append(unforgeableMember)
# At this point, we have all of our members. If the current interface
# uses maplike/setlike, check for collisions anywhere in the current
# interface or higher in the inheritance chain.
if self.maplikeOrSetlikeOrIterable:
testInterface = self
isAncestor = False
while testInterface:
self.maplikeOrSetlikeOrIterable.checkCollisions(testInterface.members,
isAncestor)
isAncestor = True
testInterface = testInterface.parent
# Ensure that there's at most one of each {named,indexed}
# {getter,setter,creator,deleter}, at most one stringifier,
# and at most one legacycaller. Note that this last is not
# quite per spec, but in practice no one overloads
# legacycallers.
specialMembersSeen = {}
for member in self.members:
if not member.isMethod():
continue
if member.isGetter():
memberType = "getters"
elif member.isSetter():
memberType = "setters"
elif member.isCreator():
memberType = "creators"
elif member.isDeleter():
memberType = "deleters"
elif member.isStringifier():
memberType = "stringifiers"
elif member.isJsonifier():
memberType = "jsonifiers"
elif member.isLegacycaller():
memberType = "legacycallers"
else:
continue
if (memberType != "stringifiers" and memberType != "legacycallers" and
memberType != "jsonifiers"):
if member.isNamed():
memberType = "named " + memberType
else:
assert member.isIndexed()
memberType = "indexed " + memberType
if memberType in specialMembersSeen:
raise WebIDLError("Multiple " + memberType + " on %s" % (self),
[self.location,
specialMembersSeen[memberType].location,
member.location])
specialMembersSeen[memberType] = member
if self.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
# Check that we have a named getter.
if "named getters" not in specialMembersSeen:
raise WebIDLError(
"Interface with [LegacyUnenumerableNamedProperties] does "
"not have a named getter",
[self.location])
ancestor = self.parent
while ancestor:
if ancestor.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
raise WebIDLError(
"Interface with [LegacyUnenumerableNamedProperties] "
"inherits from another interface with "
"[LegacyUnenumerableNamedProperties]",
[self.location, ancestor.location])
ancestor = ancestor.parent
if self._isOnGlobalProtoChain:
# Make sure we have no named setters, creators, or deleters
for memberType in ["setter", "creator", "deleter"]:
memberId = "named " + memberType + "s"
if memberId in specialMembersSeen:
raise WebIDLError("Interface with [Global] has a named %s" %
memberType,
[self.location,
specialMembersSeen[memberId].location])
# Make sure we're not [OverrideBuiltins]
if self.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] also has "
"[OverrideBuiltins]",
[self.location])
# Mark all of our ancestors as being on the global's proto chain too
parent = self.parent
while parent:
# Must not inherit from an interface with [OverrideBuiltins]
if parent.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] inherits from "
"interface with [OverrideBuiltins]",
[self.location, parent.location])
parent._isOnGlobalProtoChain = True
parent = parent.parent
    def validate(self):
        """Run post-finish() checks on this interface and all its members.

        Covers [Unforgeable] restrictions, per-member validation,
        [PutForwards] target/cycle checks, [Alias] restrictions,
        permission-attribute exposure rules, conditional-exposure sanity,
        and iterable/indexed-getter consistency.
        """
        # We don't support consequential unforgeable interfaces.  Need to check
        # this here, because in finish() an interface might not know yet that
        # it's consequential.
        if self.getExtendedAttribute("Unforgeable") and self.isConsequential():
            raise WebIDLError(
                "%s is an unforgeable consequential interface" %
                self.identifier.name,
                [self.location] +
                list(i.location for i in
                     (self.interfacesBasedOnSelf - {self})))

        # We also don't support inheriting from unforgeable interfaces.
        if self.getExtendedAttribute("Unforgeable") and self.hasChildInterfaces():
            locations = ([self.location] +
                         list(i.location for i in
                              self.interfacesBasedOnSelf if i.parent == self))
            raise WebIDLError("%s is an unforgeable ancestor interface" %
                              self.identifier.name,
                              locations)

        indexedGetter = None
        hasLengthAttribute = False
        for member in self.members:
            member.validate()

            if self.isCallback() and member.getExtendedAttribute("Replaceable"):
                raise WebIDLError("[Replaceable] used on an attribute on "
                                  "interface %s which is a callback interface" %
                                  self.identifier.name,
                                  [self.location, member.location])

            # Check that PutForwards refers to another attribute and that no
            # cycles exist in forwarded assignments.  Also check for a
            # integer-typed "length" attribute.
            if member.isAttr():
                if (member.identifier.name == "length" and
                    member.type.isInteger()):
                    hasLengthAttribute = True

                iface = self
                attr = member
                putForwards = attr.getExtendedAttribute("PutForwards")
                if putForwards and self.isCallback():
                    raise WebIDLError("[PutForwards] used on an attribute "
                                      "on interface %s which is a callback "
                                      "interface" % self.identifier.name,
                                      [self.location, member.location])

                # Walk the chain of forwarded attributes until it ends or
                # loops back to the starting member.
                while putForwards is not None:
                    forwardIface = attr.type.unroll().inner
                    fowardAttr = None

                    for forwardedMember in forwardIface.members:
                        if (not forwardedMember.isAttr() or
                            forwardedMember.identifier.name != putForwards[0]):
                            continue
                        if forwardedMember == member:
                            raise WebIDLError("Cycle detected in forwarded "
                                              "assignments for attribute %s on "
                                              "%s" %
                                              (member.identifier.name, self),
                                              [member.location])
                        fowardAttr = forwardedMember
                        break

                    if fowardAttr is None:
                        # NOTE(review): putForwards here is the extended
                        # attribute's value list, so the message prints e.g.
                        # "['foo']" rather than "foo" — presumably intended
                        # to be putForwards[0]; confirm before changing.
                        raise WebIDLError("Attribute %s on %s forwards to "
                                          "missing attribute %s" %
                                          (attr.identifier.name, iface, putForwards),
                                          [attr.location])

                    iface = forwardIface
                    attr = fowardAttr
                    putForwards = attr.getExtendedAttribute("PutForwards")

            # Check that the name of an [Alias] doesn't conflict with an
            # interface member and whether we support indexed properties.
            if member.isMethod():
                if member.isGetter() and member.isIndexed():
                    indexedGetter = member

                for alias in member.aliases:
                    if self.isOnGlobalProtoChain():
                        raise WebIDLError("[Alias] must not be used on a "
                                          "[Global] interface operation",
                                          [member.location])
                    if (member.getExtendedAttribute("Exposed") or
                        member.getExtendedAttribute("ChromeOnly") or
                        member.getExtendedAttribute("Pref") or
                        member.getExtendedAttribute("Func") or
                        member.getExtendedAttribute("SecureContext") or
                        member.getExtendedAttribute("AvailableIn") or
                        member.getExtendedAttribute("CheckAnyPermissions") or
                        member.getExtendedAttribute("CheckAllPermissions")):
                        raise WebIDLError("[Alias] must not be used on a "
                                          "conditionally exposed operation",
                                          [member.location])
                    if member.isStatic():
                        raise WebIDLError("[Alias] must not be used on a "
                                          "static operation",
                                          [member.location])
                    if member.isIdentifierLess():
                        raise WebIDLError("[Alias] must not be used on an "
                                          "identifierless operation",
                                          [member.location])
                    if member.isUnforgeable():
                        raise WebIDLError("[Alias] must not be used on an "
                                          "[Unforgeable] operation",
                                          [member.location])
                    for m in self.members:
                        if m.identifier.name == alias:
                            raise WebIDLError("[Alias=%s] has same name as "
                                              "interface member" % alias,
                                              [member.location, m.location])
                        if m.isMethod() and m != member and alias in m.aliases:
                            raise WebIDLError("duplicate [Alias=%s] definitions" %
                                              alias,
                                              [member.location, m.location])

        # Permission-check attributes only make sense on primary-global-only
        # interfaces.
        for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
            if (self.getExtendedAttribute(attribute) and
                self._exposureGlobalNames != set([self.parentScope.primaryGlobalName])):
                raise WebIDLError("[%s] used on an interface that is "
                                  "not %s-only" %
                                  (attribute, self.parentScope.primaryGlobalName),
                                  [self.location])

        # Conditional exposure makes no sense for interfaces with no
        # interface object, unless they're navigator properties.
        if (self.isExposedConditionally() and
            not self.hasInterfaceObject() and
            not self.isNavigatorProperty()):
            raise WebIDLError("Interface with no interface object is "
                              "exposed conditionally",
                              [self.location])

        # Value iterators are only allowed on interfaces with indexed getters,
        # and pair iterators are only allowed on interfaces without indexed
        # getters.
        if self.isIterable():
            iterableDecl = self.maplikeOrSetlikeOrIterable
            if iterableDecl.isValueIterator():
                if not indexedGetter:
                    raise WebIDLError("Interface with value iterator does not "
                                      "support indexed properties",
                                      [self.location])

                if iterableDecl.valueType != indexedGetter.signatures()[0][0]:
                    raise WebIDLError("Iterable type does not match indexed "
                                      "getter type",
                                      [iterableDecl.location,
                                       indexedGetter.location])

                if not hasLengthAttribute:
                    raise WebIDLError('Interface with value iterator does not '
                                      'have an integer-typed "length" attribute',
                                      [self.location])
            else:
                assert iterableDecl.isPairIterator()
                if indexedGetter:
                    raise WebIDLError("Interface with pair iterator supports "
                                      "indexed properties",
                                      [self.location, iterableDecl.location,
                                       indexedGetter.location])
def isExternal(self):
return False
def setIsConsequentialInterfaceOf(self, other):
self._consequential = True
self.interfacesBasedOnSelf.add(other)
def isConsequential(self):
return self._consequential
def setCallback(self, value):
self._callback = value
def isCallback(self):
return self._callback
def isSingleOperationInterface(self):
assert self.isCallback() or self.isJSImplemented()
return (
# JS-implemented things should never need the
# this-handling weirdness of single-operation interfaces.
not self.isJSImplemented() and
# Not inheriting from another interface
not self.parent and
# No consequential interfaces
len(self.getConsequentialInterfaces()) == 0 and
# No attributes of any kinds
not any(m.isAttr() for m in self.members) and
# There is at least one regular operation, and all regular
# operations have the same identifier
len(set(m.identifier.name for m in self.members if
m.isMethod() and not m.isStatic())) == 1)
def inheritanceDepth(self):
depth = 0
parent = self.parent
while parent:
depth = depth + 1
parent = parent.parent
return depth
def hasConstants(self):
return any(m.isConst() for m in self.members)
def hasInterfaceObject(self):
if self.isCallback():
return self.hasConstants()
return not hasattr(self, "_noInterfaceObject")
def hasInterfacePrototypeObject(self):
return (not self.isCallback() and not self.isNamespace()
and self.getUserData('hasConcreteDescendant', False))
def addImplementedInterface(self, implementedInterface):
assert(isinstance(implementedInterface, IDLInterface))
self.implementedInterfaces.add(implementedInterface)
def getInheritedInterfaces(self):
"""
Returns a list of the interfaces this interface inherits from
(not including this interface itself). The list is in order
from most derived to least derived.
"""
assert(self._finished)
if not self.parent:
return []
parentInterfaces = self.parent.getInheritedInterfaces()
parentInterfaces.insert(0, self.parent)
return parentInterfaces
def getConsequentialInterfaces(self):
assert(self._finished)
# The interfaces we implement directly
consequentialInterfaces = set(self.implementedInterfaces)
# And their inherited interfaces
for iface in self.implementedInterfaces:
consequentialInterfaces |= set(iface.getInheritedInterfaces())
# And now collect up the consequential interfaces of all of those
temp = set()
for iface in consequentialInterfaces:
temp |= iface.getConsequentialInterfaces()
return consequentialInterfaces | temp
def findInterfaceLoopPoint(self, otherInterface):
"""
Finds an interface, amongst our ancestors and consequential interfaces,
that inherits from otherInterface or implements otherInterface
directly. If there is no such interface, returns None.
"""
if self.parent:
if self.parent == otherInterface:
return self
loopPoint = self.parent.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
if otherInterface in self.implementedInterfaces:
return self
for iface in self.implementedInterfaces:
loopPoint = iface.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
return None
def getExtendedAttribute(self, name):
return self._extendedAttrDict.get(name, None)
def setNonPartial(self, location, parent, members):
assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
if self._isKnownNonPartial:
raise WebIDLError("Two non-partial definitions for the "
"same %s" %
("interface" if self.isInterface()
else "namespace"),
[location, self.location])
self._isKnownNonPartial = True
# Now make it look like we were parsed at this new location, since
# that's the place where the interface is "really" defined
self.location = location
assert not self.parent
self.parent = parent
# Put the new members at the beginning
self.members = members + self.members
def addPartialInterface(self, partial):
assert self.identifier.name == partial.identifier.name
self._partialInterfaces.append(partial)
def getJSImplementation(self):
classId = self.getExtendedAttribute("JSImplementation")
if not classId:
return classId
assert isinstance(classId, list)
assert len(classId) == 1
return classId[0]
def isJSImplemented(self):
return bool(self.getJSImplementation())
def isProbablyShortLivingObject(self):
current = self
while current:
if current.getExtendedAttribute("ProbablyShortLivingObject"):
return True
current = current.parent
return False
def isNavigatorProperty(self):
naviProp = self.getExtendedAttribute("NavigatorProperty")
if not naviProp:
return False
assert len(naviProp) == 1
assert isinstance(naviProp, list)
assert len(naviProp[0]) != 0
return True
    def getNavigatorProperty(self):
        """
        For a [NavigatorProperty=name] interface, synthesize and return the
        IDLAttribute that will hang off Navigator; returns None when the
        attribute is absent.
        """
        naviProp = self.getExtendedAttribute("NavigatorProperty")
        if not naviProp:
            return None
        assert len(naviProp) == 1
        assert isinstance(naviProp, list)
        assert len(naviProp[0]) != 0
        # Propagate only the exposure-conditioning extended attributes
        # (Pref, ChromeOnly, ...) onto the synthesized attribute.
        # NOTE: viewkeys() is Python 2 only.
        conditionExtendedAttributes = self._extendedAttrDict.viewkeys() & IDLInterfaceOrNamespace.conditionExtendedAttributes
        # The attribute's type is this interface itself, by name.
        attr = IDLAttribute(self.location,
                            IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"), naviProp[0]),
                            IDLUnresolvedType(self.location, IDLUnresolvedIdentifier(self.location, self.identifier.name)),
                            True,
                            extendedAttrDict={ a: self._extendedAttrDict[a] for a in conditionExtendedAttributes },
                            navigatorObjectGetter=True)
        attr._exposureGlobalNames = self._exposureGlobalNames
        # We're abusing Constant a little bit here, because we need Cached. The
        # getter will create a new object every time, but we're never going to
        # clear the cached value.
        extendedAttrs = [ IDLExtendedAttribute(self.location, ("Throws", )),
                          IDLExtendedAttribute(self.location, ("Cached", )),
                          IDLExtendedAttribute(self.location, ("Constant", )) ]
        attr.addExtendedAttributes(extendedAttrs)
        return attr
def hasChildInterfaces(self):
return self._hasChildInterfaces
def isOnGlobalProtoChain(self):
return self._isOnGlobalProtoChain
def _getDependentObjects(self):
deps = set(self.members)
deps.update(self.implementedInterfaces)
if self.parent:
deps.add(self.parent)
return deps
def hasMembersInSlots(self):
return self._ownMembersInSlots != 0
conditionExtendedAttributes = [ "Pref", "ChromeOnly", "Func", "AvailableIn",
"SecureContext",
"CheckAnyPermissions",
"CheckAllPermissions" ]
def isExposedConditionally(self):
return any(self.getExtendedAttribute(a) for a in self.conditionExtendedAttributes)
class IDLInterface(IDLInterfaceOrNamespace):
    """A WebIDL interface definition (possibly assembled from partials)."""
    def __init__(self, location, parentScope, name, parent, members,
                 isKnownNonPartial):
        IDLInterfaceOrNamespace.__init__(self, location, parentScope, name,
                                         parent, members, isKnownNonPartial)
    def __str__(self):
        return "Interface '%s'" % self.identifier.name
    def isInterface(self):
        return True
    def addExtendedAttributes(self, attrs):
        """
        Validate and record the extended attributes written on this
        interface.  Each recognized attribute is stored in
        self._extendedAttrDict (its listValue, or True when it has no
        arguments); invalid or unknown attributes raise WebIDLError.
        """
        for attr in attrs:
            identifier = attr.identifier()
            # Special cased attrs
            if identifier == "TreatNonCallableAsNull":
                raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces",
                                  [attr.location, self.location])
            if identifier == "TreatNonObjectAsNull":
                raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces",
                                  [attr.location, self.location])
            elif identifier == "NoInterfaceObject":
                if not attr.noArguments():
                    raise WebIDLError("[NoInterfaceObject] must take no arguments",
                                      [attr.location])
                if self.ctor():
                    raise WebIDLError("Constructor and NoInterfaceObject are incompatible",
                                      [self.location])
                self._noInterfaceObject = True
            elif identifier == "Constructor" or identifier == "NamedConstructor" or identifier == "ChromeConstructor":
                if identifier == "Constructor" and not self.hasInterfaceObject():
                    raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
                                      [self.location])
                if identifier == "NamedConstructor" and not attr.hasValue():
                    raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list",
                                      [attr.location])
                if identifier == "ChromeConstructor" and not self.hasInterfaceObject():
                    raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
                                      [self.location])
                args = attr.args() if attr.hasArgs() else []
                # Promise constructors return `any`; everything else returns
                # an instance of this interface.
                if self.identifier.name == "Promise":
                    promiseType = BuiltinTypes[IDLBuiltinType.Types.any]
                else:
                    promiseType = None
                retType = IDLWrapperType(self.location, self, promiseType)
                if identifier == "Constructor" or identifier == "ChromeConstructor":
                    name = "constructor"
                    allowForbidden = True
                else:
                    name = attr.value()
                    allowForbidden = False
                # Synthesize the constructor as a static operation.
                methodIdentifier = IDLUnresolvedIdentifier(self.location, name,
                                                           allowForbidden=allowForbidden)
                method = IDLMethod(self.location, methodIdentifier, retType,
                                   args, static=True)
                # Constructors are always NewObject and are always
                # assumed to be able to throw (since there's no way to
                # indicate otherwise) and never have any other
                # extended attributes.
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("NewObject",)),
                     IDLExtendedAttribute(self.location, ("Throws",))])
                if identifier == "ChromeConstructor":
                    method.addExtendedAttributes(
                        [IDLExtendedAttribute(self.location, ("ChromeOnly",))])
                if identifier == "Constructor" or identifier == "ChromeConstructor":
                    method.resolve(self)
                else:
                    # We need to detect conflicts for NamedConstructors across
                    # interfaces. We first call resolve on the parentScope,
                    # which will merge all NamedConstructors with the same
                    # identifier accross interfaces as overloads.
                    method.resolve(self.parentScope)
                    # Then we look up the identifier on the parentScope. If the
                    # result is the same as the method we're adding then it
                    # hasn't been added as an overload and it's the first time
                    # we've encountered a NamedConstructor with that identifier.
                    # If the result is not the same as the method we're adding
                    # then it has been added as an overload and we need to check
                    # whether the result is actually one of our existing
                    # NamedConstructors.
                    newMethod = self.parentScope.lookupIdentifier(method.identifier)
                    if newMethod == method:
                        self.namedConstructors.append(method)
                    elif newMethod not in self.namedConstructors:
                        raise WebIDLError("NamedConstructor conflicts with a NamedConstructor of a different interface",
                                          [method.location, newMethod.location])
            elif (identifier == "ArrayClass"):
                if not attr.noArguments():
                    raise WebIDLError("[ArrayClass] must take no arguments",
                                      [attr.location])
                if self.parent:
                    raise WebIDLError("[ArrayClass] must not be specified on "
                                      "an interface with inherited interfaces",
                                      [attr.location, self.location])
            elif (identifier == "ExceptionClass"):
                if not attr.noArguments():
                    raise WebIDLError("[ExceptionClass] must take no arguments",
                                      [attr.location])
                if self.parent:
                    raise WebIDLError("[ExceptionClass] must not be specified on "
                                      "an interface with inherited interfaces",
                                      [attr.location, self.location])
            elif identifier == "Global":
                # [Global] or [Global=name] or [Global=(name1, name2)]:
                # default to the interface name when no value is given.
                if attr.hasValue():
                    self.globalNames = [attr.value()]
                elif attr.hasArgs():
                    self.globalNames = attr.args()
                else:
                    self.globalNames = [self.identifier.name]
                self.parentScope.globalNames.update(self.globalNames)
                for globalName in self.globalNames:
                    self.parentScope.globalNameMapping[globalName].add(self.identifier.name)
                self._isOnGlobalProtoChain = True
            elif identifier == "PrimaryGlobal":
                if not attr.noArguments():
                    raise WebIDLError("[PrimaryGlobal] must take no arguments",
                                      [attr.location])
                # Only one interface per parse may be the primary global.
                if self.parentScope.primaryGlobalAttr is not None:
                    raise WebIDLError(
                        "[PrimaryGlobal] specified twice",
                        [attr.location,
                         self.parentScope.primaryGlobalAttr.location])
                self.parentScope.primaryGlobalAttr = attr
                self.parentScope.primaryGlobalName = self.identifier.name
                self.parentScope.globalNames.add(self.identifier.name)
                self.parentScope.globalNameMapping[self.identifier.name].add(self.identifier.name)
                self._isOnGlobalProtoChain = True
            elif identifier == "SecureContext":
                if not attr.noArguments():
                    raise WebIDLError("[%s] must take no arguments" % identifier,
                                      [attr.location])
                # This gets propagated to all our members.
                for member in self.members:
                    if member.getExtendedAttribute("SecureContext"):
                        raise WebIDLError("[SecureContext] specified on both "
                                          "an interface member and on the "
                                          "interface itself",
                                          [member.location, attr.location])
                    member.addExtendedAttributes([attr])
            elif (identifier == "NeedResolve" or
                  identifier == "OverrideBuiltins" or
                  identifier == "ChromeOnly" or
                  identifier == "Unforgeable" or
                  identifier == "UnsafeInPrerendering" or
                  identifier == "LegacyEventInit" or
                  identifier == "ProbablyShortLivingObject" or
                  identifier == "LegacyUnenumerableNamedProperties" or
                  identifier == "NonOrdinaryGetPrototypeOf" or
                  identifier == "Abstract"):
                # Known extended attributes that do not take values
                if not attr.noArguments():
                    raise WebIDLError("[%s] must take no arguments" % identifier,
                                      [attr.location])
            elif identifier == "Exposed":
                convertExposedAttrToGlobalNameSet(attr,
                                                  self._exposureGlobalNames)
            elif (identifier == "Pref" or
                  identifier == "JSImplementation" or
                  identifier == "HeaderFile" or
                  identifier == "NavigatorProperty" or
                  identifier == "AvailableIn" or
                  identifier == "Func" or
                  identifier == "CheckAnyPermissions" or
                  identifier == "CheckAllPermissions" or
                  identifier == "Deprecated"):
                # Known extended attributes that take a string value
                if not attr.hasValue():
                    raise WebIDLError("[%s] must have a value" % identifier,
                                      [attr.location])
            else:
                raise WebIDLError("Unknown extended attribute %s on interface" % identifier,
                                  [attr.location])
            # Record the attribute: its value list, or True when empty.
            attrlist = attr.listValue()
            self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
class IDLNamespace(IDLInterfaceOrNamespace):
    """A WebIDL namespace: an interface-like container with no parent type."""

    def __init__(self, location, parentScope, name, members, isKnownNonPartial):
        # Namespaces never inherit, so the parent slot is always None.
        IDLInterfaceOrNamespace.__init__(self, location, parentScope, name,
                                         None, members, isKnownNonPartial)

    def __str__(self):
        return "Namespace '%s'" % self.identifier.name

    def isNamespace(self):
        return True

    def addExtendedAttributes(self, attrs):
        # The set of things namespaces support is small enough it's simpler
        # to factor out into a separate method than it is to sprinkle
        # isNamespace() checks all through
        # IDLInterfaceOrNamespace.addExtendedAttributes.
        for attr in attrs:
            identifier = attr.identifier()
            if identifier == "Exposed":
                convertExposedAttrToGlobalNameSet(attr,
                                                  self._exposureGlobalNames)
            elif identifier == "ClassString":
                # Takes a string value to override the default "Object" if
                # desired.
                if not attr.hasValue():
                    raise WebIDLError("[%s] must have a value" % identifier,
                                      [attr.location])
            elif identifier == "ProtoObjectHack":
                if not attr.noArguments():
                    raise WebIDLError("[%s] must not have arguments" % identifier,
                                      [attr.location])
            else:
                raise WebIDLError("Unknown extended attribute %s on namespace" %
                                  identifier,
                                  [attr.location])
            # Record the attribute value list, or True when it is empty.
            self._extendedAttrDict[identifier] = attr.listValue() or True
class IDLDictionary(IDLObjectWithScope):
    """A WebIDL dictionary: named members, possibly inheriting from a
    parent dictionary."""
    def __init__(self, location, parentScope, name, parent, members):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)
        assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
        # parent stays an IDLIdentifierPlaceholder until finish() resolves it.
        self.parent = parent
        self._finished = False
        self.members = list(members)
        IDLObjectWithScope.__init__(self, location, parentScope, name)
    def __str__(self):
        return "Dictionary '%s'" % self.identifier.name
    def isDictionary(self):
        return True
    def canBeEmpty(self):
        """
        Returns true if this dictionary can be empty (that is, it has no
        required members and neither do any of its ancestors).
        """
        return (all(member.optional for member in self.members) and
                (not self.parent or self.parent.canBeEmpty()))
    def finish(self, scope):
        """Resolve the parent and all member types; detect inheritance
        cycles and duplicate member names across the chain."""
        if self._finished:
            return
        self._finished = True
        if self.parent:
            assert isinstance(self.parent, IDLIdentifierPlaceholder)
            oldParent = self.parent
            self.parent = self.parent.finish(scope)
            if not isinstance(self.parent, IDLDictionary):
                raise WebIDLError("Dictionary %s has parent that is not a dictionary" %
                                  self.identifier.name,
                                  [oldParent.location, self.parent.location])
            # Make sure the parent resolves all its members before we start
            # looking at them.
            self.parent.finish(scope)
        for member in self.members:
            member.resolve(self)
            if not member.isComplete():
                member.complete(scope)
                assert member.type.isComplete()
        # Members of a dictionary are sorted in lexicographic order
        # (Python 2 sort signature: cmp= is not available on Python 3).
        self.members.sort(cmp=cmp, key=lambda x: x.identifier.name)
        inheritedMembers = []
        ancestor = self.parent
        while ancestor:
            # Walking the parent chain back onto ourselves means a cycle.
            if ancestor == self:
                raise WebIDLError("Dictionary %s has itself as an ancestor" %
                                  self.identifier.name,
                                  [self.identifier.location])
            inheritedMembers.extend(ancestor.members)
            ancestor = ancestor.parent
        # Catch name duplication
        for inheritedMember in inheritedMembers:
            for member in self.members:
                if member.identifier.name == inheritedMember.identifier.name:
                    raise WebIDLError("Dictionary %s has two members with name %s" %
                                      (self.identifier.name, member.identifier.name),
                                      [member.location, inheritedMember.location])
    def validate(self):
        """Reject nullable-dictionary members and dictionaries that
        (directly or indirectly) contain themselves."""
        def typeContainsDictionary(memberType, dictionary):
            """
            Returns a tuple whose:
                - First element is a Boolean value indicating whether
                  memberType contains dictionary.
                - Second element is:
                    A list of locations that leads from the type that was passed in
                    the memberType argument, to the dictionary being validated,
                    if the boolean value in the first element is True.
                    None, if the boolean value in the first element is False.
            """
            # Unwrap single-parameter wrappers and recurse on the inner type.
            if (memberType.nullable() or
                memberType.isArray() or
                memberType.isSequence() or
                memberType.isMozMap()):
                return typeContainsDictionary(memberType.inner, dictionary)
            if memberType.isDictionary():
                if memberType.inner == dictionary:
                    return (True, [memberType.location])
                (contains, locations) = dictionaryContainsDictionary(memberType.inner,
                                                                     dictionary)
                if contains:
                    return (True, [memberType.location] + locations)
            if memberType.isUnion():
                for member in memberType.flatMemberTypes:
                    (contains, locations) = typeContainsDictionary(member, dictionary)
                    if contains:
                        return (True, locations)
            return (False, None)
        def dictionaryContainsDictionary(dictMember, dictionary):
            # Check members first, then walk up the inheritance chain.
            for member in dictMember.members:
                (contains, locations) = typeContainsDictionary(member.type, dictionary)
                if contains:
                    return (True, [member.location] + locations)
            if dictMember.parent:
                if dictMember.parent == dictionary:
                    return (True, [dictMember.location])
                else:
                    (contains, locations) = dictionaryContainsDictionary(dictMember.parent, dictionary)
                    if contains:
                        return (True, [dictMember.location] + locations)
            return (False, None)
        for member in self.members:
            if member.type.isDictionary() and member.type.nullable():
                raise WebIDLError("Dictionary %s has member with nullable "
                                  "dictionary type" % self.identifier.name,
                                  [member.location])
            (contains, locations) = typeContainsDictionary(member.type, self)
            if contains:
                raise WebIDLError("Dictionary %s has member with itself as type." %
                                  self.identifier.name,
                                  [member.location] + locations)
    def addExtendedAttributes(self, attrs):
        # Dictionaries take no extended attributes.
        assert len(attrs) == 0
    def _getDependentObjects(self):
        deps = set(self.members)
        if (self.parent):
            deps.add(self.parent)
        return deps
class IDLEnum(IDLObjectWithIdentifier):
    """A WebIDL enumeration: a named collection of unique string values."""

    def __init__(self, location, parentScope, name, values):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)

        # Reject duplicate strings up front.
        if len(set(values)) != len(values):
            raise WebIDLError("Enum %s has multiple identical strings" % name.name,
                              [location])

        IDLObjectWithIdentifier.__init__(self, location, parentScope, name)
        self._values = values

    def values(self):
        """The enum's string values, in declaration order."""
        return self._values

    def isEnum(self):
        return True

    def finish(self, scope):
        # Enums have nothing to resolve.
        pass

    def validate(self):
        pass

    def addExtendedAttributes(self, attrs):
        # Enums take no extended attributes.
        assert len(attrs) == 0

    def _getDependentObjects(self):
        return set()
class IDLType(IDLObject):
    """
    Base class for all IDL types.  Provides the Tags enumeration and a
    default (mostly False) implementation of every type predicate;
    subclasses override the predicates that apply to them.
    """
    Tags = enum(
        # The integer types
        'int8',
        'uint8',
        'int16',
        'uint16',
        'int32',
        'uint32',
        'int64',
        'uint64',
        # Additional primitive types
        'bool',
        'unrestricted_float',
        'float',
        'unrestricted_double',
        # "double" last primitive type to match IDLBuiltinType
        'double',
        # Other types
        'any',
        'domstring',
        'bytestring',
        'usvstring',
        'object',
        'date',
        'void',
        # Funny stuff
        'interface',
        'dictionary',
        'enum',
        'callback',
        'union',
        'sequence',
        'mozmap',
        'array'
    )
    def __init__(self, location, name):
        IDLObject.__init__(self, location)
        self.name = name
        self.builtin = False
    def __eq__(self, other):
        # NOTE: returns `other` (a falsy non-bool) rather than False when
        # other is None/falsy; callers only use this in boolean context.
        return other and self.builtin == other.builtin and self.name == other.name
    def __ne__(self, other):
        return not self == other
    def __str__(self):
        return str(self.name)
    def isType(self):
        return True
    # --- Type predicates: the base class answers False (or delegates to
    # --- tag()); subclasses override as appropriate.
    def nullable(self):
        return False
    def isPrimitive(self):
        return False
    def isBoolean(self):
        return False
    def isNumeric(self):
        return False
    def isString(self):
        return False
    def isByteString(self):
        return False
    def isDOMString(self):
        return False
    def isUSVString(self):
        return False
    def isVoid(self):
        return self.name == "Void"
    def isSequence(self):
        return False
    def isMozMap(self):
        return False
    def isArray(self):
        return False
    def isArrayBuffer(self):
        return False
    def isArrayBufferView(self):
        return False
    def isSharedArrayBuffer(self):
        return False
    def isTypedArray(self):
        return False
    def isCallbackInterface(self):
        return False
    def isNonCallbackInterface(self):
        return False
    def isGeckoInterface(self):
        """ Returns a boolean indicating whether this type is an 'interface'
            type that is implemented in Gecko. At the moment, this returns
            true for all interface types that are not types from the TypedArray
            spec."""
        return self.isInterface() and not self.isSpiderMonkeyInterface()
    def isSpiderMonkeyInterface(self):
        """ Returns a boolean indicating whether this type is an 'interface'
            type that is implemented in Spidermonkey.  At the moment, this
            only returns true for the types from the TypedArray spec. """
        return self.isInterface() and (self.isArrayBuffer() or
                                       self.isArrayBufferView() or
                                       self.isSharedArrayBuffer() or
                                       self.isTypedArray())
    def isDictionary(self):
        return False
    def isInterface(self):
        return False
    def isAny(self):
        return self.tag() == IDLType.Tags.any
    def isDate(self):
        return self.tag() == IDLType.Tags.date
    def isObject(self):
        return self.tag() == IDLType.Tags.object
    def isPromise(self):
        return False
    def isComplete(self):
        return True
    def includesRestrictedFloat(self):
        return False
    def isFloat(self):
        return False
    def isUnrestricted(self):
        # Should only call this on float types
        assert self.isFloat()
    def isSerializable(self):
        return False
    def tag(self):
        # Subclasses must provide their Tags value.
        assert False  # Override me!
    def treatNonCallableAsNull(self):
        assert self.tag() == IDLType.Tags.callback
        return self.nullable() and self.inner.callback._treatNonCallableAsNull
    def treatNonObjectAsNull(self):
        assert self.tag() == IDLType.Tags.callback
        return self.nullable() and self.inner.callback._treatNonObjectAsNull
    def addExtendedAttributes(self, attrs):
        # Types take no extended attributes.
        assert len(attrs) == 0
    def resolveType(self, parentScope):
        pass
    def unroll(self):
        return self
    def isDistinguishableFrom(self, other):
        raise TypeError("Can't tell whether a generic type is or is not "
                        "distinguishable from other things")
    def isExposedInAllOf(self, exposureSet):
        return True
class IDLUnresolvedType(IDLType):
    """
    An interface-like type whose name has not yet been resolved against a
    scope.  complete() replaces it with the concrete IDL type.
    """
    def __init__(self, location, name, promiseInnerType=None):
        IDLType.__init__(self, location, name)
        self._promiseInnerType = promiseInnerType

    def isComplete(self):
        return False

    def complete(self, scope):
        """
        Look our name up in *scope* and return the concrete type: a
        typedef's completed inner type, a callback type, or an
        IDLWrapperType around the named definition.

        Raises WebIDLError when the name cannot be resolved.
        """
        obj = None
        try:
            obj = scope._lookupIdentifier(self.name)
        except Exception:
            # Narrowed from a bare "except:": only translate lookup
            # failures, never swallow KeyboardInterrupt/SystemExit.
            raise WebIDLError("Unresolved type '%s'." % self.name,
                              [self.location])

        assert obj
        # The lookup must yield a definition, never another type.
        # (A leftover Py2 debug `print obj` before this assert was removed.)
        assert not obj.isType()
        if obj.isTypedef():
            assert self.name.name == obj.identifier.name
            typedefType = IDLTypedefType(self.location, obj.innerType,
                                         obj.identifier)
            assert not typedefType.isComplete()
            return typedefType.complete(scope)
        elif obj.isCallback() and not obj.isInterface():
            assert self.name.name == obj.identifier.name
            return IDLCallbackType(self.location, obj)

        if self._promiseInnerType and not self._promiseInnerType.isComplete():
            self._promiseInnerType = self._promiseInnerType.complete(scope)

        name = self.name.resolve(scope, None)
        return IDLWrapperType(self.location, obj, self._promiseInnerType)

    def isDistinguishableFrom(self, other):
        raise TypeError("Can't tell whether an unresolved type is or is not "
                        "distinguishable from other things")
class IDLParameterizedType(IDLType):
    """
    Base class for types parameterized over a single inner type
    (nullable, sequence, MozMap).  Most queries delegate to self.inner.
    """
    def __init__(self, location, name, innerType):
        IDLType.__init__(self, location, name)
        self.builtin = False
        # The wrapped type.
        self.inner = innerType
    def includesRestrictedFloat(self):
        return self.inner.includesRestrictedFloat()
    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolveType(parentScope)
    def isComplete(self):
        return self.inner.isComplete()
    def unroll(self):
        return self.inner.unroll()
    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLNullableType(IDLParameterizedType):
    """
    A nullable IDL type ("T?").  Almost every predicate delegates to the
    inner type; only nullability itself differs.
    """
    def __init__(self, location, innerType):
        assert not innerType.isVoid()
        assert not innerType == BuiltinTypes[IDLBuiltinType.Types.any]
        name = innerType.name
        # The "OrNull" suffix is added here only when the inner type is
        # already complete; otherwise complete() sets the final name.
        if innerType.isComplete():
            name += "OrNull"
        IDLParameterizedType.__init__(self, location, name, innerType)
    def __eq__(self, other):
        return isinstance(other, IDLNullableType) and self.inner == other.inner
    def __str__(self):
        return self.inner.__str__() + "OrNull"
    def nullable(self):
        return True
    def isCallback(self):
        return self.inner.isCallback()
    def isPrimitive(self):
        return self.inner.isPrimitive()
    def isBoolean(self):
        return self.inner.isBoolean()
    def isNumeric(self):
        return self.inner.isNumeric()
    def isString(self):
        return self.inner.isString()
    def isByteString(self):
        return self.inner.isByteString()
    def isDOMString(self):
        return self.inner.isDOMString()
    def isUSVString(self):
        return self.inner.isUSVString()
    def isFloat(self):
        return self.inner.isFloat()
    def isUnrestricted(self):
        return self.inner.isUnrestricted()
    def isInteger(self):
        return self.inner.isInteger()
    def isVoid(self):
        return False
    def isSequence(self):
        return self.inner.isSequence()
    def isMozMap(self):
        return self.inner.isMozMap()
    def isArray(self):
        return self.inner.isArray()
    def isArrayBuffer(self):
        return self.inner.isArrayBuffer()
    def isArrayBufferView(self):
        return self.inner.isArrayBufferView()
    def isSharedArrayBuffer(self):
        return self.inner.isSharedArrayBuffer()
    def isTypedArray(self):
        return self.inner.isTypedArray()
    def isDictionary(self):
        return self.inner.isDictionary()
    def isInterface(self):
        return self.inner.isInterface()
    def isPromise(self):
        return self.inner.isPromise()
    def isCallbackInterface(self):
        return self.inner.isCallbackInterface()
    def isNonCallbackInterface(self):
        return self.inner.isNonCallbackInterface()
    def isEnum(self):
        return self.inner.isEnum()
    def isUnion(self):
        return self.inner.isUnion()
    def isSerializable(self):
        return self.inner.isSerializable()
    def tag(self):
        return self.inner.tag()
    def complete(self, scope):
        """Complete the inner type and enforce nullability constraints."""
        self.inner = self.inner.complete(scope)
        if self.inner.nullable():
            raise WebIDLError("The inner type of a nullable type must not be "
                              "a nullable type",
                              [self.location, self.inner.location])
        if self.inner.isUnion():
            if self.inner.hasNullableType:
                raise WebIDLError("The inner type of a nullable type must not "
                                  "be a union type that itself has a nullable "
                                  "type as a member type", [self.location])
        self.name = self.inner.name + "OrNull"
        return self
    def isDistinguishableFrom(self, other):
        if (other.nullable() or (other.isUnion() and other.hasNullableType) or
            other.isDictionary()):
            # Can't tell which type null should become
            return False
        return self.inner.isDistinguishableFrom(other)
class IDLSequenceType(IDLParameterizedType):
    """An IDL sequence<T> type."""
    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()
        IDLParameterizedType.__init__(self, location, parameterType.name, parameterType)
        # Need to set self.name up front if our inner type is already complete,
        # since in that case our .complete() won't be called.
        if self.inner.isComplete():
            self.name = self.inner.name + "Sequence"
    def __eq__(self, other):
        return isinstance(other, IDLSequenceType) and self.inner == other.inner
    def __str__(self):
        return self.inner.__str__() + "Sequence"
    # A sequence is its own kind of type: it is none of the scalar kinds,
    # regardless of its element type.
    def nullable(self):
        return False
    def isPrimitive(self):
        return False
    def isString(self):
        return False
    def isByteString(self):
        return False
    def isDOMString(self):
        return False
    def isUSVString(self):
        return False
    def isVoid(self):
        return False
    def isSequence(self):
        return True
    def isArray(self):
        return False
    def isDictionary(self):
        return False
    def isInterface(self):
        return False
    def isEnum(self):
        return False
    def isSerializable(self):
        # Serializable iff the element type is serializable.
        return self.inner.isSerializable()
    def tag(self):
        return IDLType.Tags.sequence
    def complete(self, scope):
        self.inner = self.inner.complete(scope)
        self.name = self.inner.name + "Sequence"
        return self
    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isDate() or other.isInterface() or
                other.isDictionary() or
                other.isCallback() or other.isMozMap())
class IDLMozMapType(IDLParameterizedType):
    """A MozMap<T> type: a string-keyed map of T values."""
    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()
        IDLParameterizedType.__init__(self, location, parameterType.name, parameterType)
        # Need to set self.name up front if our inner type is already complete,
        # since in that case our .complete() won't be called.
        if self.inner.isComplete():
            self.name = self.inner.name + "MozMap"
    def __eq__(self, other):
        return isinstance(other, IDLMozMapType) and self.inner == other.inner
    def __str__(self):
        return self.inner.__str__() + "MozMap"
    def isMozMap(self):
        return True
    def tag(self):
        return IDLType.Tags.mozmap
    def complete(self, scope):
        self.inner = self.inner.complete(scope)
        self.name = self.inner.name + "MozMap"
        return self
    def unroll(self):
        # We do not unroll our inner. Just stop at ourselves. That
        # lets us add headers for both ourselves and our inner as
        # needed.
        return self
    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isDate() or other.isNonCallbackInterface() or other.isSequence())
    def isExposedInAllOf(self, exposureSet):
        return self.inner.unroll().isExposedInAllOf(exposureSet)
class IDLUnionType(IDLType):
def __init__(self, location, memberTypes):
IDLType.__init__(self, location, "")
self.memberTypes = memberTypes
self.hasNullableType = False
self._dictionaryType = None
self.flatMemberTypes = None
self.builtin = False
def __eq__(self, other):
return isinstance(other, IDLUnionType) and self.memberTypes == other.memberTypes
def __hash__(self):
assert self.isComplete()
return self.name.__hash__()
def isVoid(self):
return False
def isUnion(self):
return True
def isSerializable(self):
return all(m.isSerializable() for m in self.memberTypes)
def includesRestrictedFloat(self):
return any(t.includesRestrictedFloat() for t in self.memberTypes)
def tag(self):
return IDLType.Tags.union
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
for t in self.memberTypes:
t.resolveType(parentScope)
def isComplete(self):
return self.flatMemberTypes is not None
def complete(self, scope):
def typeName(type):
if isinstance(type, IDLNullableType):
return typeName(type.inner) + "OrNull"
if isinstance(type, IDLWrapperType):
return typeName(type._identifier.object())
if isinstance(type, IDLObjectWithIdentifier):
return typeName(type.identifier)
return type.name
for (i, type) in enumerate(self.memberTypes):
if not type.isComplete():
self.memberTypes[i] = type.complete(scope)
self.name = "Or".join(typeName(type) for type in self.memberTypes)
self.flatMemberTypes = list(self.memberTypes)
i = 0
while i < len(self.flatMemberTypes):
if self.flatMemberTypes[i].nullable():
if self.hasNullableType:
raise WebIDLError("Can't have more than one nullable types in a union",
[nullableType.location, self.flatMemberTypes[i].location])
if self.hasDictionaryType():
raise WebIDLError("Can't have a nullable type and a "
"dictionary type in a union",
[self._dictionaryType.location,
self.flatMemberTypes[i].location])
self.hasNullableType = True
nullableType = self.flatMemberTypes[i]
self.flatMemberTypes[i] = self.flatMemberTypes[i].inner
continue
if self.flatMemberTypes[i].isDictionary():
if self.hasNullableType:
raise WebIDLError("Can't have a nullable type and a "
"dictionary type in a union",
[nullableType.location,
self.flatMemberTypes[i].location])
self._dictionaryType = self.flatMemberTypes[i]
elif self.flatMemberTypes[i].isUnion():
self.flatMemberTypes[i:i + 1] = self.flatMemberTypes[i].memberTypes
continue
i += 1
for (i, t) in enumerate(self.flatMemberTypes[:-1]):
for u in self.flatMemberTypes[i + 1:]:
if not t.isDistinguishableFrom(u):
raise WebIDLError("Flat member types of a union should be "
"distinguishable, " + str(t) + " is not "
"distinguishable from " + str(u),
[self.location, t.location, u.location])
return self
def isDistinguishableFrom(self, other):
if self.hasNullableType and other.nullable():
# Can't tell which type null should become
return False
if other.isUnion():
otherTypes = other.unroll().memberTypes
else:
otherTypes = [other]
# For every type in otherTypes, check that it's distinguishable from
# every type in our types
for u in otherTypes:
if any(not t.isDistinguishableFrom(u) for t in self.memberTypes):
return False
return True
def isExposedInAllOf(self, exposureSet):
# We could have different member types in different globals. Just make sure that each thing in exposureSet has one of our member types exposed in it.
for globalName in exposureSet:
if not any(t.unroll().isExposedInAllOf(set([globalName])) for t
in self.flatMemberTypes):
return False
return True
    def hasDictionaryType(self):
        # _dictionaryType is recorded during complete()'s flattening pass.
        return self._dictionaryType is not None
    def hasPossiblyEmptyDictionaryType(self):
        # True when the union has a dictionary member whose dictionary may
        # be empty (all members optional / defaulted).
        return (self._dictionaryType is not None and
                self._dictionaryType.inner.canBeEmpty())
    def _getDependentObjects(self):
        # A union depends on all of its (unflattened) member types.
        return set(self.memberTypes)
class IDLArrayType(IDLType):
    """
    Legacy WebIDL array type (T[]) wrapping an element type.  Sequence,
    MozMap and dictionary element types are rejected.
    """
    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()
        if parameterType.isSequence():
            raise WebIDLError("Array type cannot parameterize over a sequence type",
                              [location])
        if parameterType.isMozMap():
            raise WebIDLError("Array type cannot parameterize over a MozMap type",
                              [location])
        if parameterType.isDictionary():
            raise WebIDLError("Array type cannot parameterize over a dictionary type",
                              [location])
        IDLType.__init__(self, location, parameterType.name)
        self.inner = parameterType
        self.builtin = False
    def __eq__(self, other):
        return isinstance(other, IDLArrayType) and self.inner == other.inner
    def __str__(self):
        return self.inner.__str__() + "Array"
    def nullable(self):
        return False
    def isPrimitive(self):
        return False
    def isString(self):
        return False
    def isByteString(self):
        return False
    def isDOMString(self):
        return False
    def isUSVString(self):
        return False
    def isVoid(self):
        return False
    def isSequence(self):
        # Guaranteed by the constructor check.
        assert not self.inner.isSequence()
        return False
    def isArray(self):
        return True
    def isDictionary(self):
        # Guaranteed by the constructor check and re-checked in complete().
        assert not self.inner.isDictionary()
        return False
    def isInterface(self):
        return False
    def isEnum(self):
        return False
    def tag(self):
        return IDLType.Tags.array
    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolveType(parentScope)
    def isComplete(self):
        return self.inner.isComplete()
    def complete(self, scope):
        """Complete the element type.  The dictionary check is repeated here
        because the element type may only be known after completion."""
        self.inner = self.inner.complete(scope)
        self.name = self.inner.name
        if self.inner.isDictionary():
            raise WebIDLError("Array type must not contain "
                              "dictionary as element type.",
                              [self.inner.location])
        assert not self.inner.isSequence()
        return self
    def unroll(self):
        return self.inner.unroll()
    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isDate() or other.isNonCallbackInterface())
    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLTypedefType(IDLType):
    """
    A (not yet resolved) use of a typedef-ed name.  Almost every predicate
    simply forwards to the underlying type; complete() returns the inner
    type itself, so typedef uses disappear once completion has run.
    """
    def __init__(self, location, innerType, name):
        IDLType.__init__(self, location, name)
        self.inner = innerType
        self.builtin = False
    def __eq__(self, other):
        return isinstance(other, IDLTypedefType) and self.inner == other.inner
    def __str__(self):
        return self.name
    def nullable(self):
        return self.inner.nullable()
    def isPrimitive(self):
        return self.inner.isPrimitive()
    def isBoolean(self):
        return self.inner.isBoolean()
    def isNumeric(self):
        return self.inner.isNumeric()
    def isString(self):
        return self.inner.isString()
    def isByteString(self):
        return self.inner.isByteString()
    def isDOMString(self):
        return self.inner.isDOMString()
    def isUSVString(self):
        return self.inner.isUSVString()
    def isVoid(self):
        return self.inner.isVoid()
    def isSequence(self):
        return self.inner.isSequence()
    def isMozMap(self):
        return self.inner.isMozMap()
    def isArray(self):
        return self.inner.isArray()
    def isDictionary(self):
        return self.inner.isDictionary()
    def isArrayBuffer(self):
        return self.inner.isArrayBuffer()
    def isArrayBufferView(self):
        return self.inner.isArrayBufferView()
    def isSharedArrayBuffer(self):
        return self.inner.isSharedArrayBuffer()
    def isTypedArray(self):
        return self.inner.isTypedArray()
    def isInterface(self):
        return self.inner.isInterface()
    def isCallbackInterface(self):
        return self.inner.isCallbackInterface()
    def isNonCallbackInterface(self):
        return self.inner.isNonCallbackInterface()
    def isComplete(self):
        # Always incomplete, so complete() always runs and replaces this
        # typedef use with the underlying type.
        return False
    def complete(self, parentScope):
        if not self.inner.isComplete():
            self.inner = self.inner.complete(parentScope)
        assert self.inner.isComplete()
        return self.inner
    # Do we need a resolveType impl?  I don't think it's particularly useful....
    def tag(self):
        return self.inner.tag()
    def unroll(self):
        return self.inner.unroll()
    def isDistinguishableFrom(self, other):
        return self.inner.isDistinguishableFrom(other)
    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLTypedef(IDLObjectWithIdentifier):
    """The typedef declaration itself, registered in its parent scope."""
    def __init__(self, location, parentScope, innerType, name):
        IDLObjectWithIdentifier.__init__(
            self, location, parentScope,
            IDLUnresolvedIdentifier(location, name))
        self.innerType = innerType
    def __str__(self):
        return "Typedef %s %s" % (self.identifier.name, self.innerType)
    def finish(self, parentScope):
        """Complete the aliased type if it has not been completed yet."""
        if self.innerType.isComplete():
            return
        self.innerType = self.innerType.complete(parentScope)
    def validate(self):
        pass
    def isTypedef(self):
        return True
    def addExtendedAttributes(self, attrs):
        # Extended attributes on typedefs are not supported.
        assert len(attrs) == 0
    def _getDependentObjects(self):
        return self.innerType._getDependentObjects()
class IDLWrapperType(IDLType):
    """
    A type that wraps a declaration made elsewhere: an interface (internal
    or external), dictionary, or enum.  Promise types are also modeled here,
    with the resolution type carried in promiseInnerType().
    """
    def __init__(self, location, inner, promiseInnerType=None):
        IDLType.__init__(self, location, inner.identifier.name)
        self.inner = inner
        self._identifier = inner.identifier
        self.builtin = False
        assert not promiseInnerType or inner.identifier.name == "Promise"
        self._promiseInnerType = promiseInnerType
    def __eq__(self, other):
        # NOTE(review): _promiseInnerType is not compared, so two Promise
        # wrappers with different inner types compare equal -- confirm
        # that this is intended.
        return (isinstance(other, IDLWrapperType) and
                self._identifier == other._identifier and
                self.builtin == other.builtin)
    def __str__(self):
        return str(self.name) + " (Wrapper)"
    def nullable(self):
        return False
    def isPrimitive(self):
        return False
    def isString(self):
        return False
    def isByteString(self):
        return False
    def isDOMString(self):
        return False
    def isUSVString(self):
        return False
    def isVoid(self):
        return False
    def isSequence(self):
        return False
    def isArray(self):
        return False
    def isDictionary(self):
        return isinstance(self.inner, IDLDictionary)
    def isInterface(self):
        return (isinstance(self.inner, IDLInterface) or
                isinstance(self.inner, IDLExternalInterface))
    def isCallbackInterface(self):
        return self.isInterface() and self.inner.isCallback()
    def isNonCallbackInterface(self):
        return self.isInterface() and not self.inner.isCallback()
    def isEnum(self):
        return isinstance(self.inner, IDLEnum)
    def isPromise(self):
        return (isinstance(self.inner, IDLInterface) and
                self.inner.identifier.name == "Promise")
    def promiseInnerType(self):
        assert self.isPromise()
        return self._promiseInnerType
    def isSerializable(self):
        # An interface is serializable when it declares a jsonifier; a
        # dictionary when all of its member types are serializable.
        if self.isInterface():
            if self.inner.isExternal():
                return False
            return any(m.isMethod() and m.isJsonifier() for m in self.inner.members)
        elif self.isEnum():
            return True
        elif self.isDictionary():
            return all(m.type.isSerializable() for m in self.inner.members)
        else:
            raise WebIDLError("IDLWrapperType wraps type %s that we don't know if "
                              "is serializable" % type(self.inner), [self.location])
    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolve(parentScope)
    def isComplete(self):
        return True
    def tag(self):
        if self.isInterface():
            return IDLType.Tags.interface
        elif self.isEnum():
            return IDLType.Tags.enum
        elif self.isDictionary():
            return IDLType.Tags.dictionary
        else:
            assert False
    def isDistinguishableFrom(self, other):
        # Implements the WebIDL distinguishability rules for wrapped types.
        if self.isPromise():
            return False
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        assert self.isInterface() or self.isEnum() or self.isDictionary()
        if self.isEnum():
            return (other.isPrimitive() or other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isDictionary() and other.nullable():
            return False
        if (other.isPrimitive() or other.isString() or other.isEnum() or
            other.isDate() or other.isSequence()):
            return True
        if self.isDictionary():
            return other.isNonCallbackInterface()
        assert self.isInterface()
        if other.isInterface():
            if other.isSpiderMonkeyInterface():
                # Just let |other| handle things
                return other.isDistinguishableFrom(self)
            assert self.isGeckoInterface() and other.isGeckoInterface()
            if self.inner.isExternal() or other.unroll().inner.isExternal():
                return self != other
            # Distinguishable when the inheritance trees do not overlap and
            # at least one side is a non-callback interface.
            return (len(self.inner.interfacesBasedOnSelf &
                        other.unroll().inner.interfacesBasedOnSelf) == 0 and
                    (self.isNonCallbackInterface() or
                     other.isNonCallbackInterface()))
        if (other.isDictionary() or other.isCallback() or
            other.isMozMap() or other.isArray()):
            return self.isNonCallbackInterface()
        # Not much else |other| can be
        assert other.isObject()
        return False
    def isExposedInAllOf(self, exposureSet):
        if not self.isInterface():
            return True
        iface = self.inner
        if iface.isExternal():
            # Let's say true, though ideally we'd only do this when
            # exposureSet contains the primary global's name.
            return True
        if (self.isPromise() and
            # Check the internal type
            not self.promiseInnerType().unroll().isExposedInAllOf(exposureSet)):
            return False
        return iface.exposureSet.issuperset(exposureSet)
    def _getDependentObjects(self):
        # NB: The codegen for an interface type depends on
        #  a) That the identifier is in fact an interface (as opposed to
        #     a dictionary or something else).
        #  b) The native type of the interface.
        #  If we depend on the interface object we will also depend on
        #  anything the interface depends on which is undesirable.  We
        #  considered implementing a dependency just on the interface type
        #  file, but then every modification to an interface would cause this
        #  to be regenerated which is still undesirable.  We decided not to
        #  depend on anything, reasoning that:
        #  1) Changing the concrete type of the interface requires modifying
        #     Bindings.conf, which is still a global dependency.
        #  2) Changing an interface to a dictionary (or vice versa) with the
        #     same identifier should be incredibly rare.
        #
        #  On the other hand, if our type is a dictionary, we should
        #  depend on it, because the member types of a dictionary
        #  affect whether a method taking the dictionary as an argument
        #  takes a JSContext* argument or not.
        if self.isDictionary():
            return set([self.inner])
        return set()
class IDLBuiltinType(IDLType):
    """
    Builtin WebIDL types: primitives, strings, object, date, void, and the
    typed-array family.  Several predicates below rely on the declaration
    order of the Types enum (e.g. everything up to and including 'double'
    is primitive), so that order must not be changed casually.
    """
    Types = enum(
        # The integer types
        'byte',
        'octet',
        'short',
        'unsigned_short',
        'long',
        'unsigned_long',
        'long_long',
        'unsigned_long_long',
        # Additional primitive types
        'boolean',
        'unrestricted_float',
        'float',
        'unrestricted_double',
        # IMPORTANT: "double" must be the last primitive type listed
        'double',
        # Other types
        'any',
        'domstring',
        'bytestring',
        'usvstring',
        'object',
        'date',
        'void',
        # Funny stuff
        'ArrayBuffer',
        'ArrayBufferView',
        'SharedArrayBuffer',
        'Int8Array',
        'Uint8Array',
        'Uint8ClampedArray',
        'Int16Array',
        'Uint16Array',
        'Int32Array',
        'Uint32Array',
        'Float32Array',
        'Float64Array'
    )
    # Maps each builtin to the IDLType.Tags value used by codegen dispatch.
    TagLookup = {
        Types.byte: IDLType.Tags.int8,
        Types.octet: IDLType.Tags.uint8,
        Types.short: IDLType.Tags.int16,
        Types.unsigned_short: IDLType.Tags.uint16,
        Types.long: IDLType.Tags.int32,
        Types.unsigned_long: IDLType.Tags.uint32,
        Types.long_long: IDLType.Tags.int64,
        Types.unsigned_long_long: IDLType.Tags.uint64,
        Types.boolean: IDLType.Tags.bool,
        Types.unrestricted_float: IDLType.Tags.unrestricted_float,
        Types.float: IDLType.Tags.float,
        Types.unrestricted_double: IDLType.Tags.unrestricted_double,
        Types.double: IDLType.Tags.double,
        Types.any: IDLType.Tags.any,
        Types.domstring: IDLType.Tags.domstring,
        Types.bytestring: IDLType.Tags.bytestring,
        Types.usvstring: IDLType.Tags.usvstring,
        Types.object: IDLType.Tags.object,
        Types.date: IDLType.Tags.date,
        Types.void: IDLType.Tags.void,
        Types.ArrayBuffer: IDLType.Tags.interface,
        Types.ArrayBufferView: IDLType.Tags.interface,
        Types.SharedArrayBuffer: IDLType.Tags.interface,
        Types.Int8Array: IDLType.Tags.interface,
        Types.Uint8Array: IDLType.Tags.interface,
        Types.Uint8ClampedArray: IDLType.Tags.interface,
        Types.Int16Array: IDLType.Tags.interface,
        Types.Uint16Array: IDLType.Tags.interface,
        Types.Int32Array: IDLType.Tags.interface,
        Types.Uint32Array: IDLType.Tags.interface,
        Types.Float32Array: IDLType.Tags.interface,
        Types.Float64Array: IDLType.Tags.interface
    }
    def __init__(self, location, name, type):
        IDLType.__init__(self, location, name)
        self.builtin = True
        self._typeTag = type
    def isPrimitive(self):
        # Relies on 'double' being the last primitive listed in Types.
        return self._typeTag <= IDLBuiltinType.Types.double
    def isBoolean(self):
        return self._typeTag == IDLBuiltinType.Types.boolean
    def isNumeric(self):
        return self.isPrimitive() and not self.isBoolean()
    def isString(self):
        return (self._typeTag == IDLBuiltinType.Types.domstring or
                self._typeTag == IDLBuiltinType.Types.bytestring or
                self._typeTag == IDLBuiltinType.Types.usvstring)
    def isByteString(self):
        return self._typeTag == IDLBuiltinType.Types.bytestring
    def isDOMString(self):
        return self._typeTag == IDLBuiltinType.Types.domstring
    def isUSVString(self):
        return self._typeTag == IDLBuiltinType.Types.usvstring
    def isInteger(self):
        # Relies on the integer types being declared first in Types.
        return self._typeTag <= IDLBuiltinType.Types.unsigned_long_long
    def isArrayBuffer(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBuffer
    def isArrayBufferView(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBufferView
    def isSharedArrayBuffer(self):
        return self._typeTag == IDLBuiltinType.Types.SharedArrayBuffer
    def isTypedArray(self):
        # Relies on the typed array types being declared contiguously,
        # from Int8Array through Float64Array.
        return (self._typeTag >= IDLBuiltinType.Types.Int8Array and
                self._typeTag <= IDLBuiltinType.Types.Float64Array)
    def isInterface(self):
        # TypedArray things are interface types per the TypedArray spec,
        # but we handle them as builtins because SpiderMonkey implements
        # all of it internally.
        return (self.isArrayBuffer() or
                self.isArrayBufferView() or
                self.isSharedArrayBuffer() or
                self.isTypedArray())
    def isNonCallbackInterface(self):
        # All the interfaces we can be are non-callback
        return self.isInterface()
    def isFloat(self):
        return (self._typeTag == IDLBuiltinType.Types.float or
                self._typeTag == IDLBuiltinType.Types.double or
                self._typeTag == IDLBuiltinType.Types.unrestricted_float or
                self._typeTag == IDLBuiltinType.Types.unrestricted_double)
    def isUnrestricted(self):
        assert self.isFloat()
        return (self._typeTag == IDLBuiltinType.Types.unrestricted_float or
                self._typeTag == IDLBuiltinType.Types.unrestricted_double)
    def isSerializable(self):
        return self.isPrimitive() or self.isString() or self.isDate()
    def includesRestrictedFloat(self):
        return self.isFloat() and not self.isUnrestricted()
    def tag(self):
        return IDLBuiltinType.TagLookup[self._typeTag]
    def isDistinguishableFrom(self, other):
        # Implements the WebIDL distinguishability rules for builtins.
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        if self.isBoolean():
            return (other.isNumeric() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isNumeric():
            return (other.isBoolean() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isString():
            return (other.isPrimitive() or other.isInterface() or
                    other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isAny():
            # Can't tell "any" apart from anything
            return False
        if self.isObject():
            return other.isPrimitive() or other.isString() or other.isEnum()
        if self.isDate():
            return (other.isPrimitive() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isCallback() or
                    other.isDictionary() or other.isSequence() or
                    other.isMozMap() or other.isArray())
        if self.isVoid():
            return not other.isVoid()
        # Not much else we could be!
        assert self.isSpiderMonkeyInterface()
        # Like interfaces, but we know we're not a callback
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isCallback() or other.isDictionary() or
                other.isSequence() or other.isMozMap() or other.isArray() or
                other.isDate() or
                (other.isInterface() and (
                 # ArrayBuffer is distinguishable from everything
                 # that's not an ArrayBuffer or a callback interface
                 (self.isArrayBuffer() and not other.isArrayBuffer()) or
                 (self.isSharedArrayBuffer() and not other.isSharedArrayBuffer()) or
                 # ArrayBufferView is distinguishable from everything
                 # that's not an ArrayBufferView or typed array.
                 (self.isArrayBufferView() and not other.isArrayBufferView() and
                  not other.isTypedArray()) or
                 # Typed arrays are distinguishable from everything
                 # except ArrayBufferView and the same type of typed
                 # array
                 (self.isTypedArray() and not other.isArrayBufferView() and not
                  (other.isTypedArray() and other.name == self.name)))))
    def _getDependentObjects(self):
        return set()
# Singleton IDLBuiltinType instances, keyed by type tag.  The name given to
# each is the codegen-visible name (note that `domstring` maps to "String").
BuiltinTypes = {
    tag: IDLBuiltinType(BuiltinLocation("<builtin type>"), name, tag)
    for (tag, name) in (
        (IDLBuiltinType.Types.byte, "Byte"),
        (IDLBuiltinType.Types.octet, "Octet"),
        (IDLBuiltinType.Types.short, "Short"),
        (IDLBuiltinType.Types.unsigned_short, "UnsignedShort"),
        (IDLBuiltinType.Types.long, "Long"),
        (IDLBuiltinType.Types.unsigned_long, "UnsignedLong"),
        (IDLBuiltinType.Types.long_long, "LongLong"),
        (IDLBuiltinType.Types.unsigned_long_long, "UnsignedLongLong"),
        (IDLBuiltinType.Types.boolean, "Boolean"),
        (IDLBuiltinType.Types.float, "Float"),
        (IDLBuiltinType.Types.unrestricted_float, "UnrestrictedFloat"),
        (IDLBuiltinType.Types.double, "Double"),
        (IDLBuiltinType.Types.unrestricted_double, "UnrestrictedDouble"),
        (IDLBuiltinType.Types.any, "Any"),
        (IDLBuiltinType.Types.domstring, "String"),
        (IDLBuiltinType.Types.bytestring, "ByteString"),
        (IDLBuiltinType.Types.usvstring, "USVString"),
        (IDLBuiltinType.Types.object, "Object"),
        (IDLBuiltinType.Types.date, "Date"),
        (IDLBuiltinType.Types.void, "Void"),
        (IDLBuiltinType.Types.ArrayBuffer, "ArrayBuffer"),
        (IDLBuiltinType.Types.ArrayBufferView, "ArrayBufferView"),
        (IDLBuiltinType.Types.SharedArrayBuffer, "SharedArrayBuffer"),
        (IDLBuiltinType.Types.Int8Array, "Int8Array"),
        (IDLBuiltinType.Types.Uint8Array, "Uint8Array"),
        (IDLBuiltinType.Types.Uint8ClampedArray, "Uint8ClampedArray"),
        (IDLBuiltinType.Types.Int16Array, "Int16Array"),
        (IDLBuiltinType.Types.Uint16Array, "Uint16Array"),
        (IDLBuiltinType.Types.Int32Array, "Int32Array"),
        (IDLBuiltinType.Types.Uint32Array, "Uint32Array"),
        (IDLBuiltinType.Types.Float32Array, "Float32Array"),
        (IDLBuiltinType.Types.Float64Array, "Float64Array"),
    )
}
# Inclusive (min, max) value ranges for each WebIDL integer type.
integerTypeSizes = {
    IDLBuiltinType.Types.byte: (-128, 127),
    IDLBuiltinType.Types.octet: (0, 255),
    IDLBuiltinType.Types.short: (-32768, 32767),
    IDLBuiltinType.Types.unsigned_short: (0, 65535),
    IDLBuiltinType.Types.long: (-2147483648, 2147483647),
    IDLBuiltinType.Types.unsigned_long: (0, 4294967295),
    IDLBuiltinType.Types.long_long: (-9223372036854775808, 9223372036854775807),
    IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615)
}
def matchIntegerValueToType(value):
    """
    Return the builtin IDL integer type whose range contains `value`, or
    None if no integer type can represent it.

    Iterates integerTypeSizes in the same (dict) order as before; the
    previous version shadowed the builtins `type`, `min` and `max`.
    """
    for typeTag, (lowest, highest) in integerTypeSizes.items():
        if lowest <= value <= highest:
            return BuiltinTypes[typeTag]
    return None
class IDLValue(IDLObject):
    """
    A constant or default value appearing in IDL, tagged with its IDLType.
    """
    def __init__(self, location, type, value):
        IDLObject.__init__(self, location)
        self.type = type
        assert isinstance(type, IDLType)
        self.value = value

    def coerceToType(self, type, location):
        """
        Return an IDLValue equivalent to this one but typed as `type`, or
        raise WebIDLError if the coercion is not allowed.
        """
        if type == self.type:
            return self  # Nothing to do

        # We first check for unions to ensure that even if the union is nullable
        # we end up with the right flat member type, not the union's type.
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    coercedValue = self.coerceToType(subtype, location)
                    # Create a new IDLValue to make sure that we have the
                    # correct float/double type.  This is necessary because we
                    # use the value's type when it is a default value of a
                    # union, and the union cares about the exact float type.
                    return IDLValue(self.location, subtype, coercedValue.value)
                except Exception:
                    # This member type didn't accept the value; try the next.
                    # (Not a bare except, so KeyboardInterrupt/SystemExit
                    # still propagate.)
                    pass
        elif type.nullable() and not type.isEnum():
            # If the type allows null, rerun this matching on the inner type,
            # except for nullable enums.  We handle those specially, because
            # we want our default string values to stay strings even when
            # assigned to a nullable enum.
            innerValue = self.coerceToType(type.inner, location)
            return IDLValue(self.location, type, innerValue.value)
        elif self.type.isInteger() and type.isInteger():
            # We're both integer types.  See if we fit.
            (minVal, maxVal) = integerTypeSizes[type._typeTag]
            if minVal <= self.value <= maxVal:
                # Promote
                return IDLValue(self.location, type, self.value)
            raise WebIDLError("Value %s is out of range for type %s." %
                              (self.value, type), [location])
        elif self.type.isInteger() and type.isFloat():
            # Convert an integer literal into float, but only when it is
            # exactly representable (2^24 covers the float mantissa).
            if -2**24 <= self.value <= 2**24:
                return IDLValue(self.location, type, float(self.value))
            raise WebIDLError("Converting value %s to %s will lose precision." %
                              (self.value, type), [location])
        elif self.type.isString() and type.isEnum():
            # Just keep our string, but make sure it's a valid value for this enum
            enum = type.unroll().inner
            if self.value not in enum.values():
                raise WebIDLError("'%s' is not a valid default value for enum %s"
                                  % (self.value, enum.identifier.name),
                                  [location, enum.location])
            return self
        elif self.type.isFloat() and type.isFloat():
            # Infinities and NaN are only allowed for unrestricted floats.
            if (not type.isUnrestricted() and
                (self.value == float("inf") or self.value == float("-inf") or
                 math.isnan(self.value))):
                raise WebIDLError("Trying to convert unrestricted value %s to non-unrestricted"
                                  % self.value, [location])
            return IDLValue(self.location, type, self.value)
        elif self.type.isString() and type.isUSVString():
            # Allow USVStrings to use default value just like
            # DOMString.  No coercion is required in this case as Codegen.py
            # treats USVString just like DOMString, but with an
            # extra normalization step.
            assert self.type.isDOMString()
            return self
        raise WebIDLError("Cannot coerce type %s to type %s." %
                          (self.type, type), [location])

    def _getDependentObjects(self):
        return set()
class IDLNullValue(IDLObject):
    """The IDL literal `null` used as a default value."""
    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None
    def coerceToType(self, type, location):
        """Coerce null to `type`; only nullable-ish types may hold null."""
        nullOk = (isinstance(type, IDLNullableType) or
                  (type.isUnion() and (type.hasNullableType or
                                       type.hasDictionaryType())) or
                  type.isDictionary() or
                  type.isAny())
        if not nullOk:
            raise WebIDLError("Cannot coerce null value to type %s." % type,
                              [location])
        coerced = IDLNullValue(self.location)
        coerced.type = type
        if type.isUnion() and not type.nullable() and type.hasDictionaryType():
            # We're actually a default value for the union's dictionary
            # member.  Use its type instead of the union's.
            for memberType in type.flatMemberTypes:
                if memberType.isDictionary():
                    coerced.type = memberType
                    break
        return coerced
    def _getDependentObjects(self):
        return set()
class IDLEmptySequenceValue(IDLObject):
    """The empty-sequence default value `[]`."""
    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """
        Coerce [] to `type`.  For unions, the first flat member type that
        accepts an empty sequence wins; otherwise `type` must itself be a
        sequence type or WebIDLError is raised.
        """
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    return self.coerceToType(subtype, location)
                except Exception:
                    # This member type can't hold []; try the next one.
                    # (Not a bare except, so KeyboardInterrupt/SystemExit
                    # still propagate.)
                    pass
        if not type.isSequence():
            raise WebIDLError("Cannot coerce empty sequence value to type %s." % type,
                              [location])
        emptySequenceValue = IDLEmptySequenceValue(self.location)
        emptySequenceValue.type = type
        return emptySequenceValue

    def _getDependentObjects(self):
        return set()
class IDLUndefinedValue(IDLObject):
    """The `undefined` default value; coercible only to `any`."""
    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None
    def coerceToType(self, type, location):
        if not type.isAny():
            raise WebIDLError("Cannot coerce undefined value to type %s." % type,
                              [location])
        coerced = IDLUndefinedValue(self.location)
        coerced.type = type
        return coerced
    def _getDependentObjects(self):
        return set()
class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins):
    """
    Base class for interface members (constants, attributes, methods and
    maplike/setlike/iterable declarations).  Tracks the member-kind tag,
    extended attributes and exposure information.
    """
    Tags = enum(
        'Const',
        'Attr',
        'Method',
        'MaplikeOrSetlike',
        'Iterable'
    )
    Special = enum(
        'Static',
        'Stringifier'
    )
    # Legal values for the [Affects] / [DependsOn] extended attributes.
    AffectsValues = ("Nothing", "Everything")
    DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything")
    def __init__(self, location, identifier, tag, extendedAttrDict=None):
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)
        IDLExposureMixins.__init__(self, location)
        self.tag = tag
        if extendedAttrDict is None:
            # Fresh dict per instance; sharing one would leak attributes
            # across members.
            self._extendedAttrDict = {}
        else:
            self._extendedAttrDict = extendedAttrDict
    def isMethod(self):
        return self.tag == IDLInterfaceMember.Tags.Method
    def isAttr(self):
        return self.tag == IDLInterfaceMember.Tags.Attr
    def isConst(self):
        return self.tag == IDLInterfaceMember.Tags.Const
    def isMaplikeOrSetlikeOrIterable(self):
        return (self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike or
                self.tag == IDLInterfaceMember.Tags.Iterable)
    def isMaplikeOrSetlike(self):
        return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike
    def addExtendedAttributes(self, attrs):
        for attr in attrs:
            self.handleExtendedAttribute(attr)
            attrlist = attr.listValue()
            # Attributes without arguments are stored as True, not [].
            self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True
    def handleExtendedAttribute(self, attr):
        # Subclasses override this to process the attributes they support.
        pass
    def getExtendedAttribute(self, name):
        return self._extendedAttrDict.get(name, None)
    def finish(self, scope):
        # We better be exposed _somewhere_.
        if (len(self._exposureGlobalNames) == 0):
            # Debugging aid for the assert below.  Fixed to the print()
            # call form; the bare print statement is Python-2-only.
            print(self.identifier.name)
        assert len(self._exposureGlobalNames) != 0
        IDLExposureMixins.finish(self, scope)
    def validate(self):
        for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
            if (self.getExtendedAttribute(attribute) and
                self.exposureSet != set([self._globalScope.primaryGlobalName])):
                # NOTE(review): the check reads self._globalScope but the
                # message reads self.parentScope -- confirm they agree here.
                raise WebIDLError("[%s] used on an interface member that is "
                                  "not %s-only" %
                                  (attribute, self.parentScope.primaryGlobalName),
                                  [self.location])
        if self.isAttr() or self.isMethod():
            if self.affects == "Everything" and self.dependsOn != "Everything":
                raise WebIDLError("Interface member is flagged as affecting "
                                  "everything but not depending on everything. "
                                  "That seems rather unlikely.",
                                  [self.location])
            if self.getExtendedAttribute("NewObject"):
                if self.dependsOn == "Nothing" or self.dependsOn == "DOMState":
                    raise WebIDLError("A [NewObject] method is not idempotent, "
                                      "so it has to depend on something other than DOM state.",
                                      [self.location])
    def _setDependsOn(self, dependsOn):
        if self.dependsOn != "Everything":
            raise WebIDLError("Trying to specify multiple different DependsOn, "
                              "Pure, or Constant extended attributes for "
                              "attribute", [self.location])
        if dependsOn not in IDLInterfaceMember.DependsOnValues:
            raise WebIDLError("Invalid [DependsOn=%s] on attribute" % dependsOn,
                              [self.location])
        self.dependsOn = dependsOn
    def _setAffects(self, affects):
        if self.affects != "Everything":
            raise WebIDLError("Trying to specify multiple different Affects, "
                              "Pure, or Constant extended attributes for "
                              "attribute", [self.location])
        if affects not in IDLInterfaceMember.AffectsValues:
            # Fixed: the message previously interpolated the undefined name
            # `dependsOn`, which raised NameError instead of WebIDLError.
            raise WebIDLError("Invalid [Affects=%s] on attribute" % affects,
                              [self.location])
        self.affects = affects
    def _addAlias(self, alias):
        # self.aliases is expected to be initialized by the concrete member
        # class -- TODO confirm (it is not set in this base class).
        if alias in self.aliases:
            raise WebIDLError("Duplicate [Alias=%s] on attribute" % alias,
                              [self.location])
        self.aliases.append(alias)
class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember):
    def __init__(self, location, identifier, ifaceType, keyType, valueType, ifaceKind):
        IDLInterfaceMember.__init__(self, location, identifier, ifaceKind)
        # The key type may be omitted (e.g. value iterables), but then a
        # value type is mandatory.
        if keyType is not None:
            assert isinstance(keyType, IDLType)
        else:
            assert valueType is not None
        assert ifaceType in ['maplike', 'setlike', 'iterable']
        if valueType is not None:
            assert isinstance(valueType, IDLType)
        self.keyType = keyType
        self.valueType = valueType
        self.maplikeOrSetlikeOrIterableType = ifaceType
        # Names reserved by this declaration; populated elsewhere and
        # enforced against interface members in checkCollisions().
        self.disallowedMemberNames = []
        self.disallowedNonMethodNames = []
    def isMaplike(self):
        # True when declared with the `maplike` keyword.
        return self.maplikeOrSetlikeOrIterableType == "maplike"
    def isSetlike(self):
        # True when declared with the `setlike` keyword.
        return self.maplikeOrSetlikeOrIterableType == "setlike"
    def isIterable(self):
        # True when declared with the `iterable` keyword.
        return self.maplikeOrSetlikeOrIterableType == "iterable"
    def hasKeyType(self):
        # False only for value-only iterables (see __init__).
        return self.keyType is not None
    def hasValueType(self):
        return self.valueType is not None
    def checkCollisions(self, members, isAncestor):
        """
        Raise WebIDLError when any of `members` collides with a name this
        maplike/setlike/iterable declaration reserves.
        """
        for member in members:
            # Check that there are no disallowed members
            if (member.identifier.name in self.disallowedMemberNames and
                not ((member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod()) or
                     (member.isAttr() and member.isMaplikeOrSetlikeAttr()))):
                raise WebIDLError("Member '%s' conflicts "
                                  "with reserved %s name." %
                                  (member.identifier.name,
                                   self.maplikeOrSetlikeOrIterableType),
                                  [self.location, member.location])
            # Check that there are no disallowed non-method members
            # NOTE(review): `and` binds tighter than `or`, so this parses as
            # isAncestor or ((attr-or-const) and name-in-disallowed), i.e.
            # for isAncestor=True the condition holds for *every* member.
            # Verify whether (isAncestor or attr-or-const) and
            # name-in-disallowed was intended.
            if (isAncestor or (member.isAttr() or member.isConst()) and
                member.identifier.name in self.disallowedNonMethodNames):
                raise WebIDLError("Member '%s' conflicts "
                                  "with reserved %s method." %
                                  (member.identifier.name,
                                   self.maplikeOrSetlikeOrIterableType),
                                  [self.location, member.location])
def addMethod(self, name, members, allowExistingOperations, returnType, args=[],
chromeOnly=False, isPure=False, affectsNothing=False, newObject=False,
isIteratorAlias=False):
"""
Create an IDLMethod based on the parameters passed in.
- members is the member list to add this function to, since this is
called during the member expansion portion of interface object
building.
- chromeOnly is only True for read-only js implemented classes, to
implement underscore prefixed convenience functions which would
otherwise not be available, unlike the case of C++ bindings.
- isPure is only True for idempotent functions, so it is not valid for
things like keys, values, etc. that return a new object every time.
- affectsNothing means that nothing changes due to this method, which
affects JIT optimization behavior
- newObject means the method creates and returns a new object.
"""
# Only add name to lists for collision checks if it's not chrome
# only.
if chromeOnly:
name = "__" + name
else:
if not allowExistingOperations:
self.disallowedMemberNames.append(name)
else:
self.disallowedNonMethodNames.append(name)
# If allowExistingOperations is True, and another operation exists
# with the same name as the one we're trying to add, don't add the
# maplike/setlike operation. However, if the operation is static,
# then fail by way of creating the function, which will cause a
# naming conflict, per the spec.
if allowExistingOperations:
for m in members:
if m.identifier.name == name and m.isMethod() and not m.isStatic():
return
method = IDLMethod(self.location,
IDLUnresolvedIdentifier(self.location, name, allowDoubleUnderscore=chromeOnly),
returnType, args, maplikeOrSetlikeOrIterable=self)
# We need to be able to throw from declaration methods
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("Throws",))])
if chromeOnly:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("ChromeOnly",))])
if isPure:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("Pure",))])
# Following attributes are used for keys/values/entries. Can't mark
# them pure, since they return a new object each time they are run.
if affectsNothing:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("DependsOn", "Everything")),
IDLExtendedAttribute(self.location, ("Affects", "Nothing"))])
if newObject:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("NewObject",))])
if isIteratorAlias:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))])
members.append(method)
def resolve(self, parentScope):
if self.keyType:
self.keyType.resolveType(parentScope)
if self.valueType:
self.valueType.resolveType(parentScope)
def finish(self, scope):
IDLInterfaceMember.finish(self, scope)
if self.keyType and not self.keyType.isComplete():
t = self.keyType.complete(scope)
assert not isinstance(t, IDLUnresolvedType)
assert not isinstance(t, IDLTypedefType)
assert not isinstance(t.name, IDLUnresolvedIdentifier)
self.keyType = t
if self.valueType and not self.valueType.isComplete():
t = self.valueType.complete(scope)
assert not isinstance(t, IDLUnresolvedType)
assert not isinstance(t, IDLTypedefType)
assert not isinstance(t.name, IDLUnresolvedIdentifier)
self.valueType = t
def validate(self):
IDLInterfaceMember.validate(self)
def handleExtendedAttribute(self, attr):
IDLInterfaceMember.handleExtendedAttribute(self, attr)
def _getDependentObjects(self):
deps = set()
if self.keyType:
deps.add(self.keyType)
if self.valueType:
deps.add(self.valueType)
return deps
def getForEachArguments(self):
return [IDLArgument(self.location,
IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"callback"),
BuiltinTypes[IDLBuiltinType.Types.object]),
IDLArgument(self.location,
IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"thisArg"),
BuiltinTypes[IDLBuiltinType.Types.any],
optional=True)]
# Iterable adds ES6 iterator style functions and traits
# (keys/values/entries/@@iterator) to an interface.
class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase):
    """
    Represents an 'iterable<...>' declaration, which adds the ES6 iteration
    protocol (keys/values/entries/@@iterator) to an interface.
    """
    def __init__(self, location, identifier, keyType, valueType=None, scope=None):
        IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier,
                                                   "iterable", keyType, valueType,
                                                   IDLInterfaceMember.Tags.Iterable)
        # Filled in later with the iterator interface's type.
        self.iteratorType = None
    def __str__(self):
        return ("declared iterable with key '{}' and value '{}'"
                .format(self.keyType, self.valueType))
    def expand(self, members, isJSImplemented):
        """
        Synthesize the iterator methods as ordinary interface members so the
        regular method machinery in Codegen can handle them.
        """
        # Value iterators copy entries/keys/values from %ArrayPrototype%,
        # so only pair iterators need the synthesized methods.
        if self.isValueIterator():
            return
        # object entries() / keys() / values(); entries() is @@iterator.
        for methodName, aliased in (("entries", True),
                                    ("keys", False),
                                    ("values", False)):
            self.addMethod(methodName, members, False, self.iteratorType,
                           affectsNothing=True, newObject=True,
                           isIteratorAlias=aliased)
        # void forEach(callback(valueType, keyType), optional any thisArg)
        self.addMethod("forEach", members, False,
                       BuiltinTypes[IDLBuiltinType.Types.void],
                       self.getForEachArguments())
    def isPairIterator(self):
        # Pair iterators are exactly those that declared a key type.
        return self.hasKeyType()
    def isValueIterator(self):
        return not self.isPairIterator()
# MaplikeOrSetlike adds ES6 map-or-set-like traits to an interface.
class IDLMaplikeOrSetlike(IDLMaplikeOrSetlikeOrIterableBase):
    """
    Represents a maplike/setlike declaration on an interface; expands into
    the spec-defined members (size, entries, has, get/set/add, ...) during
    interface member expansion.
    """
    def __init__(self, location, identifier, maplikeOrSetlikeType,
                 readonly, keyType, valueType):
        IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier, maplikeOrSetlikeType,
                                                   keyType, valueType, IDLInterfaceMember.Tags.MaplikeOrSetlike)
        self.readonly = readonly
        self.slotIndices = None
        # When generating JSAPI access code, we need to know the backing object
        # type prefix to create the correct function. Generate here for reuse.
        if self.isMaplike():
            self.prefix = 'Map'
        elif self.isSetlike():
            self.prefix = 'Set'
    def __str__(self):
        return "declared '%s' with key '%s'" % (self.maplikeOrSetlikeOrIterableType, self.keyType)
    def expand(self, members, isJSImplemented):
        """
        In order to take advantage of all of the method machinery in Codegen,
        we generate our functions as if they were part of the interface
        specification during parsing.
        """
        # Both maplike and setlike have a size attribute
        members.append(IDLAttribute(self.location,
                                    IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"), "size"),
                                    BuiltinTypes[IDLBuiltinType.Types.unsigned_long],
                                    True,
                                    maplikeOrSetlike=self))
        self.reserved_ro_names = ["size"]
        # object entries()
        self.addMethod("entries", members, False, BuiltinTypes[IDLBuiltinType.Types.object],
                       affectsNothing=True, isIteratorAlias=self.isMaplike())
        # object keys()
        self.addMethod("keys", members, False, BuiltinTypes[IDLBuiltinType.Types.object],
                       affectsNothing=True)
        # object values()
        self.addMethod("values", members, False, BuiltinTypes[IDLBuiltinType.Types.object],
                       affectsNothing=True, isIteratorAlias=self.isSetlike())
        # void forEach(callback(valueType, keyType), thisVal)
        self.addMethod("forEach", members, False, BuiltinTypes[IDLBuiltinType.Types.void],
                       self.getForEachArguments())
        # Helper: synthesize the 'key' argument used by several methods below.
        def getKeyArg():
            return IDLArgument(self.location,
                               IDLUnresolvedIdentifier(self.location, "key"),
                               self.keyType)
        # boolean has(keyType key)
        self.addMethod("has", members, False, BuiltinTypes[IDLBuiltinType.Types.boolean],
                       [getKeyArg()], isPure=True)
        if not self.readonly:
            # void clear()
            self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.void],
                           [])
            # boolean delete(keyType key)
            self.addMethod("delete", members, True,
                           BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()])
        # Always generate underscored functions (e.g. __add, __clear) for js
        # implemented interfaces as convenience functions.
        if isJSImplemented:
            # void clear()
            self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.void],
                           [], chromeOnly=True)
            # boolean delete(keyType key)
            self.addMethod("delete", members, True,
                           BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()],
                           chromeOnly=True)
        if self.isSetlike():
            if not self.readonly:
                # Add returns the set object it just added to.
                # object add(keyType key)
                self.addMethod("add", members, True,
                               BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()])
            if isJSImplemented:
                self.addMethod("add", members, True,
                               BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()],
                               chromeOnly=True)
            return
        # If we get this far, we're a maplike declaration.
        # valueType get(keyType key)
        #
        # Note that instead of the value type, we're using any here. The
        # validity checks should happen as things are inserted into the map,
        # and using any as the return type makes code generation much simpler.
        #
        # TODO: Bug 1155340 may change this to use specific type to provide
        # more info to JIT.
        self.addMethod("get", members, False, BuiltinTypes[IDLBuiltinType.Types.any],
                       [getKeyArg()], isPure=True)
        # Helper: synthesize the 'value' argument for set().
        def getValueArg():
            return IDLArgument(self.location,
                               IDLUnresolvedIdentifier(self.location, "value"),
                               self.valueType)
        if not self.readonly:
            self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object],
                           [getKeyArg(), getValueArg()])
            if isJSImplemented:
                self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object],
                               [getKeyArg(), getValueArg()], chromeOnly=True)
class IDLConst(IDLInterfaceMember):
    """An interface constant: a named, typed value declared with 'const'."""
    def __init__(self, location, identifier, type, value):
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Const)
        assert isinstance(type, IDLType)
        if type.isDictionary():
            raise WebIDLError("A constant cannot be of a dictionary type",
                              [self.location])
        self.type = type
        self.value = value
        if identifier.name == "prototype":
            raise WebIDLError("The identifier of a constant must not be 'prototype'",
                              [location])
    def __str__(self):
        return "'%s' const '%s'" % (self.type, self.identifier)
    def finish(self, scope):
        """Complete this constant's type and coerce its value to that type."""
        IDLInterfaceMember.finish(self, scope)
        if not self.type.isComplete():
            type = self.type.complete(scope)
            if not type.isPrimitive() and not type.isString():
                locations = [self.type.location, type.location]
                # Also point at the inner type's location when there is one.
                # Narrowed from a bare 'except:' so only the expected missing
                # 'inner' attribute is swallowed, not unrelated errors.
                try:
                    locations.append(type.inner.location)
                except AttributeError:
                    pass
                raise WebIDLError("Incorrect type for constant", locations)
            self.type = type
        # The value might not match the type
        coercedValue = self.value.coerceToType(self.type, self.location)
        assert coercedValue
        self.value = coercedValue
    def validate(self):
        IDLInterfaceMember.validate(self)
    def handleExtendedAttribute(self, attr):
        identifier = attr.identifier()
        if identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif identifier in ("Pref",
                            "ChromeOnly",
                            "Func",
                            "SecureContext",
                            "AvailableIn",
                            "CheckAnyPermissions",
                            "CheckAllPermissions"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on constant" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)
    def _getDependentObjects(self):
        # A constant depends on its type and its value.
        return set([self.type, self.value])
class IDLAttribute(IDLInterfaceMember):
    """
    Represents a WebIDL attribute on an interface: its type, the
    readonly/inherit/static/stringifier flags, and the extended attributes
    that control binding code generation.
    """
    def __init__(self, location, identifier, type, readonly, inherit=False,
                 static=False, stringifier=False, maplikeOrSetlike=None,
                 extendedAttrDict=None, navigatorObjectGetter=False):
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Attr,
                                    extendedAttrDict=extendedAttrDict)
        assert isinstance(type, IDLType)
        self.type = type
        self.readonly = readonly
        self.inherit = inherit
        self._static = static
        # Set to True by [LenientThis].
        self.lenientThis = False
        # Set to True by [Unforgeable].
        self._unforgeable = False
        self.stringifier = stringifier
        # Set by [EnforceRange] / [Clamp] respectively.
        self.enforceRange = False
        self.clamp = False
        self.slotIndices = None
        assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike)
        # Non-None when this attribute was synthesized for a maplike/setlike
        # declaration (e.g. the auto-generated 'size' attribute).
        self.maplikeOrSetlike = maplikeOrSetlike
        # [DependsOn]/[Affects] state used for JIT optimization hints.
        self.dependsOn = "Everything"
        self.affects = "Everything"
        self.navigatorObjectGetter = navigatorObjectGetter
        if static and identifier.name == "prototype":
            raise WebIDLError("The identifier of a static attribute must not be 'prototype'",
                              [location])
        if readonly and inherit:
            raise WebIDLError("An attribute cannot be both 'readonly' and 'inherit'",
                              [self.location])
    def isStatic(self):
        return self._static
    def forceStatic(self):
        # Used to mark the attribute static after construction.
        self._static = True
    def __str__(self):
        return "'%s' attribute '%s'" % (self.type, self.identifier)
    def finish(self, scope):
        """Complete the attribute's type and enforce type restrictions."""
        IDLInterfaceMember.finish(self, scope)
        if not self.type.isComplete():
            t = self.type.complete(scope)
            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.type = t
        if self.type.isDictionary() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("An attribute cannot be of a dictionary type",
                              [self.location])
        if self.type.isSequence() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("A non-cached attribute cannot be of a sequence "
                              "type", [self.location])
        if self.type.isMozMap() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("A non-cached attribute cannot be of a MozMap "
                              "type", [self.location])
        if self.type.isUnion():
            # Union-typed attributes may not contain dictionary, sequence,
            # or MozMap member types, even nested.
            for f in self.type.unroll().flatMemberTypes:
                if f.isDictionary():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a dictionary "
                                      "type", [self.location, f.location])
                if f.isSequence():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a sequence "
                                      "type", [self.location, f.location])
                if f.isMozMap():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a MozMap "
                                      "type", [self.location, f.location])
        if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"):
            raise WebIDLError("An attribute with [PutForwards] must have an "
                              "interface type as its type", [self.location])
        if not self.type.isInterface() and self.getExtendedAttribute("SameObject"):
            raise WebIDLError("An attribute with [SameObject] must have an "
                              "interface type as its type", [self.location])
    def validate(self):
        """Validate [Cached]/[StoreInSlot]/[Frozen] usage and exposure."""
        # Helper: walk a type looking for a dictionary (at any nesting depth)
        # that has a [ChromeOnly] member; returns (found, location).
        def typeContainsChromeOnlyDictionaryMember(type):
            if (type.nullable() or
                type.isSequence() or
                type.isMozMap()):
                return typeContainsChromeOnlyDictionaryMember(type.inner)
            if type.isUnion():
                for memberType in type.flatMemberTypes:
                    (contains, location) = typeContainsChromeOnlyDictionaryMember(memberType)
                    if contains:
                        return (True, location)
            if type.isDictionary():
                # Walk the dictionary and all its ancestors.
                dictionary = type.inner
                while dictionary:
                    (contains, location) = dictionaryContainsChromeOnlyMember(dictionary)
                    if contains:
                        return (True, location)
                    dictionary = dictionary.parent
            return (False, None)
        # Helper: check one dictionary's own members (mutually recursive with
        # the type walker above).
        def dictionaryContainsChromeOnlyMember(dictionary):
            for member in dictionary.members:
                if member.getExtendedAttribute("ChromeOnly"):
                    return (True, member.location)
                (contains, location) = typeContainsChromeOnlyDictionaryMember(member.type)
                if contains:
                    return (True, location)
            return (False, None)
        IDLInterfaceMember.validate(self)
        if (self.getExtendedAttribute("Cached") or
            self.getExtendedAttribute("StoreInSlot")):
            if not self.affects == "Nothing":
                raise WebIDLError("Cached attributes and attributes stored in "
                                  "slots must be Constant or Pure or "
                                  "Affects=Nothing, since the getter won't always "
                                  "be called.",
                                  [self.location])
            (contains, location) = typeContainsChromeOnlyDictionaryMember(self.type)
            if contains:
                raise WebIDLError("[Cached] and [StoreInSlot] must not be used "
                                  "on an attribute whose type contains a "
                                  "[ChromeOnly] dictionary member",
                                  [self.location, location])
        if self.getExtendedAttribute("Frozen"):
            if (not self.type.isSequence() and not self.type.isDictionary() and
                not self.type.isMozMap()):
                raise WebIDLError("[Frozen] is only allowed on "
                                  "sequence-valued, dictionary-valued, and "
                                  "MozMap-valued attributes",
                                  [self.location])
        if not self.type.unroll().isExposedInAllOf(self.exposureSet):
            raise WebIDLError("Attribute returns a type that is not exposed "
                              "everywhere where the attribute is exposed",
                              [self.location])
    def handleExtendedAttribute(self, attr):
        """
        Validate one extended attribute against this attribute's flags and
        previously-seen extended attributes, updating internal state
        (lenientThis, enforceRange, clamp, dependsOn, affects, ...).
        """
        identifier = attr.identifier()
        if identifier == "SetterThrows" and self.readonly:
            raise WebIDLError("Readonly attributes must not be flagged as "
                              "[SetterThrows]",
                              [self.location])
        elif (((identifier == "Throws" or identifier == "GetterThrows") and
               self.getExtendedAttribute("StoreInSlot")) or
              (identifier == "StoreInSlot" and
               (self.getExtendedAttribute("Throws") or
                self.getExtendedAttribute("GetterThrows")))):
            raise WebIDLError("Throwing things can't be [StoreInSlot]",
                              [attr.location])
        elif identifier == "LenientThis":
            if not attr.noArguments():
                raise WebIDLError("[LenientThis] must take no arguments",
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[LenientThis] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("CrossOriginReadable"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [CrossOriginReadable]",
                                  [attr.location, self.location])
            if self.getExtendedAttribute("CrossOriginWritable"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [CrossOriginWritable]",
                                  [attr.location, self.location])
            self.lenientThis = True
        elif identifier == "Unforgeable":
            if self.isStatic():
                raise WebIDLError("[Unforgeable] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            self._unforgeable = True
        elif identifier == "SameObject" and not self.readonly:
            raise WebIDLError("[SameObject] only allowed on readonly attributes",
                              [attr.location, self.location])
        elif identifier == "Constant" and not self.readonly:
            raise WebIDLError("[Constant] only allowed on readonly attributes",
                              [attr.location, self.location])
        elif identifier == "PutForwards":
            if not self.readonly:
                raise WebIDLError("[PutForwards] is only allowed on readonly "
                                  "attributes", [attr.location, self.location])
            if self.isStatic():
                raise WebIDLError("[PutForwards] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("Replaceable") is not None:
                raise WebIDLError("[PutForwards] and [Replaceable] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
            if not attr.hasValue():
                raise WebIDLError("[PutForwards] takes an identifier",
                                  [attr.location, self.location])
        elif identifier == "Replaceable":
            if not attr.noArguments():
                raise WebIDLError("[Replaceable] must take no arguments",
                                  [attr.location])
            if not self.readonly:
                raise WebIDLError("[Replaceable] is only allowed on readonly "
                                  "attributes", [attr.location, self.location])
            if self.isStatic():
                raise WebIDLError("[Replaceable] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("PutForwards") is not None:
                raise WebIDLError("[PutForwards] and [Replaceable] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
        elif identifier == "LenientSetter":
            if not attr.noArguments():
                raise WebIDLError("[LenientSetter] must take no arguments",
                                  [attr.location])
            if not self.readonly:
                raise WebIDLError("[LenientSetter] is only allowed on readonly "
                                  "attributes", [attr.location, self.location])
            if self.isStatic():
                raise WebIDLError("[LenientSetter] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("PutForwards") is not None:
                raise WebIDLError("[LenientSetter] and [PutForwards] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
            if self.getExtendedAttribute("Replaceable") is not None:
                raise WebIDLError("[LenientSetter] and [Replaceable] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
        elif identifier == "LenientFloat":
            if self.readonly:
                raise WebIDLError("[LenientFloat] used on a readonly attribute",
                                  [attr.location, self.location])
            if not self.type.includesRestrictedFloat():
                raise WebIDLError("[LenientFloat] used on an attribute with a "
                                  "non-restricted-float type",
                                  [attr.location, self.location])
        elif identifier == "EnforceRange":
            if self.readonly:
                raise WebIDLError("[EnforceRange] used on a readonly attribute",
                                  [attr.location, self.location])
            self.enforceRange = True
        elif identifier == "Clamp":
            if self.readonly:
                raise WebIDLError("[Clamp] used on a readonly attribute",
                                  [attr.location, self.location])
            self.clamp = True
        elif identifier == "StoreInSlot":
            if self.getExtendedAttribute("Cached"):
                raise WebIDLError("[StoreInSlot] and [Cached] must not be "
                                  "specified on the same attribute",
                                  [attr.location, self.location])
        elif identifier == "Cached":
            if self.getExtendedAttribute("StoreInSlot"):
                raise WebIDLError("[Cached] and [StoreInSlot] must not be "
                                  "specified on the same attribute",
                                  [attr.location, self.location])
        elif (identifier == "CrossOriginReadable" or
              identifier == "CrossOriginWritable"):
            if not attr.noArguments() and identifier == "CrossOriginReadable":
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[%s] is only allowed on non-static "
                                  "attributes" % identifier,
                                  [attr.location, self.location])
            if self.getExtendedAttribute("LenientThis"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [%s]" % identifier,
                                  [attr.location, self.location])
        elif identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif identifier == "Pure":
            if not attr.noArguments():
                raise WebIDLError("[Pure] must take no arguments",
                                  [attr.location])
            # [Pure] == [DependsOn=DOMState, Affects=Nothing].
            self._setDependsOn("DOMState")
            self._setAffects("Nothing")
        elif identifier == "Constant" or identifier == "SameObject":
            if not attr.noArguments():
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
            # [Constant]/[SameObject] == [DependsOn=Nothing, Affects=Nothing].
            self._setDependsOn("Nothing")
            self._setAffects("Nothing")
        elif identifier == "Affects":
            if not attr.hasValue():
                raise WebIDLError("[Affects] takes an identifier",
                                  [attr.location])
            self._setAffects(attr.value())
        elif identifier == "DependsOn":
            if not attr.hasValue():
                raise WebIDLError("[DependsOn] takes an identifier",
                                  [attr.location])
            if (attr.value() != "Everything" and attr.value() != "DOMState" and
                not self.readonly):
                raise WebIDLError("[DependsOn=%s] only allowed on "
                                  "readonly attributes" % attr.value(),
                                  [attr.location, self.location])
            self._setDependsOn(attr.value())
        elif identifier == "UseCounter":
            if self.stringifier:
                raise WebIDLError("[UseCounter] must not be used on a "
                                  "stringifier attribute",
                                  [attr.location, self.location])
        elif identifier == "Unscopable":
            if not attr.noArguments():
                raise WebIDLError("[Unscopable] must take no arguments",
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[Unscopable] is only allowed on non-static "
                                  "attributes and operations",
                                  [attr.location, self.location])
        elif (identifier == "Pref" or
              identifier == "Deprecated" or
              identifier == "SetterThrows" or
              identifier == "Throws" or
              identifier == "GetterThrows" or
              identifier == "ChromeOnly" or
              identifier == "Func" or
              identifier == "SecureContext" or
              identifier == "Frozen" or
              identifier == "AvailableIn" or
              identifier == "NewObject" or
              identifier == "UnsafeInPrerendering" or
              identifier == "CheckAnyPermissions" or
              identifier == "CheckAllPermissions" or
              identifier == "BinaryName"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on attribute" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)
    def resolve(self, parentScope):
        # Resolve the attribute's type and then the attribute itself in the
        # enclosing scope.
        assert isinstance(parentScope, IDLScope)
        self.type.resolveType(parentScope)
        IDLObjectWithIdentifier.resolve(self, parentScope)
    def addExtendedAttributes(self, attrs):
        # Peel off string-handling attributes ([TreatNullAs] etc.) before
        # dispatching the rest to the base class.
        attrs = self.checkForStringHandlingExtendedAttributes(attrs)
        IDLInterfaceMember.addExtendedAttributes(self, attrs)
    def hasLenientThis(self):
        return self.lenientThis
    def isMaplikeOrSetlikeAttr(self):
        """
        True if this attribute was generated from an interface with
        maplike/setlike (e.g. this is the size attribute for
        maplike/setlike)
        """
        return self.maplikeOrSetlike is not None
    def isUnforgeable(self):
        return self._unforgeable
    def _getDependentObjects(self):
        # An attribute depends only on its type.
        return set([self.type])
class IDLArgument(IDLObjectWithIdentifier):
    """
    Represents a single argument of an operation or callback.  Dictionary
    members are also parsed as arguments, with dictionaryMember=True.
    """
    def __init__(self, location, identifier, type, optional=False, defaultValue=None, variadic=False, dictionaryMember=False):
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)
        assert isinstance(type, IDLType)
        self.type = type
        self.optional = optional
        self.defaultValue = defaultValue
        self.variadic = variadic
        self.dictionaryMember = dictionaryMember
        self._isComplete = False
        # Set by [EnforceRange] / [Clamp] respectively.
        self.enforceRange = False
        self.clamp = False
        self._allowTreatNonCallableAsNull = False
        self._extendedAttrDict = {}
        # Variadic arguments must be optional and cannot carry a default.
        assert not variadic or optional
        assert not variadic or not defaultValue
    def addExtendedAttributes(self, attrs):
        """Process extended attributes valid on arguments/dictionary members."""
        attrs = self.checkForStringHandlingExtendedAttributes(
            attrs,
            isDictionaryMember=self.dictionaryMember,
            isOptional=self.optional)
        for attribute in attrs:
            identifier = attribute.identifier()
            if identifier == "Clamp":
                if not attribute.noArguments():
                    raise WebIDLError("[Clamp] must take no arguments",
                                      [attribute.location])
                # [Clamp] and [EnforceRange] are mutually exclusive.
                if self.enforceRange:
                    raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
                                      [self.location])
                self.clamp = True
            elif identifier == "EnforceRange":
                if not attribute.noArguments():
                    raise WebIDLError("[EnforceRange] must take no arguments",
                                      [attribute.location])
                if self.clamp:
                    raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
                                      [self.location])
                self.enforceRange = True
            elif identifier == "TreatNonCallableAsNull":
                self._allowTreatNonCallableAsNull = True
            elif (self.dictionaryMember and
                  (identifier == "ChromeOnly" or identifier == "Func")):
                if not self.optional:
                    raise WebIDLError("[%s] must not be used on a required "
                                      "dictionary member" % identifier,
                                      [attribute.location])
            else:
                raise WebIDLError("Unhandled extended attribute on %s" %
                                  ("a dictionary member" if self.dictionaryMember else
                                   "an argument"),
                                  [attribute.location])
            attrlist = attribute.listValue()
            self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
    def getExtendedAttribute(self, name):
        return self._extendedAttrDict.get(name, None)
    def isComplete(self):
        return self._isComplete
    def complete(self, scope):
        """
        Complete this argument's type, then synthesize implicit default
        values (null for optional dictionaries, undefined for optional
        'any') and coerce any default value to the completed type.
        """
        if self._isComplete:
            return
        self._isComplete = True
        if not self.type.isComplete():
            type = self.type.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            self.type = type
        if ((self.type.isDictionary() or
             self.type.isUnion() and self.type.unroll().hasDictionaryType()) and
            self.optional and not self.defaultValue and not self.variadic):
            # Default optional non-variadic dictionaries to null,
            # for simplicity, so the codegen doesn't have to special-case this.
            self.defaultValue = IDLNullValue(self.location)
        elif self.type.isAny():
            assert (self.defaultValue is None or
                    isinstance(self.defaultValue, IDLNullValue))
            # optional 'any' values always have a default value
            if self.optional and not self.defaultValue and not self.variadic:
                # Set the default value to undefined, for simplicity, so the
                # codegen doesn't have to special-case this.
                self.defaultValue = IDLUndefinedValue(self.location)
        # Now do the coercing thing; this needs to happen after the
        # above creation of a default value.
        if self.defaultValue:
            self.defaultValue = self.defaultValue.coerceToType(self.type,
                                                               self.location)
            assert self.defaultValue
    def allowTreatNonCallableAsNull(self):
        return self._allowTreatNonCallableAsNull
    def _getDependentObjects(self):
        # An argument depends on its type and (if any) its default value.
        deps = set([self.type])
        if self.defaultValue:
            deps.add(self.defaultValue)
        return deps
    def canHaveMissingValue(self):
        return self.optional and not self.defaultValue
class IDLCallback(IDLObjectWithScope):
    """Represents a WebIDL 'callback' function declaration."""
    def __init__(self, location, parentScope, identifier, returnType, arguments):
        assert isinstance(returnType, IDLType)
        self._returnType = returnType
        # Clone the list
        self._arguments = list(arguments)
        IDLObjectWithScope.__init__(self, location, parentScope, identifier)
        # Resolve each argument within this callback's own scope.
        for (returnType, arguments) in self.signatures():
            for argument in arguments:
                argument.resolve(self)
        # Set by [TreatNonCallableAsNull] / [TreatNonObjectAsNull].
        self._treatNonCallableAsNull = False
        self._treatNonObjectAsNull = False
    def isCallback(self):
        return True
    def signatures(self):
        # A callback has exactly one signature (no overloads).
        return [(self._returnType, self._arguments)]
    def finish(self, scope):
        """Complete the return type and every argument type."""
        if not self._returnType.isComplete():
            type = self._returnType.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            self._returnType = type
        for argument in self._arguments:
            if argument.type.isComplete():
                continue
            type = argument.type.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            argument.type = type
    def validate(self):
        pass
    def addExtendedAttributes(self, attrs):
        """Handle the Treat*AsNull attributes; forward the rest."""
        unhandledAttrs = []
        for attr in attrs:
            if attr.identifier() == "TreatNonCallableAsNull":
                self._treatNonCallableAsNull = True
            elif attr.identifier() == "TreatNonObjectAsNull":
                self._treatNonObjectAsNull = True
            else:
                unhandledAttrs.append(attr)
        if self._treatNonCallableAsNull and self._treatNonObjectAsNull:
            raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] "
                              "and [TreatNonObjectAsNull]", [self.location])
        if len(unhandledAttrs) != 0:
            # NOTE(review): this forwards leftover attrs to
            # IDLType.addExtendedAttributes even though self is not an
            # IDLType — presumably to reject them; confirm intended.
            IDLType.addExtendedAttributes(self, unhandledAttrs)
    def _getDependentObjects(self):
        # A callback depends on its return type and all of its arguments.
        return set([self._returnType] + self._arguments)
class IDLCallbackType(IDLType):
    """The type corresponding to a declared callback function."""
    def __init__(self, location, callback):
        IDLType.__init__(self, location, callback.identifier.name)
        # The IDLCallback this type refers to.
        self.callback = callback
    def isCallback(self):
        return True
    def tag(self):
        return IDLType.Tags.callback
    def isDistinguishableFrom(self, other):
        """Per spec, callbacks are distinguishable from non-callback-like types."""
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        # Distinguishable from primitives, strings, enums, non-callback
        # interfaces, dates and sequences.
        for check in (other.isPrimitive, other.isString, other.isEnum,
                      other.isNonCallbackInterface, other.isDate,
                      other.isSequence):
            if check():
                return True
        return False
    def _getDependentObjects(self):
        # Delegate to the underlying callback's dependencies.
        return self.callback._getDependentObjects()
class IDLMethodOverload:
    """
    A class that represents a single overload of a WebIDL method. This is not
    quite the same as an element of the "effective overload set" in the spec,
    because separate IDLMethodOverloads are not created based on arguments being
    optional. Rather, when multiple methods have the same name, there is an
    IDLMethodOverload for each one, all hanging off an IDLMethod representing
    the full set of overloads.
    """
    def __init__(self, returnType, arguments, location):
        self.returnType = returnType
        # Defensive copy: later mutation of the caller's list must not
        # affect this overload.
        self.arguments = arguments[:]
        self.location = location
    def _getDependentObjects(self):
        # This overload depends on its return type and every argument.
        return set(self.arguments) | {self.returnType}
class IDLMethod(IDLInterfaceMember, IDLScope):
    """
    A WebIDL operation (regular, static, or special).  All overloads that
    share one name hang off a single IDLMethod as IDLMethodOverload objects;
    the IDLScope base class is used so argument identifiers resolve inside
    the method.
    """
    # Special-operation kinds, extending the base member specials.
    Special = enum(
        'Getter',
        'Setter',
        'Creator',
        'Deleter',
        'LegacyCaller',
        base=IDLInterfaceMember.Special
    )
    TypeSuffixModifier = enum(
        'None',
        'QMark',
        'Brackets'
    )
    # Whether a special operation is named (string-keyed) or indexed.
    NamedOrIndexed = enum(
        'Neither',
        'Named',
        'Indexed'
    )
    def __init__(self, location, identifier, returnType, arguments,
                 static=False, getter=False, setter=False, creator=False,
                 deleter=False, specialType=NamedOrIndexed.Neither,
                 legacycaller=False, stringifier=False, jsonifier=False,
                 maplikeOrSetlikeOrIterable=None):
        # REVIEW: specialType is NamedOrIndexed -- wow, this is messed up.
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Method)
        self._hasOverloads = False
        assert isinstance(returnType, IDLType)
        # self._overloads is a list of IDLMethodOverloads
        self._overloads = [IDLMethodOverload(returnType, arguments, location)]
        assert isinstance(static, bool)
        self._static = static
        assert isinstance(getter, bool)
        self._getter = getter
        assert isinstance(setter, bool)
        self._setter = setter
        assert isinstance(creator, bool)
        self._creator = creator
        assert isinstance(deleter, bool)
        self._deleter = deleter
        assert isinstance(legacycaller, bool)
        self._legacycaller = legacycaller
        assert isinstance(stringifier, bool)
        self._stringifier = stringifier
        assert isinstance(jsonifier, bool)
        self._jsonifier = jsonifier
        assert maplikeOrSetlikeOrIterable is None or isinstance(maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase)
        # Non-None when this method was synthesized for a maplike/setlike/
        # iterable declaration.
        self.maplikeOrSetlikeOrIterable = maplikeOrSetlikeOrIterable
        self._specialType = specialType
        self._unforgeable = False
        # Conservative defaults; narrowed by [Pure]/[Affects]/[DependsOn].
        self.dependsOn = "Everything"
        self.affects = "Everything"
        self.aliases = []
        if static and identifier.name == "prototype":
            raise WebIDLError("The identifier of a static operation must not be 'prototype'",
                              [location])
        self.assertSignatureConstraints()
    def __str__(self):
        return "Method '%s'" % self.identifier
    def assertSignatureConstraints(self):
        # Sanity-check the fixed signatures the spec requires of special
        # operations (getter/deleter take one string-or-index argument,
        # setter/creator take two, stringifier/jsonifier take none).
        if self._getter or self._deleter:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            arguments = overload.arguments
            assert len(arguments) == 1
            assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
                    arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
            assert not arguments[0].optional and not arguments[0].variadic
            assert not self._getter or not overload.returnType.isVoid()
        if self._setter or self._creator:
            assert len(self._overloads) == 1
            arguments = self._overloads[0].arguments
            assert len(arguments) == 2
            assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
                    arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
            assert not arguments[0].optional and not arguments[0].variadic
            assert not arguments[1].optional and not arguments[1].variadic
        if self._stringifier:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            assert len(overload.arguments) == 0
            assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring]
        if self._jsonifier:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            assert len(overload.arguments) == 0
            assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.object]
    def isStatic(self):
        return self._static
    def forceStatic(self):
        self._static = True
    def isGetter(self):
        return self._getter
    def isSetter(self):
        return self._setter
    def isCreator(self):
        return self._creator
    def isDeleter(self):
        return self._deleter
    def isNamed(self):
        assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
                self._specialType == IDLMethod.NamedOrIndexed.Indexed)
        return self._specialType == IDLMethod.NamedOrIndexed.Named
    def isIndexed(self):
        assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
                self._specialType == IDLMethod.NamedOrIndexed.Indexed)
        return self._specialType == IDLMethod.NamedOrIndexed.Indexed
    def isLegacycaller(self):
        return self._legacycaller
    def isStringifier(self):
        return self._stringifier
    def isJsonifier(self):
        return self._jsonifier
    def isMaplikeOrSetlikeOrIterableMethod(self):
        """
        True if this method was generated as part of a
        maplike/setlike/etc interface (e.g. has/get methods)
        """
        return self.maplikeOrSetlikeOrIterable is not None
    def isSpecial(self):
        return (self.isGetter() or
                self.isSetter() or
                self.isCreator() or
                self.isDeleter() or
                self.isLegacycaller() or
                self.isStringifier() or
                self.isJsonifier())
    def hasOverloads(self):
        return self._hasOverloads
    def isIdentifierLess(self):
        """
        True if the method name started with __, and if the method is not a
        maplike/setlike method. Interfaces with maplike/setlike will generate
        methods starting with __ for chrome only backing object access in JS
        implemented interfaces, so while these functions use what is considered
        an non-identifier name, they actually DO have an identifier.
        """
        return (self.identifier.name[:2] == "__" and
                not self.isMaplikeOrSetlikeOrIterableMethod())
    def resolve(self, parentScope):
        # Register ourselves in parentScope, then become a scope so our
        # argument identifiers can resolve inside us.
        assert isinstance(parentScope, IDLScope)
        IDLObjectWithIdentifier.resolve(self, parentScope)
        IDLScope.__init__(self, self.location, parentScope, self.identifier)
        for (returnType, arguments) in self.signatures():
            for argument in arguments:
                argument.resolve(self)
    def addOverload(self, method):
        # Merge a freshly-parsed same-named method into this one's overload
        # set.  Extended attributes and static/legacycaller flags must agree;
        # special operations can never be overloaded.
        assert len(method._overloads) == 1
        # NOTE(review): stray space before "._extendedAttrDict" below is
        # valid Python, just odd formatting.
        if self._extendedAttrDict != method ._extendedAttrDict:
            raise WebIDLError("Extended attributes differ on different "
                              "overloads of %s" % method.identifier,
                              [self.location, method.location])
        self._overloads.extend(method._overloads)
        self._hasOverloads = True
        if self.isStatic() != method.isStatic():
            raise WebIDLError("Overloaded identifier %s appears with different values of the 'static' attribute" % method.identifier,
                              [method.location])
        if self.isLegacycaller() != method.isLegacycaller():
            raise WebIDLError("Overloaded identifier %s appears with different values of the 'legacycaller' attribute" % method.identifier,
                              [method.location])
        # Can't overload special things!
        assert not self.isGetter()
        assert not method.isGetter()
        assert not self.isSetter()
        assert not method.isSetter()
        assert not self.isCreator()
        assert not method.isCreator()
        assert not self.isDeleter()
        assert not method.isDeleter()
        assert not self.isStringifier()
        assert not method.isStringifier()
        assert not self.isJsonifier()
        assert not method.isJsonifier()
        return self
    def signatures(self):
        # List of (returnType, arguments) for every overload.
        return [(overload.returnType, overload.arguments) for overload in
                self._overloads]
    def finish(self, scope):
        # Complete all types in all overloads, then precompute the argument
        # counts used by the overload-resolution checks in validate().
        IDLInterfaceMember.finish(self, scope)
        for overload in self._overloads:
            returnType = overload.returnType
            if not returnType.isComplete():
                returnType = returnType.complete(scope)
                assert not isinstance(returnType, IDLUnresolvedType)
                assert not isinstance(returnType, IDLTypedefType)
                assert not isinstance(returnType.name, IDLUnresolvedIdentifier)
                overload.returnType = returnType
            for argument in overload.arguments:
                if not argument.isComplete():
                    argument.complete(scope)
                assert argument.type.isComplete()
        # Now compute various information that will be used by the
        # WebIDL overload resolution algorithm.
        self.maxArgCount = max(len(s[1]) for s in self.signatures())
        self.allowedArgCounts = [i for i in range(self.maxArgCount+1)
                                 if len(self.signaturesForArgCount(i)) != 0]
    def validate(self):
        IDLInterfaceMember.validate(self)
        # Make sure our overloads are properly distinguishable and don't have
        # different argument types before the distinguishing args.
        for argCount in self.allowedArgCounts:
            possibleOverloads = self.overloadsForArgCount(argCount)
            if len(possibleOverloads) == 1:
                continue
            distinguishingIndex = self.distinguishingIndexForArgCount(argCount)
            for idx in range(distinguishingIndex):
                firstSigType = possibleOverloads[0].arguments[idx].type
                for overload in possibleOverloads[1:]:
                    if overload.arguments[idx].type != firstSigType:
                        raise WebIDLError(
                            "Signatures for method '%s' with %d arguments have "
                            "different types of arguments at index %d, which "
                            "is before distinguishing index %d" %
                            (self.identifier.name, argCount, idx,
                             distinguishingIndex),
                            [self.location, overload.location])
        overloadWithPromiseReturnType = None
        overloadWithoutPromiseReturnType = None
        for overload in self._overloads:
            returnType = overload.returnType
            if not returnType.unroll().isExposedInAllOf(self.exposureSet):
                raise WebIDLError("Overload returns a type that is not exposed "
                                  "everywhere where the method is exposed",
                                  [overload.location])
            variadicArgument = None
            arguments = overload.arguments
            for (idx, argument) in enumerate(arguments):
                assert argument.type.isComplete()
                if ((argument.type.isDictionary() and
                     argument.type.inner.canBeEmpty())or
                    (argument.type.isUnion() and
                     argument.type.unroll().hasPossiblyEmptyDictionaryType())):
                    # Optional dictionaries and unions containing optional
                    # dictionaries at the end of the list or followed by
                    # optional arguments must be optional.
                    if (not argument.optional and
                        all(arg.optional for arg in arguments[idx+1:])):
                        raise WebIDLError("Dictionary argument or union "
                                          "argument containing a dictionary "
                                          "not followed by a required argument "
                                          "must be optional",
                                          [argument.location])
                # An argument cannot be a Nullable Dictionary
                if argument.type.nullable():
                    raise WebIDLError("An argument cannot be a nullable "
                                      "dictionary or nullable union "
                                      "containing a dictionary",
                                      [argument.location])
                # Only the last argument can be variadic
                if variadicArgument:
                    raise WebIDLError("Variadic argument is not last argument",
                                      [variadicArgument.location])
                if argument.variadic:
                    variadicArgument = argument
            if returnType.isPromise():
                overloadWithPromiseReturnType = overload
            else:
                overloadWithoutPromiseReturnType = overload
        # Make sure either all our overloads return Promises or none do
        if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType:
            raise WebIDLError("We have overloads with both Promise and "
                              "non-Promise return types",
                              [overloadWithPromiseReturnType.location,
                               overloadWithoutPromiseReturnType.location])
        if overloadWithPromiseReturnType and self._legacycaller:
            raise WebIDLError("May not have a Promise return type for a "
                              "legacycaller.",
                              [overloadWithPromiseReturnType.location])
        if self.getExtendedAttribute("StaticClassOverride") and not \
                (self.identifier.scope.isJSImplemented() and self.isStatic()):
            raise WebIDLError("StaticClassOverride can be applied to static"
                              " methods on JS-implemented classes only.",
                              [self.location])
    def overloadsForArgCount(self, argc):
        # Overloads callable with exactly argc arguments: exact match,
        # longer signatures whose trailing args are all optional, or shorter
        # ones ending in a variadic argument.
        return [overload for overload in self._overloads if
                len(overload.arguments) == argc or
                (len(overload.arguments) > argc and
                 all(arg.optional for arg in overload.arguments[argc:])) or
                (len(overload.arguments) < argc and
                 len(overload.arguments) > 0 and
                 overload.arguments[-1].variadic)]
    def signaturesForArgCount(self, argc):
        return [(overload.returnType, overload.arguments) for overload
                in self.overloadsForArgCount(argc)]
    def locationsForArgCount(self, argc):
        return [overload.location for overload in self.overloadsForArgCount(argc)]
    def distinguishingIndexForArgCount(self, argc):
        # Find the first argument index at which every pair of signatures
        # callable with argc arguments has distinguishable types (variadic
        # tails reuse the last declared argument's type).  Raises if no
        # such index exists.
        def isValidDistinguishingIndex(idx, signatures):
            for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]):
                for (secondRetval, secondArgs) in signatures[firstSigIndex+1:]:
                    if idx < len(firstArgs):
                        firstType = firstArgs[idx].type
                    else:
                        assert(firstArgs[-1].variadic)
                        firstType = firstArgs[-1].type
                    if idx < len(secondArgs):
                        secondType = secondArgs[idx].type
                    else:
                        assert(secondArgs[-1].variadic)
                        secondType = secondArgs[-1].type
                    if not firstType.isDistinguishableFrom(secondType):
                        return False
            return True
        signatures = self.signaturesForArgCount(argc)
        for idx in range(argc):
            if isValidDistinguishingIndex(idx, signatures):
                return idx
        # No valid distinguishing index. Time to throw
        locations = self.locationsForArgCount(argc)
        raise WebIDLError("Signatures with %d arguments for method '%s' are not "
                          "distinguishable" % (argc, self.identifier.name),
                          locations)
    def handleExtendedAttribute(self, attr):
        # Validate/apply one extended attribute; anything unknown is an
        # error.  Unhandled-but-known names fall through to the base class.
        identifier = attr.identifier()
        if identifier == "GetterThrows":
            raise WebIDLError("Methods must not be flagged as "
                              "[GetterThrows]",
                              [attr.location, self.location])
        elif identifier == "SetterThrows":
            raise WebIDLError("Methods must not be flagged as "
                              "[SetterThrows]",
                              [attr.location, self.location])
        elif identifier == "Unforgeable":
            if self.isStatic():
                raise WebIDLError("[Unforgeable] is only allowed on non-static "
                                  "methods", [attr.location, self.location])
            self._unforgeable = True
        elif identifier == "SameObject":
            raise WebIDLError("Methods must not be flagged as [SameObject]",
                              [attr.location, self.location])
        elif identifier == "Constant":
            raise WebIDLError("Methods must not be flagged as [Constant]",
                              [attr.location, self.location])
        elif identifier == "PutForwards":
            raise WebIDLError("Only attributes support [PutForwards]",
                              [attr.location, self.location])
        elif identifier == "LenientSetter":
            raise WebIDLError("Only attributes support [LenientSetter]",
                              [attr.location, self.location])
        elif identifier == "LenientFloat":
            # This is called before we've done overload resolution
            assert len(self.signatures()) == 1
            sig = self.signatures()[0]
            if not sig[0].isVoid():
                raise WebIDLError("[LenientFloat] used on a non-void method",
                                  [attr.location, self.location])
            if not any(arg.type.includesRestrictedFloat() for arg in sig[1]):
                raise WebIDLError("[LenientFloat] used on an operation with no "
                                  "restricted float type arguments",
                                  [attr.location, self.location])
        elif identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif (identifier == "CrossOriginCallable" or
              identifier == "WebGLHandlesContextLoss"):
            # Known no-argument attributes.
            if not attr.noArguments():
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
        elif identifier == "Pure":
            if not attr.noArguments():
                raise WebIDLError("[Pure] must take no arguments",
                                  [attr.location])
            # [Pure] == depends only on DOM state, affects nothing.
            self._setDependsOn("DOMState")
            self._setAffects("Nothing")
        elif identifier == "Affects":
            if not attr.hasValue():
                raise WebIDLError("[Affects] takes an identifier",
                                  [attr.location])
            self._setAffects(attr.value())
        elif identifier == "DependsOn":
            if not attr.hasValue():
                raise WebIDLError("[DependsOn] takes an identifier",
                                  [attr.location])
            self._setDependsOn(attr.value())
        elif identifier == "Alias":
            if not attr.hasValue():
                raise WebIDLError("[Alias] takes an identifier or string",
                                  [attr.location])
            self._addAlias(attr.value())
        elif identifier == "UseCounter":
            if self.isSpecial():
                raise WebIDLError("[UseCounter] must not be used on a special "
                                  "operation",
                                  [attr.location, self.location])
        elif identifier == "Unscopable":
            if not attr.noArguments():
                raise WebIDLError("[Unscopable] must take no arguments",
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[Unscopable] is only allowed on non-static "
                                  "attributes and operations",
                                  [attr.location, self.location])
        elif (identifier == "Throws" or
              identifier == "NewObject" or
              identifier == "ChromeOnly" or
              identifier == "UnsafeInPrerendering" or
              identifier == "Pref" or
              identifier == "Deprecated" or
              identifier == "Func" or
              identifier == "SecureContext" or
              identifier == "AvailableIn" or
              identifier == "CheckAnyPermissions" or
              identifier == "CheckAllPermissions" or
              identifier == "BinaryName" or
              identifier == "StaticClassOverride"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on method" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)
    def returnsPromise(self):
        # validate() guarantees all overloads agree on Promise-ness, so
        # checking the first overload suffices.
        return self._overloads[0].returnType.isPromise()
    def isUnforgeable(self):
        return self._unforgeable
    def _getDependentObjects(self):
        deps = set()
        for overload in self._overloads:
            deps.update(overload._getDependentObjects())
        return deps
class IDLImplementsStatement(IDLObject):
    """
    Represents an "implementor implements implementee;" statement.

    Both sides start out as IDLIdentifierPlaceholders and are replaced by
    the resolved IDLInterfaces in finish().
    """
    def __init__(self, location, implementor, implementee):
        IDLObject.__init__(self, location)
        self.implementor = implementor
        self.implementee = implementee
        self._finished = False
    def finish(self, scope):
        """Resolve both sides, validate them, and hook implementee onto
        implementor.  Raises WebIDLError if either side is not a
        non-callback interface."""
        if self._finished:
            return
        # Fix: arm the guard.  Previously _finished was never set to True,
        # so a second finish() call would trip the placeholder asserts
        # below instead of being a no-op.
        self._finished = True
        assert(isinstance(self.implementor, IDLIdentifierPlaceholder))
        assert(isinstance(self.implementee, IDLIdentifierPlaceholder))
        implementor = self.implementor.finish(scope)
        implementee = self.implementee.finish(scope)
        # NOTE: we depend on not setting self.implementor and
        # self.implementee here to keep track of the original
        # locations.
        if not isinstance(implementor, IDLInterface):
            raise WebIDLError("Left-hand side of 'implements' is not an "
                              "interface",
                              [self.implementor.location])
        if implementor.isCallback():
            raise WebIDLError("Left-hand side of 'implements' is a callback "
                              "interface",
                              [self.implementor.location])
        if not isinstance(implementee, IDLInterface):
            raise WebIDLError("Right-hand side of 'implements' is not an "
                              "interface",
                              [self.implementee.location])
        if implementee.isCallback():
            raise WebIDLError("Right-hand side of 'implements' is a callback "
                              "interface",
                              [self.implementee.location])
        implementor.addImplementedInterface(implementee)
        self.implementor = implementor
        self.implementee = implementee
    def validate(self):
        # Nothing to validate beyond what finish() already checked.
        pass
    def addExtendedAttributes(self, attrs):
        # Extended attributes on implements statements are not supported.
        assert len(attrs) == 0
class IDLExtendedAttribute(IDLObject):
    """
    A class to represent IDL extended attributes so we can give them locations
    """
    def __init__(self, location, tuple):
        IDLObject.__init__(self, location)
        self._tuple = tuple
    def identifier(self):
        # The attribute name is always the first tuple element.
        return self._tuple[0]
    def noArguments(self):
        return len(self._tuple) == 1
    def hasValue(self):
        # A string in slot 1 means [Name=value] form.
        return len(self._tuple) >= 2 and isinstance(self._tuple[1], str)
    def value(self):
        assert(self.hasValue())
        return self._tuple[1]
    def hasArgs(self):
        # Either [Name(args)] (a list in slot 1) or [Name=value(args)]
        # (three elements).
        n = len(self._tuple)
        return n == 3 or (n == 2 and isinstance(self._tuple[1], list))
    def args(self):
        assert(self.hasArgs())
        # Our args are our last element
        return self._tuple[-1]
    def listValue(self):
        """
        Backdoor for storing random data in _extendedAttrDict
        """
        return list(self._tuple[1:])
# Parser
class Tokenizer(object):
    """
    WebIDL lexer built on ply.  NOTE: ply derives each token's regular
    expression from the docstring of the corresponding t_* method, so those
    docstrings are load-bearing and must not be edited as documentation.
    """
    tokens = [
        "INTEGER",
        "FLOATLITERAL",
        "IDENTIFIER",
        "STRING",
        "WHITESPACE",
        "OTHER"
    ]
    def t_FLOATLITERAL(self, t):
        r'(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN'
        t.value = float(t.value)
        return t
    def t_INTEGER(self, t):
        r'-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)'
        try:
            # Can't use int(), because that doesn't handle octal properly.
            t.value = parseInt(t.value)
        except Exception:
            # Fix: narrowed from a bare "except:" so KeyboardInterrupt and
            # SystemExit are no longer converted into WebIDLErrors.
            raise WebIDLError("Invalid integer literal",
                              [Location(lexer=self.lexer,
                                        lineno=self.lexer.lineno,
                                        lexpos=self.lexer.lexpos,
                                        filename=self._filename)])
        return t
    def t_IDENTIFIER(self, t):
        r'[A-Z_a-z][0-9A-Z_a-z-]*'
        # Keywords match the identifier regex first and are reclassified
        # here via the keywords table.
        t.type = self.keywords.get(t.value, 'IDENTIFIER')
        return t
    def t_STRING(self, t):
        r'"[^"]*"'
        # Strip the surrounding quotes (Web IDL strings have no escapes).
        t.value = t.value[1:-1]
        return t
    def t_WHITESPACE(self, t):
        r'[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+'
        # Whitespace and comments produce no token.
        pass
    def t_ELLIPSIS(self, t):
        r'\.\.\.'
        t.type = self.keywords.get(t.value)
        return t
    def t_OTHER(self, t):
        r'[^\t\n\r 0-9A-Z_a-z]'
        t.type = self.keywords.get(t.value, 'OTHER')
        return t
    # Maps keyword / punctuation spellings to their token names.
    keywords = {
        "module": "MODULE",
        "interface": "INTERFACE",
        "partial": "PARTIAL",
        "dictionary": "DICTIONARY",
        "exception": "EXCEPTION",
        "enum": "ENUM",
        "callback": "CALLBACK",
        "typedef": "TYPEDEF",
        "implements": "IMPLEMENTS",
        "const": "CONST",
        "null": "NULL",
        "true": "TRUE",
        "false": "FALSE",
        "serializer": "SERIALIZER",
        "stringifier": "STRINGIFIER",
        "jsonifier": "JSONIFIER",
        "unrestricted": "UNRESTRICTED",
        "attribute": "ATTRIBUTE",
        "readonly": "READONLY",
        "inherit": "INHERIT",
        "static": "STATIC",
        "getter": "GETTER",
        "setter": "SETTER",
        "creator": "CREATOR",
        "deleter": "DELETER",
        "legacycaller": "LEGACYCALLER",
        "optional": "OPTIONAL",
        "...": "ELLIPSIS",
        "::": "SCOPE",
        "Date": "DATE",
        "DOMString": "DOMSTRING",
        "ByteString": "BYTESTRING",
        "USVString": "USVSTRING",
        "any": "ANY",
        "boolean": "BOOLEAN",
        "byte": "BYTE",
        "double": "DOUBLE",
        "float": "FLOAT",
        "long": "LONG",
        "object": "OBJECT",
        "octet": "OCTET",
        "Promise": "PROMISE",
        "required": "REQUIRED",
        "sequence": "SEQUENCE",
        "MozMap": "MOZMAP",
        "short": "SHORT",
        "unsigned": "UNSIGNED",
        "void": "VOID",
        ":": "COLON",
        ";": "SEMICOLON",
        "{": "LBRACE",
        "}": "RBRACE",
        "(": "LPAREN",
        ")": "RPAREN",
        "[": "LBRACKET",
        "]": "RBRACKET",
        "?": "QUESTIONMARK",
        ",": "COMMA",
        "=": "EQUALS",
        "<": "LT",
        ">": "GT",
        "ArrayBuffer": "ARRAYBUFFER",
        "SharedArrayBuffer": "SHAREDARRAYBUFFER",
        "or": "OR",
        "maplike": "MAPLIKE",
        "setlike": "SETLIKE",
        "iterable": "ITERABLE",
        "namespace": "NAMESPACE"
    }
    tokens.extend(keywords.values())
    def t_error(self, t):
        # Fix: was self.filename, but the attribute set by the parser (and
        # read by t_INTEGER and Parser.getLocation) is self._filename, so
        # this would raise AttributeError instead of reporting the error.
        raise WebIDLError("Unrecognized Input",
                          [Location(lexer=self.lexer,
                                    lineno=self.lexer.lineno,
                                    lexpos=self.lexer.lexpos,
                                    filename=self._filename)])
    def __init__(self, outputdir, lexer=None):
        # Reuse a caller-supplied lexer if given; otherwise build one from
        # this object's t_* rules.
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(object=self,
                                 outputdir=outputdir,
                                 lextab='webidllex',
                                 reflags=re.DOTALL)
class SqueakyCleanLogger(object):
    """
    Logger handed to ply: any warning that is not on the whitelist below is
    recorded as an error and reported after grammar construction.
    """
    errorWhitelist = [
        # Web IDL defines the WHITESPACE token, but doesn't actually
        # use it ... so far.
        "Token 'WHITESPACE' defined, but not used",
        # And that means we have an unused token
        "There is 1 unused token",
        # Web IDL defines a OtherOrComma rule that's only used in
        # ExtendedAttributeInner, which we don't use yet.
        "Rule 'OtherOrComma' defined, but not used",
        # And an unused rule
        "There is 1 unused rule",
        # And the OtherOrComma grammar symbol is unreachable.
        "Symbol 'OtherOrComma' is unreachable",
        # Which means the Other symbol is unreachable.
        "Symbol 'Other' is unreachable",
    ]
    def __init__(self):
        # Non-whitelisted messages accumulate here.
        self.errors = []
    def debug(self, msg, *args, **kwargs):
        # Debug/info output from ply is ignored entirely.
        pass
    info = debug
    def warning(self, msg, *args, **kwargs):
        if msg in ("%s:%d: Rule %r defined, but not used",
                   "%s:%d: Rule '%s' defined, but not used"):
            # Munge things so we don't have to hardcode filenames and
            # line numbers in our whitelist.
            checkMsg = "Rule %r defined, but not used"
            checkArgs = args[2:]
        else:
            checkMsg = msg
            checkArgs = args
        if (checkMsg % checkArgs) not in SqueakyCleanLogger.errorWhitelist:
            self.errors.append(msg % args)
    error = warning
    def reportGrammarErrors(self):
        # Raise one combined error once grammar construction is done.
        if self.errors:
            raise WebIDLError("\n".join(self.errors), [])
class Parser(Tokenizer):
    def getLocation(self, p, i):
        # Build a Location for grammar symbol i of production p from the
        # lexer state and the filename recorded for this parse.
        return Location(self.lexer, p.lineno(i), p.lexpos(i), self._filename)
    def globalScope(self):
        # The global IDLScope that all top-level definitions resolve into.
        return self._globalScope
# The p_Foo functions here must match the WebIDL spec's grammar.
# It's acceptable to split things at '|' boundaries.
    def p_Definitions(self, p):
        """
        Definitions : ExtendedAttributeList Definition Definitions
        """
        # The docstring above is the ply grammar production.  Attach the
        # leading extended attributes to the definition and prepend it to
        # the list built by the recursive tail.
        if p[2]:
            p[0] = [p[2]]
            p[2].addExtendedAttributes(p[1])
        else:
            # A discarded definition (e.g. an exception) must not carry
            # extended attributes.
            assert not p[1]
            p[0] = []
        p[0].extend(p[3])
    def p_DefinitionsEmpty(self, p):
        """
        Definitions :
        """
        # Base case of the Definitions recursion: an empty list.
        p[0] = []
    def p_Definition(self, p):
        """
        Definition : CallbackOrInterface
                   | Namespace
                   | Partial
                   | Dictionary
                   | Exception
                   | Enum
                   | Typedef
                   | ImplementsStatement
        """
        # Every alternative just passes its parsed object through.
        p[0] = p[1]
        assert p[1]  # We might not have implemented something ...
    def p_CallbackOrInterfaceCallback(self, p):
        """
        CallbackOrInterface : CALLBACK CallbackRestOrInterface
        """
        # "callback interface Foo" marks the interface as a callback
        # interface; a plain callback function passes through unchanged.
        if p[2].isInterface():
            assert isinstance(p[2], IDLInterface)
            p[2].setCallback(True)
        p[0] = p[2]
    def p_CallbackOrInterfaceInterface(self, p):
        """
        CallbackOrInterface : Interface
        """
        # Plain (non-callback) interface: pass it through.
        p[0] = p[1]
    def p_CallbackRestOrInterface(self, p):
        """
        CallbackRestOrInterface : CallbackRest
                                | Interface
        """
        # Pass through either a callback function or an interface.
        assert p[1]
        p[0] = p[1]
def handleNonPartialObject(self, location, identifier, constructor,
constructorArgs, nonPartialArgs):
"""
This handles non-partial objects (interfaces and namespaces) by
checking for an existing partial object, and promoting it to
non-partial as needed. The return value is the non-partial object.
constructorArgs are all the args for the constructor except the last
one: isKnownNonPartial.
nonPartialArgs are the args for the setNonPartial call.
"""
# The name of the class starts with "IDL", so strip that off.
# Also, starts with a capital letter after that, so nix that
# as well.
prettyname = constructor.__name__[3:].lower()
try:
existingObj = self.globalScope()._lookupIdentifier(identifier)
if existingObj:
if not isinstance(existingObj, constructor):
raise WebIDLError("%s has the same name as "
"non-%s object" %
(prettyname.capitalize(), prettyname),
[location, existingObj.location])
existingObj.setNonPartial(*nonPartialArgs)
return existingObj
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
# True for isKnownNonPartial
return constructor(*(constructorArgs + [True]))
    def p_Interface(self, p):
        """
        Interface : INTERFACE IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON
        """
        # Full interface definition: build (or promote an existing partial)
        # via handleNonPartialObject.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[5]
        parent = p[3]
        p[0] = self.handleNonPartialObject(
            location, identifier, IDLInterface,
            [location, self.globalScope(), identifier, parent, members],
            [location, parent, members])
def p_InterfaceForwardDecl(self, p):
"""
Interface : INTERFACE IDENTIFIER SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
try:
if self.globalScope()._lookupIdentifier(identifier):
p[0] = self.globalScope()._lookupIdentifier(identifier)
if not isinstance(p[0], IDLExternalInterface):
raise WebIDLError("Name collision between external "
"interface declaration for identifier "
"%s and %s" % (identifier.name, p[0]),
[location, p[0].location])
return
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
p[0] = IDLExternalInterface(location, self.globalScope(), identifier)
    def p_Namespace(self, p):
        """
        Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
        """
        # Namespace definition: like an interface, but with no parent.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[4]
        p[0] = self.handleNonPartialObject(
            location, identifier, IDLNamespace,
            [location, self.globalScope(), identifier, members],
            [location, None, members])
    def p_Partial(self, p):
        """
        Partial : PARTIAL PartialDefinition
        """
        # Pass the partial interface/namespace through.
        p[0] = p[2]
    def p_PartialDefinition(self, p):
        """
        PartialDefinition : PartialInterface
                          | PartialNamespace
        """
        # Pass through whichever partial object was parsed.
        p[0] = p[1]
def handlePartialObject(self, location, identifier, nonPartialConstructor,
nonPartialConstructorArgs,
partialConstructorArgs):
"""
This handles partial objects (interfaces and namespaces) by checking for
an existing non-partial object, and adding ourselves to it as needed.
The return value is our partial object. For now we just use
IDLPartialInterfaceOrNamespace for partial objects.
nonPartialConstructorArgs are all the args for the non-partial
constructor except the last two: members and isKnownNonPartial.
partialConstructorArgs are the arguments for the
IDLPartialInterfaceOrNamespace constructor, except the last one (the
non-partial object).
"""
# The name of the class starts with "IDL", so strip that off.
# Also, starts with a capital letter after that, so nix that
# as well.
prettyname = nonPartialConstructor.__name__[3:].lower()
nonPartialObject = None
try:
nonPartialObject = self.globalScope()._lookupIdentifier(identifier)
if nonPartialObject:
if not isinstance(nonPartialObject, nonPartialConstructor):
raise WebIDLError("Partial %s has the same name as "
"non-%s object" %
(prettyname, prettyname),
[location, nonPartialObject.location])
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
if not nonPartialObject:
nonPartialObject = nonPartialConstructor(
# No members, False for isKnownNonPartial
*(nonPartialConstructorArgs + [[], False]))
partialInterface = IDLPartialInterfaceOrNamespace(
*(partialConstructorArgs + [nonPartialObject]))
return partialInterface
    def p_PartialInterface(self, p):
        """
        PartialInterface : INTERFACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
        """
        # Partial interface: attach members to the (possibly placeholder)
        # non-partial interface via handlePartialObject.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[4]
        p[0] = self.handlePartialObject(
            location, identifier, IDLInterface,
            [location, self.globalScope(), identifier, None],
            [location, identifier, members])
    def p_PartialNamespace(self, p):
        """
        PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
        """
        # Partial namespace: same handling as a partial interface.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[4]
        p[0] = self.handlePartialObject(
            location, identifier, IDLNamespace,
            [location, self.globalScope(), identifier],
            [location, identifier, members])
    def p_Inheritance(self, p):
        """
        Inheritance : COLON ScopedName
        """
        # The parent is only a name here; it is resolved later.
        p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2])
    def p_InheritanceEmpty(self, p):
        """
        Inheritance :
        """
        # No parent: p[0] is left as None.
        pass
    def p_InterfaceMembers(self, p):
        """
        InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
        """
        # Attach extended attributes to the member and prepend it to the
        # list from the recursive tail.
        p[0] = [p[2]] if p[2] else []
        assert not p[1] or p[2]
        p[2].addExtendedAttributes(p[1])
        p[0].extend(p[3])
    def p_InterfaceMembersEmpty(self, p):
        """
        InterfaceMembers :
        """
        # Base case: no more members.
        p[0] = []
    def p_InterfaceMember(self, p):
        """
        InterfaceMember : Const
                        | AttributeOrOperationOrMaplikeOrSetlikeOrIterable
        """
        # Pass the parsed member through.
        p[0] = p[1]
    def p_Dictionary(self, p):
        """
        Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON
        """
        # Dictionary definition; p[3] is the (possibly None) parent
        # placeholder from Inheritance.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[5]
        p[0] = IDLDictionary(location, self.globalScope(), identifier, p[3], members)
    def p_DictionaryMembers(self, p):
        """
        DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers
                          |
        """
        if len(p) == 1:
            # We're at the end of the list
            p[0] = []
            return
        # Add our extended attributes
        p[2].addExtendedAttributes(p[1])
        p[0] = [p[2]]
        p[0].extend(p[3])
    def p_DictionaryMember(self, p):
        """
        DictionaryMember : Required Type IDENTIFIER Default SEMICOLON
        """
        # These quack a lot like optional arguments, so just treat them that way.
        t = p[2]
        assert isinstance(t, IDLType)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
        defaultValue = p[4]
        # "required" members are the non-optional ones.
        optional = not p[1]
        if not optional and defaultValue:
            raise WebIDLError("Required dictionary members can't have a default value.",
                              [self.getLocation(p, 4)])
        p[0] = IDLArgument(self.getLocation(p, 3), identifier, t,
                           optional=optional,
                           defaultValue=defaultValue, variadic=False,
                           dictionaryMember=True)
    def p_Default(self, p):
        """
        Default : EQUALS DefaultValue
                |
        """
        # A missing "= value" clause yields None.
        if len(p) > 1:
            p[0] = p[2]
        else:
            p[0] = None
    def p_DefaultValue(self, p):
        """
        DefaultValue : ConstValue
                     | LBRACKET RBRACKET
        """
        # Either a constant, or the empty-sequence default "[]".
        if len(p) == 2:
            p[0] = p[1]
        else:
            assert len(p) == 3  # Must be []
            p[0] = IDLEmptySequenceValue(self.getLocation(p, 1))
    def p_Exception(self, p):
        """
        Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON
        """
        # Exceptions are parsed but deliberately discarded (unsupported).
        pass
    def p_Enum(self, p):
        """
        Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON
        """
        # Enum definition; the grammar guarantees at least one value.
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        values = p[4]
        assert values
        p[0] = IDLEnum(location, self.globalScope(), identifier, values)
    def p_EnumValueList(self, p):
        """
        EnumValueList : STRING EnumValueListComma
        """
        # First enum value plus any comma-separated rest.
        p[0] = [p[1]]
        p[0].extend(p[2])
    def p_EnumValueListComma(self, p):
        """
        EnumValueListComma : COMMA EnumValueListString
        """
        # Drop the comma; pass the remaining values through.
        p[0] = p[2]
    def p_EnumValueListCommaEmpty(self, p):
        """
        EnumValueListComma :
        """
        # No trailing comma: no more values.
        p[0] = []
    def p_EnumValueListString(self, p):
        """
        EnumValueListString : STRING EnumValueListComma
        """
        # One more value plus the recursive tail.
        p[0] = [p[1]]
        p[0].extend(p[2])
    def p_EnumValueListStringEmpty(self, p):
        """
        EnumValueListString :
        """
        # Allows a trailing comma in the enum value list.
        p[0] = []
    def p_CallbackRest(self, p):
        """
        CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON
        """
        # Callback function: "callback Name = ReturnType (args);".
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
        p[0] = IDLCallback(self.getLocation(p, 1), self.globalScope(),
                           identifier, p[3], p[5])
    def p_ExceptionMembers(self, p):
        """
        ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers
                         |
        """
        # Exception members are parsed but discarded (exceptions are
        # unsupported).
        pass
    def p_Typedef(self, p):
        """
        Typedef : TYPEDEF Type IDENTIFIER SEMICOLON
        """
        # Register the new name for the given type in the global scope.
        typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(),
                             p[2], p[3])
        p[0] = typedef
    def p_ImplementsStatement(self, p):
        """
        ImplementsStatement : ScopedName IMPLEMENTS ScopedName SEMICOLON
        """
        assert(p[2] == "implements")
        # Both sides are just names at this point; they resolve in finish().
        implementor = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1])
        implementee = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3])
        p[0] = IDLImplementsStatement(self.getLocation(p, 1), implementor,
                                      implementee)
    def p_Const(self, p):
        """
        Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON
        """
        # Interface constant member.
        location = self.getLocation(p, 1)
        type = p[2]
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
        value = p[5]
        p[0] = IDLConst(location, identifier, type, value)
def p_ConstValueBoolean(self, p):
"""
ConstValue : BooleanLiteral
"""
location = self.getLocation(p, 1)
booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean]
p[0] = IDLValue(location, booleanType, p[1])
def p_ConstValueInteger(self, p):
"""
ConstValue : INTEGER
"""
location = self.getLocation(p, 1)
# We don't know ahead of time what type the integer literal is.
# Determine the smallest type it could possibly fit in and use that.
integerType = matchIntegerValueToType(p[1])
if integerType is None:
raise WebIDLError("Integer literal out of range", [location])
p[0] = IDLValue(location, integerType, p[1])
def p_ConstValueFloat(self, p):
"""
ConstValue : FLOATLITERAL
"""
location = self.getLocation(p, 1)
p[0] = IDLValue(location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1])
def p_ConstValueString(self, p):
"""
ConstValue : STRING
"""
location = self.getLocation(p, 1)
stringType = BuiltinTypes[IDLBuiltinType.Types.domstring]
p[0] = IDLValue(location, stringType, p[1])
def p_ConstValueNull(self, p):
"""
ConstValue : NULL
"""
p[0] = IDLNullValue(self.getLocation(p, 1))
def p_BooleanLiteralTrue(self, p):
"""
BooleanLiteral : TRUE
"""
p[0] = True
def p_BooleanLiteralFalse(self, p):
"""
BooleanLiteral : FALSE
"""
p[0] = False
def p_AttributeOrOperationOrMaplikeOrSetlikeOrIterable(self, p):
"""
AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute
| Maplike
| Setlike
| Iterable
| Operation
"""
p[0] = p[1]
def p_Iterable(self, p):
"""
Iterable : ITERABLE LT Type GT SEMICOLON
| ITERABLE LT Type COMMA Type GT SEMICOLON
"""
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__iterable",
allowDoubleUnderscore=True)
if (len(p) > 6):
keyType = p[3]
valueType = p[5]
else:
keyType = None
valueType = p[3]
p[0] = IDLIterable(location, identifier, keyType, valueType, self.globalScope())
def p_Setlike(self, p):
"""
Setlike : ReadOnly SETLIKE LT Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__setlike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = keyType
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_Maplike(self, p):
"""
Maplike : ReadOnly MAPLIKE LT Type COMMA Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__maplike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = p[6]
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_AttributeWithQualifier(self, p):
"""
Attribute : Qualifier AttributeRest
"""
static = IDLInterfaceMember.Special.Static in p[1]
stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly,
static=static, stringifier=stringifier)
def p_AttributeInherited(self, p):
"""
Attribute : INHERIT AttributeRest
"""
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True)
def p_Attribute(self, p):
"""
Attribute : AttributeRest
"""
(location, identifier, type, readonly) = p[1]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False)
def p_AttributeRest(self, p):
"""
AttributeRest : ReadOnly ATTRIBUTE Type AttributeName SEMICOLON
"""
location = self.getLocation(p, 2)
readonly = p[1]
t = p[3]
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 4), p[4])
p[0] = (location, identifier, t, readonly)
def p_ReadOnly(self, p):
"""
ReadOnly : READONLY
"""
p[0] = True
def p_ReadOnlyEmpty(self, p):
"""
ReadOnly :
"""
p[0] = False
def p_Operation(self, p):
    """
    Operation : Qualifiers OperationRest
    """
    # Validate the qualifier list, classify special operations
    # (getter/setter/creator/deleter/legacycaller/stringifier), and build
    # the IDLMethod for this operation.  Raises WebIDLError on any
    # qualifier/argument/return-type constraint violation.
    qualifiers = p[1]
    # Disallow duplicates in the qualifier set
    if not len(set(qualifiers)) == len(qualifiers):
        raise WebIDLError("Duplicate qualifiers are not allowed",
                          [self.getLocation(p, 1)])
    # "X in qualifiers" already yields a bool; the original's
    # "True if ... else False" wrappers were redundant.  Also use the
    # bound local instead of re-indexing p[1] repeatedly.
    static = IDLInterfaceMember.Special.Static in qualifiers
    # If static is there that's all that's allowed. This is disallowed
    # by the parser, so we can assert here.
    assert not static or len(qualifiers) == 1
    stringifier = IDLInterfaceMember.Special.Stringifier in qualifiers
    # If stringifier is there that's all that's allowed. This is disallowed
    # by the parser, so we can assert here.
    assert not stringifier or len(qualifiers) == 1
    getter = IDLMethod.Special.Getter in qualifiers
    setter = IDLMethod.Special.Setter in qualifiers
    creator = IDLMethod.Special.Creator in qualifiers
    deleter = IDLMethod.Special.Deleter in qualifiers
    legacycaller = IDLMethod.Special.LegacyCaller in qualifiers
    if getter or deleter:
        if setter or creator:
            raise WebIDLError("getter and deleter are incompatible with setter and creator",
                              [self.getLocation(p, 1)])
    (returnType, identifier, arguments) = p[2]
    assert isinstance(returnType, IDLType)
    specialType = IDLMethod.NamedOrIndexed.Neither
    if getter or deleter:
        # Getters/deleters take exactly one non-optional, non-variadic
        # argument of type DOMString (named) or unsigned long (indexed).
        if len(arguments) != 1:
            raise WebIDLError("%s has wrong number of arguments" %
                              ("getter" if getter else "deleter"),
                              [self.getLocation(p, 2)])
        argType = arguments[0].type
        if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
            specialType = IDLMethod.NamedOrIndexed.Named
        elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
            specialType = IDLMethod.NamedOrIndexed.Indexed
        else:
            raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
                              ("getter" if getter else "deleter"),
                              [arguments[0].location])
        if arguments[0].optional or arguments[0].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("getter" if getter else "deleter",
                               "optional" if arguments[0].optional else "variadic"),
                              [arguments[0].location])
    if getter:
        if returnType.isVoid():
            raise WebIDLError("getter cannot have void return type",
                              [self.getLocation(p, 2)])
    if setter or creator:
        # Setters/creators take exactly two arguments; the first follows
        # the same named/indexed typing rule as getters.
        if len(arguments) != 2:
            raise WebIDLError("%s has wrong number of arguments" %
                              ("setter" if setter else "creator"),
                              [self.getLocation(p, 2)])
        argType = arguments[0].type
        if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
            specialType = IDLMethod.NamedOrIndexed.Named
        elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
            specialType = IDLMethod.NamedOrIndexed.Indexed
        else:
            raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
                              ("setter" if setter else "creator"),
                              [arguments[0].location])
        if arguments[0].optional or arguments[0].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("setter" if setter else "creator",
                               "optional" if arguments[0].optional else "variadic"),
                              [arguments[0].location])
        if arguments[1].optional or arguments[1].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("setter" if setter else "creator",
                               "optional" if arguments[1].optional else "variadic"),
                              [arguments[1].location])
    if stringifier:
        if len(arguments) != 0:
            raise WebIDLError("stringifier has wrong number of arguments",
                              [self.getLocation(p, 2)])
        if not returnType.isDOMString():
            raise WebIDLError("stringifier must have DOMString return type",
                              [self.getLocation(p, 2)])
    # identifier might be None. This is only permitted for special methods.
    if not identifier:
        if (not getter and not setter and not creator and
            not deleter and not legacycaller and not stringifier):
            raise WebIDLError("Identifier required for non-special methods",
                              [self.getLocation(p, 2)])
        # Synthesize a double-underscore name encoding the special kind,
        # e.g. "__indexedgetter".
        location = BuiltinLocation("<auto-generated-identifier>")
        identifier = IDLUnresolvedIdentifier(
            location,
            "__%s%s%s%s%s%s%s" %
            ("named" if specialType == IDLMethod.NamedOrIndexed.Named else
             "indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "",
             "getter" if getter else "",
             "setter" if setter else "",
             "deleter" if deleter else "",
             "creator" if creator else "",
             "legacycaller" if legacycaller else "",
             "stringifier" if stringifier else ""),
            allowDoubleUnderscore=True)
    method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments,
                       static=static, getter=getter, setter=setter, creator=creator,
                       deleter=deleter, specialType=specialType,
                       legacycaller=legacycaller, stringifier=stringifier)
    p[0] = method
def p_Stringifier(self, p):
"""
Operation : STRINGIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__stringifier",
allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.domstring],
arguments=[],
stringifier=True)
p[0] = method
def p_Jsonifier(self, p):
"""
Operation : JSONIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__jsonifier", allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.object],
arguments=[],
jsonifier=True)
p[0] = method
def p_QualifierStatic(self, p):
"""
Qualifier : STATIC
"""
p[0] = [IDLInterfaceMember.Special.Static]
def p_QualifierStringifier(self, p):
"""
Qualifier : STRINGIFIER
"""
p[0] = [IDLInterfaceMember.Special.Stringifier]
def p_Qualifiers(self, p):
"""
Qualifiers : Qualifier
| Specials
"""
p[0] = p[1]
def p_Specials(self, p):
"""
Specials : Special Specials
"""
p[0] = [p[1]]
p[0].extend(p[2])
def p_SpecialsEmpty(self, p):
"""
Specials :
"""
p[0] = []
def p_SpecialGetter(self, p):
"""
Special : GETTER
"""
p[0] = IDLMethod.Special.Getter
def p_SpecialSetter(self, p):
"""
Special : SETTER
"""
p[0] = IDLMethod.Special.Setter
def p_SpecialCreator(self, p):
"""
Special : CREATOR
"""
p[0] = IDLMethod.Special.Creator
def p_SpecialDeleter(self, p):
"""
Special : DELETER
"""
p[0] = IDLMethod.Special.Deleter
def p_SpecialLegacyCaller(self, p):
"""
Special : LEGACYCALLER
"""
p[0] = IDLMethod.Special.LegacyCaller
def p_OperationRest(self, p):
"""
OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON
"""
p[0] = (p[1], p[2], p[4])
def p_OptionalIdentifier(self, p):
"""
OptionalIdentifier : IDENTIFIER
"""
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_OptionalIdentifierEmpty(self, p):
"""
OptionalIdentifier :
"""
pass
def p_ArgumentList(self, p):
"""
ArgumentList : Argument Arguments
"""
p[0] = [p[1]] if p[1] else []
p[0].extend(p[2])
def p_ArgumentListEmpty(self, p):
"""
ArgumentList :
"""
p[0] = []
def p_Arguments(self, p):
"""
Arguments : COMMA Argument Arguments
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ArgumentsEmpty(self, p):
"""
Arguments :
"""
p[0] = []
def p_Argument(self, p):
    """
    Argument : ExtendedAttributeList Optional Type Ellipsis ArgumentName Default
    """
    # Build an IDLArgument.  A variadic ("...") argument is implicitly
    # optional but must not also carry an explicit "optional" keyword.
    t = p[3]
    assert isinstance(t, IDLType)
    identifier = IDLUnresolvedIdentifier(self.getLocation(p, 5), p[5])
    optional = p[2]
    variadic = p[4]
    defaultValue = p[6]
    if not optional and defaultValue:
        raise WebIDLError("Mandatory arguments can't have a default value.",
                          [self.getLocation(p, 6)])
    # We can't test t.isAny() here and give it a default value as needed,
    # since at this point t is not a fully resolved type yet (e.g. it might
    # be a typedef). We'll handle the 'any' case in IDLArgument.complete.
    if variadic:
        if optional:
            raise WebIDLError("Variadic arguments should not be marked optional.",
                              [self.getLocation(p, 2)])
        # Variadic implies optional for the rest of the pipeline.
        optional = variadic
    p[0] = IDLArgument(self.getLocation(p, 5), identifier, t, optional, defaultValue, variadic)
    p[0].addExtendedAttributes(p[1])
def p_ArgumentName(self, p):
"""
ArgumentName : IDENTIFIER
| ATTRIBUTE
| CALLBACK
| CONST
| CREATOR
| DELETER
| DICTIONARY
| ENUM
| EXCEPTION
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| ITERABLE
| LEGACYCALLER
| MAPLIKE
| PARTIAL
| REQUIRED
| SERIALIZER
| SETLIKE
| SETTER
| STATIC
| STRINGIFIER
| JSONIFIER
| TYPEDEF
| UNRESTRICTED
| NAMESPACE
"""
p[0] = p[1]
def p_AttributeName(self, p):
"""
AttributeName : IDENTIFIER
| REQUIRED
"""
p[0] = p[1]
def p_Optional(self, p):
"""
Optional : OPTIONAL
"""
p[0] = True
def p_OptionalEmpty(self, p):
"""
Optional :
"""
p[0] = False
def p_Required(self, p):
"""
Required : REQUIRED
"""
p[0] = True
def p_RequiredEmpty(self, p):
"""
Required :
"""
p[0] = False
def p_Ellipsis(self, p):
"""
Ellipsis : ELLIPSIS
"""
p[0] = True
def p_EllipsisEmpty(self, p):
"""
Ellipsis :
"""
p[0] = False
def p_ExceptionMember(self, p):
"""
ExceptionMember : Const
| ExceptionField
"""
pass
def p_ExceptionField(self, p):
"""
ExceptionField : Type IDENTIFIER SEMICOLON
"""
pass
def p_ExtendedAttributeList(self, p):
"""
ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET
"""
p[0] = [p[2]]
if p[3]:
p[0].extend(p[3])
def p_ExtendedAttributeListEmpty(self, p):
"""
ExtendedAttributeList :
"""
p[0] = []
def p_ExtendedAttribute(self, p):
"""
ExtendedAttribute : ExtendedAttributeNoArgs
| ExtendedAttributeArgList
| ExtendedAttributeIdent
| ExtendedAttributeNamedArgList
| ExtendedAttributeIdentList
"""
p[0] = IDLExtendedAttribute(self.getLocation(p, 1), p[1])
def p_ExtendedAttributeEmpty(self, p):
"""
ExtendedAttribute :
"""
pass
def p_ExtendedAttributes(self, p):
"""
ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ExtendedAttributesEmpty(self, p):
"""
ExtendedAttributes :
"""
p[0] = []
def p_Other(self, p):
"""
Other : INTEGER
| FLOATLITERAL
| IDENTIFIER
| STRING
| OTHER
| ELLIPSIS
| COLON
| SCOPE
| SEMICOLON
| LT
| EQUALS
| GT
| QUESTIONMARK
| DATE
| DOMSTRING
| BYTESTRING
| USVSTRING
| ANY
| ATTRIBUTE
| BOOLEAN
| BYTE
| LEGACYCALLER
| CONST
| CREATOR
| DELETER
| DOUBLE
| EXCEPTION
| FALSE
| FLOAT
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| LONG
| MODULE
| NULL
| OBJECT
| OCTET
| OPTIONAL
| SEQUENCE
| MOZMAP
| SETTER
| SHORT
| STATIC
| STRINGIFIER
| JSONIFIER
| TRUE
| TYPEDEF
| UNSIGNED
| VOID
"""
pass
def p_OtherOrComma(self, p):
"""
OtherOrComma : Other
| COMMA
"""
pass
def p_TypeSingleType(self, p):
"""
Type : SingleType
"""
p[0] = p[1]
def p_TypeUnionType(self, p):
"""
Type : UnionType TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_SingleTypeNonAnyType(self, p):
"""
SingleType : NonAnyType
"""
p[0] = p[1]
def p_SingleTypeAnyType(self, p):
"""
SingleType : ANY TypeSuffixStartingWithArray
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.any], p[2])
def p_UnionType(self, p):
"""
UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN
"""
types = [p[2], p[4]]
types.extend(p[5])
p[0] = IDLUnionType(self.getLocation(p, 1), types)
def p_UnionMemberTypeNonAnyType(self, p):
"""
UnionMemberType : NonAnyType
"""
p[0] = p[1]
def p_UnionMemberTypeArrayOfAny(self, p):
"""
UnionMemberTypeArrayOfAny : ANY LBRACKET RBRACKET
"""
p[0] = IDLArrayType(self.getLocation(p, 2),
BuiltinTypes[IDLBuiltinType.Types.any])
def p_UnionMemberType(self, p):
"""
UnionMemberType : UnionType TypeSuffix
| UnionMemberTypeArrayOfAny TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_UnionMemberTypes(self, p):
"""
UnionMemberTypes : OR UnionMemberType UnionMemberTypes
"""
p[0] = [p[2]]
p[0].extend(p[3])
def p_UnionMemberTypesEmpty(self, p):
"""
UnionMemberTypes :
"""
p[0] = []
def p_NonAnyType(self, p):
"""
NonAnyType : PrimitiveOrStringType TypeSuffix
| ARRAYBUFFER TypeSuffix
| SHAREDARRAYBUFFER TypeSuffix
| OBJECT TypeSuffix
"""
if p[1] == "object":
type = BuiltinTypes[IDLBuiltinType.Types.object]
elif p[1] == "ArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer]
elif p[1] == "SharedArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.SharedArrayBuffer]
else:
type = BuiltinTypes[p[1]]
p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeSequenceType(self, p):
"""
NonAnyType : SEQUENCE LT Type GT Null
"""
innerType = p[3]
type = IDLSequenceType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
# Note: Promise<void> is allowed, so we want to parametrize on
# ReturnType, not Type. Also, we want this to end up picking up
# the Promise interface for now, hence the games with IDLUnresolvedType.
def p_NonAnyTypePromiseType(self, p):
    """
    NonAnyType : PROMISE LT ReturnType GT Null
    """
    # Promise<T> parses to an unresolved "Promise" wrapper around the
    # inner return type; a trailing "?" makes the whole thing nullable.
    innerType = p[3]
    promiseIdent = IDLUnresolvedIdentifier(self.getLocation(p, 1), "Promise")
    # Use the bound innerType; the original assigned it and then
    # redundantly re-read p[3].
    type = IDLUnresolvedType(self.getLocation(p, 1), promiseIdent, innerType)
    if p[5]:
        type = IDLNullableType(self.getLocation(p, 5), type)
    p[0] = type
def p_NonAnyTypeMozMapType(self, p):
"""
NonAnyType : MOZMAP LT Type GT Null
"""
innerType = p[3]
type = IDLMozMapType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
def p_NonAnyTypeScopedName(self, p):
    """
    NonAnyType : ScopedName TypeSuffix
    """
    # Resolve an identifier used as a type.  If the name is already known
    # in the global scope, wrap it as a typedef/callback/interface type
    # now; otherwise leave it as an IDLUnresolvedType for later binding.
    assert isinstance(p[1], IDLUnresolvedIdentifier)
    if p[1].name == "Promise":
        raise WebIDLError("Promise used without saying what it's "
                          "parametrized over",
                          [self.getLocation(p, 1)])
    type = None
    try:
        # Single lookup; the original called _lookupIdentifier twice.
        obj = self.globalScope()._lookupIdentifier(p[1])
        if obj:
            assert not obj.isType()
            if obj.isTypedef():
                type = IDLTypedefType(self.getLocation(p, 1), obj.innerType,
                                      obj.identifier.name)
            elif obj.isCallback() and not obj.isInterface():
                type = IDLCallbackType(self.getLocation(p, 1), obj)
            else:
                type = IDLWrapperType(self.getLocation(p, 1), p[1])
            p[0] = self.handleModifiers(type, p[2])
            return
    except Exception:
        # Lookup can fail for names not yet defined; fall through and
        # leave the type unresolved.  (The original used a bare "except:",
        # which would also have swallowed KeyboardInterrupt/SystemExit.)
        pass
    type = IDLUnresolvedType(self.getLocation(p, 1), p[1])
    p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeDate(self, p):
"""
NonAnyType : DATE TypeSuffix
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.date],
p[2])
def p_ConstType(self, p):
"""
ConstType : PrimitiveOrStringType Null
"""
type = BuiltinTypes[p[1]]
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_ConstTypeIdentifier(self, p):
"""
ConstType : IDENTIFIER Null
"""
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
type = IDLUnresolvedType(self.getLocation(p, 1), identifier)
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_PrimitiveOrStringTypeUint(self, p):
"""
PrimitiveOrStringType : UnsignedIntegerType
"""
p[0] = p[1]
def p_PrimitiveOrStringTypeBoolean(self, p):
"""
PrimitiveOrStringType : BOOLEAN
"""
p[0] = IDLBuiltinType.Types.boolean
def p_PrimitiveOrStringTypeByte(self, p):
"""
PrimitiveOrStringType : BYTE
"""
p[0] = IDLBuiltinType.Types.byte
def p_PrimitiveOrStringTypeOctet(self, p):
"""
PrimitiveOrStringType : OCTET
"""
p[0] = IDLBuiltinType.Types.octet
def p_PrimitiveOrStringTypeFloat(self, p):
"""
PrimitiveOrStringType : FLOAT
"""
p[0] = IDLBuiltinType.Types.float
def p_PrimitiveOrStringTypeUnrestictedFloat(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED FLOAT
"""
p[0] = IDLBuiltinType.Types.unrestricted_float
def p_PrimitiveOrStringTypeDouble(self, p):
"""
PrimitiveOrStringType : DOUBLE
"""
p[0] = IDLBuiltinType.Types.double
def p_PrimitiveOrStringTypeUnrestictedDouble(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED DOUBLE
"""
p[0] = IDLBuiltinType.Types.unrestricted_double
def p_PrimitiveOrStringTypeDOMString(self, p):
"""
PrimitiveOrStringType : DOMSTRING
"""
p[0] = IDLBuiltinType.Types.domstring
def p_PrimitiveOrStringTypeBytestring(self, p):
"""
PrimitiveOrStringType : BYTESTRING
"""
p[0] = IDLBuiltinType.Types.bytestring
def p_PrimitiveOrStringTypeUSVString(self, p):
"""
PrimitiveOrStringType : USVSTRING
"""
p[0] = IDLBuiltinType.Types.usvstring
def p_UnsignedIntegerTypeUnsigned(self, p):
"""
UnsignedIntegerType : UNSIGNED IntegerType
"""
# Adding one to a given signed integer type gets you the unsigned type:
p[0] = p[2] + 1
def p_UnsignedIntegerType(self, p):
"""
UnsignedIntegerType : IntegerType
"""
p[0] = p[1]
def p_IntegerTypeShort(self, p):
"""
IntegerType : SHORT
"""
p[0] = IDLBuiltinType.Types.short
def p_IntegerTypeLong(self, p):
"""
IntegerType : LONG OptionalLong
"""
if p[2]:
p[0] = IDLBuiltinType.Types.long_long
else:
p[0] = IDLBuiltinType.Types.long
def p_OptionalLong(self, p):
"""
OptionalLong : LONG
"""
p[0] = True
def p_OptionalLongEmpty(self, p):
"""
OptionalLong :
"""
p[0] = False
def p_TypeSuffixBrackets(self, p):
"""
TypeSuffix : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixQMark(self, p):
"""
TypeSuffix : QUESTIONMARK TypeSuffixStartingWithArray
"""
p[0] = [(IDLMethod.TypeSuffixModifier.QMark, self.getLocation(p, 1))]
p[0].extend(p[2])
def p_TypeSuffixEmpty(self, p):
"""
TypeSuffix :
"""
p[0] = []
def p_TypeSuffixStartingWithArray(self, p):
"""
TypeSuffixStartingWithArray : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixStartingWithArrayEmpty(self, p):
"""
TypeSuffixStartingWithArray :
"""
p[0] = []
def p_Null(self, p):
    """
    Null : QUESTIONMARK
         |
    """
    # Two slots means the QUESTIONMARK alternative matched; one slot
    # means the empty alternative did.
    p[0] = len(p) > 1
def p_ReturnTypeType(self, p):
"""
ReturnType : Type
"""
p[0] = p[1]
def p_ReturnTypeVoid(self, p):
"""
ReturnType : VOID
"""
p[0] = BuiltinTypes[IDLBuiltinType.Types.void]
def p_ScopedName(self, p):
"""
ScopedName : AbsoluteScopedName
| RelativeScopedName
"""
p[0] = p[1]
def p_AbsoluteScopedName(self, p):
"""
AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_RelativeScopedName(self, p):
"""
RelativeScopedName : IDENTIFIER ScopedNameParts
"""
assert not p[2] # Not implemented!
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_ScopedNameParts(self, p):
"""
ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_ScopedNamePartsEmpty(self, p):
"""
ScopedNameParts :
"""
p[0] = None
def p_ExtendedAttributeNoArgs(self, p):
"""
ExtendedAttributeNoArgs : IDENTIFIER
"""
p[0] = (p[1],)
def p_ExtendedAttributeArgList(self, p):
"""
ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeIdent(self, p):
"""
ExtendedAttributeIdent : IDENTIFIER EQUALS STRING
| IDENTIFIER EQUALS IDENTIFIER
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeNamedArgList(self, p):
"""
ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3], p[5])
def p_ExtendedAttributeIdentList(self, p):
"""
ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN
"""
p[0] = (p[1], p[4])
def p_IdentifierList(self, p):
"""
IdentifierList : IDENTIFIER Identifiers
"""
idents = list(p[2])
idents.insert(0, p[1])
p[0] = idents
def p_IdentifiersList(self, p):
"""
Identifiers : COMMA IDENTIFIER Identifiers
"""
idents = list(p[3])
idents.insert(0, p[2])
p[0] = idents
def p_IdentifiersEmpty(self, p):
"""
Identifiers :
"""
p[0] = []
def p_error(self, p):
    # ply error hook: p is None when the tokenizer ran out of input in the
    # middle of a production (typically a missing ';' or '}'), otherwise
    # it is the offending token.
    if not p:
        raise WebIDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both",
                          [self._filename])
    else:
        raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)])
def __init__(self, outputdir='', lexer=None):
    # Build the ply LALR parser from this module's p_* docstring grammar,
    # set up the global IDL scope with fake test globals and the builtin
    # typed-array typedefs, then pre-parse Parser._builtins.
    Tokenizer.__init__(self, outputdir, lexer)
    logger = SqueakyCleanLogger()
    try:
        self.parser = yacc.yacc(module=self,
                                outputdir=outputdir,
                                tabmodule='webidlyacc',
                                errorlog=logger,
                                debug=False
                                # Pickling the grammar is a speedup in
                                # some cases (older Python?) but a
                                # significant slowdown in others.
                                # We're not pickling for now, until it
                                # becomes a speedup again.
                                # , picklefile='WebIDLGrammar.pkl'
                                )
    finally:
        # Surface any grammar conflicts/errors the quiet logger collected.
        logger.reportGrammarErrors()
    self._globalScope = IDLScope(BuiltinLocation("<Global Scope>"), None, None)
    # To make our test harness work, pretend like we have a primary global already.
    # Note that we _don't_ set _globalScope.primaryGlobalAttr,
    # so we'll still be able to detect multiple PrimaryGlobal extended attributes.
    self._globalScope.primaryGlobalName = "FakeTestPrimaryGlobal"
    self._globalScope.globalNames.add("FakeTestPrimaryGlobal")
    self._globalScope.globalNameMapping["FakeTestPrimaryGlobal"].add("FakeTestPrimaryGlobal")
    # And we add the special-cased "System" global name, which
    # doesn't have any corresponding interfaces.
    self._globalScope.globalNames.add("System")
    self._globalScope.globalNameMapping["System"].add("BackstagePass")
    self._installBuiltins(self._globalScope)
    self._productions = []
    # NOTE(review): _filename is reset to None *before* the parse call
    # below, so errors from the builtin parse won't carry "<builtin>";
    # this matches the code as written — confirm it's intentional.
    self._filename = "<builtin>"
    self.lexer.input(Parser._builtins)
    self._filename = None
    self.parser.parse(lexer=self.lexer, tracking=True)
def _installBuiltins(self, scope):
    # Register a typedef in `scope` for each builtin buffer/typed-array
    # type (ArrayBuffer .. Float64Array).  Constructing the IDLTypedef
    # registers it in the scope as a side effect; the local binding
    # itself is intentionally unused.
    assert isinstance(scope, IDLScope)
    # xrange omits the last value.  (xrange => this file targets Python 2.)
    for x in xrange(IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1):
        builtin = BuiltinTypes[x]
        name = builtin.name
        typedef = IDLTypedef(BuiltinLocation("<builtin type>"), scope, builtin, name)
@staticmethod
def handleModifiers(type, modifiers):
    # Apply trailing type-suffix modifiers in parse order, wrapping the
    # type each time: '?' -> nullable, '[]' -> array.
    for (modifier, modifierLocation) in modifiers:
        assert (modifier == IDLMethod.TypeSuffixModifier.QMark or
                modifier == IDLMethod.TypeSuffixModifier.Brackets)
        if modifier == IDLMethod.TypeSuffixModifier.QMark:
            type = IDLNullableType(modifierLocation, type)
        elif modifier == IDLMethod.TypeSuffixModifier.Brackets:
            type = IDLArrayType(modifierLocation, type)
    return type
def parse(self, t, filename=None):
    # Parse WebIDL source text `t`, appending the resulting productions
    # to this parser's accumulated list.  `filename` is used only for
    # error locations.
    self.lexer.input(t)
    self._filename = filename
    self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True))
    self._filename = None
def finish(self):
    # Post-parse pass: synthesize iterator interfaces and Navigator
    # partials, run finish() on every production (implements statements
    # first), validate, and return the de-duplicated production list.
    # If we have interfaces that are iterable, create their
    # iterator interfaces and add them to the productions array.
    interfaceStatements = []
    for p in self._productions:
        if isinstance(p, IDLInterface):
            interfaceStatements.append(p)
            if p.identifier.name == "Navigator":
                navigatorInterface = p
    iterableIteratorIface = None
    for iface in interfaceStatements:
        navigatorProperty = iface.getNavigatorProperty()
        if navigatorProperty:
            # We're generating a partial interface to add a readonly
            # property to the Navigator interface for every interface
            # annotated with NavigatorProperty.
            # NOTE(review): navigatorInterface is only bound if an
            # interface literally named "Navigator" was parsed; a
            # NavigatorProperty without one would raise NameError here —
            # confirm that's an acceptable failure mode.
            partialInterface = IDLPartialInterfaceOrNamespace(
                iface.location,
                IDLUnresolvedIdentifier(iface.location, "Navigator"),
                [ navigatorProperty ],
                navigatorInterface)
            self._productions.append(partialInterface)
        iterable = None
        # We haven't run finish() on the interface yet, so we don't know
        # whether our interface is maplike/setlike/iterable or not. This
        # means we have to loop through the members to see if we have an
        # iterable member.
        for m in iface.members:
            if isinstance(m, IDLIterable):
                iterable = m
                break
        if iterable and iterable.isPairIterator():
            def simpleExtendedAttr(str):
                return IDLExtendedAttribute(iface.location, (str, ))
            nextMethod = IDLMethod(
                iface.location,
                IDLUnresolvedIdentifier(iface.location, "next"),
                BuiltinTypes[IDLBuiltinType.Types.object], [])
            nextMethod.addExtendedAttributes([simpleExtendedAttr("Throws")])
            itr_ident = IDLUnresolvedIdentifier(iface.location,
                                                iface.identifier.name + "Iterator")
            itr_iface = IDLInterface(iface.location, self.globalScope(),
                                     itr_ident, None, [nextMethod],
                                     isKnownNonPartial=True)
            itr_iface.addExtendedAttributes([simpleExtendedAttr("NoInterfaceObject")])
            # Make sure the exposure set for the iterator interface is the
            # same as the exposure set for the iterable interface, because
            # we're going to generate methods on the iterable that return
            # instances of the iterator.
            itr_iface._exposureGlobalNames = set(iface._exposureGlobalNames)
            # Always append generated iterable interfaces after the
            # interface they're a member of, otherwise nativeType generation
            # won't work correctly.
            itr_iface.iterableInterface = iface
            self._productions.append(itr_iface)
            iterable.iteratorType = IDLWrapperType(iface.location, itr_iface)
    # Then, finish all the IDLImplementsStatements.  In particular, we
    # have to make sure we do those before we do the IDLInterfaces.
    # XXX khuey hates this bit and wants to nuke it from orbit.
    implementsStatements = [p for p in self._productions if
                            isinstance(p, IDLImplementsStatement)]
    otherStatements = [p for p in self._productions if
                       not isinstance(p, IDLImplementsStatement)]
    for production in implementsStatements:
        production.finish(self.globalScope())
    for production in otherStatements:
        production.finish(self.globalScope())
    # Do any post-finish validation we need to do
    for production in self._productions:
        production.validate()
    # De-duplicate self._productions, without modifying its order.
    seen = set()
    result = []
    for p in self._productions:
        if p not in seen:
            seen.add(p)
            result.append(p)
    return result
def reset(self):
    # Return a fresh Parser that reuses this instance's already-built
    # lexer, discarding all accumulated productions and scope state.
    return Parser(lexer=self.lexer)
# Builtin IDL defined by WebIDL
_builtins = """
typedef unsigned long long DOMTimeStamp;
typedef (ArrayBufferView or ArrayBuffer) BufferSource;
"""
def main():
# Parse arguments.
from optparse import OptionParser
usageString = "usage: %prog [options] files"
o = OptionParser(usage=usageString)
o.add_option("--cachedir", dest='cachedir', default=None,
help="Directory in which to cache lex/parse tables.")
o.add_option("--verbose-errors", action='store_true', default=False,
help="When an error happens, display the Python traceback.")
(options, args) = o.parse_args()
if len(args) < 1:
o.error(usageString)
fileList = args
baseDir = os.getcwd()
# Parse the WebIDL.
parser = Parser(options.cachedir)
try:
for filename in fileList:
fullPath = os.path.normpath(os.path.join(baseDir, filename))
f = open(fullPath, 'rb')
lines = f.readlines()
f.close()
print fullPath
parser.parse(''.join(lines), fullPath)
parser.finish()
except WebIDLError, e:
if options.verbose_errors:
traceback.print_exc()
else:
print e
if __name__ == '__main__':
main()
|
AlexTISYoung/hlmm | refs/heads/master | hlmm/__init__.py | 1 | import hetlm
import hetlmm |
40223244/cdb-2 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/sre_constants.py | 692 | #
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
#MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
    """Exception raised for invalid regular expressions (exposed as re.error)."""
    pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
    """Map each item of *list* to its position index (0, 1, 2, ...)."""
    # Parameter keeps its historical name (shadowing the builtin) so the
    # call signature stays unchanged for existing callers.
    return {item: index for index, item in enumerate(list)}
# Replace the name lists with name -> numeric code mappings.
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)

# replacement operations for "ignore case" mode
OP_IGNORE = {
    GROUPREF: GROUPREF_IGNORE,
    IN: IN_IGNORE,
    LITERAL: LITERAL_IGNORE,
    NOT_LITERAL: NOT_LITERAL_IGNORE
}

# anchor replacements applied in multiline mode
AT_MULTILINE = {
    AT_BEGINNING: AT_BEGINNING_LINE,
    AT_END: AT_END_LINE
}

# anchor replacements applied in locale mode
AT_LOCALE = {
    AT_BOUNDARY: AT_LOC_BOUNDARY,
    AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}

# anchor replacements applied in unicode mode
AT_UNICODE = {
    AT_BOUNDARY: AT_UNI_BOUNDARY,
    AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}

# category replacements for locale mode; entries mapping to themselves
# have no locale-specific variant
CH_LOCALE = {
    CATEGORY_DIGIT: CATEGORY_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_LOC_WORD,
    CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}

# category replacements for unicode mode
CH_UNICODE = {
    CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_UNI_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_UNI_WORD,
    CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}

# flags (bit values, combinable)
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"

# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
    def dump(f, d, prefix):
        """Write one '#define PREFIX_NAME value' line per entry of *d*,
        ordered by numeric code so the header is stable across runs."""
        items = sorted(d.items(), key=lambda a: a[1])
        for k, v in items:
            f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))

    # Regenerate the C header consumed by _sre.c.  Using a context manager
    # guarantees the file is flushed and closed even if a write fails
    # (the original opened the file with a bare open() and leaked the
    # handle on error).
    with open("sre_constants.h", "w") as f:
        f.write("""\
/*
 * Secret Labs' Regular Expression Engine
 *
 * regular expression matching engine
 *
 * NOTE: This file is generated by sre_constants.py. If you need
 * to change anything in here, edit sre_constants.py and run it.
 *
 * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
 *
 * See the _sre.c file for information on usage and redistribution.
 */

""")
        # MAGIC is defined earlier in this module.
        f.write("#define SRE_MAGIC %d\n" % MAGIC)
        dump(f, OPCODES, "SRE_OP")
        dump(f, ATCODES, "SRE")
        dump(f, CHCODES, "SRE")
        f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
        f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
        f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
        f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
        f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
        f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
        f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
        f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
        f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
        f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
    print("done")
|
ukncsc/viper | refs/heads/master | viper/core/database.py | 2 | # This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
from __future__ import unicode_literals # make all strings unicode in python2
import os
import json
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Text
from sqlalchemy import Table, Index, create_engine, and_
from sqlalchemy.pool import NullPool
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, sessionmaker
from sqlalchemy.exc import SQLAlchemyError, IntegrityError
from viper.common.out import print_warning, print_error
from viper.common.objects import File
from viper.core.project import __project__
from viper.core.config import Config
cfg = Config()
Base = declarative_base()

# Many-to-many link table connecting malware samples to their tags, notes
# and analysis results (a single association row may reference any of them).
association_table = Table(
    'association',
    Base.metadata,
    Column('tag_id', Integer, ForeignKey('tag.id')),
    Column('note_id', Integer, ForeignKey('note.id')),
    Column('malware_id', Integer, ForeignKey('malware.id')),
    Column('analysis_id', Integer, ForeignKey('analysis.id'))
)
class Malware(Base):
    """ORM model for a stored malware sample: its hashes, size, type
    metadata, optional parent sample, and related tags/notes/analyses."""

    __tablename__ = 'malware'

    id = Column(Integer(), primary_key=True)
    name = Column(String(255), nullable=True)
    size = Column(Integer(), nullable=False)
    type = Column(Text(), nullable=True)
    mime = Column(String(255), nullable=True)
    md5 = Column(String(32), nullable=False, index=True)
    crc32 = Column(String(8), nullable=False)
    sha1 = Column(String(40), nullable=False)
    sha256 = Column(String(64), nullable=False, index=True)
    sha512 = Column(String(128), nullable=False)
    ssdeep = Column(String(255), nullable=True)
    # BUG FIX: pass the callable, not its result.  With default=datetime.now()
    # the timestamp was evaluated once at import time, so every inserted row
    # got the process start time instead of its own insertion time.
    created_at = Column(DateTime(timezone=False), default=datetime.now, nullable=False)
    parent_id = Column(Integer(), ForeignKey('malware.id'))
    parent = relationship('Malware', lazy='subquery', remote_side=[id])
    tag = relationship(
        'Tag',
        secondary=association_table,
        backref=backref('malware')
    )
    note = relationship(
        'Note',
        cascade='all, delete',
        secondary=association_table,
        backref=backref('malware')
    )
    analysis = relationship(
        'Analysis',
        cascade='all, delete',
        secondary=association_table,
        backref=backref('malware')
    )
    # All hash columns together must be unique: the same sample is stored
    # only once.
    __table_args__ = (Index(
        'hash_index',
        'md5',
        'crc32',
        'sha1',
        'sha256',
        'sha512',
        unique=True
    ),)

    def to_dict(self):
        """Return the row as a plain {column name: value} dict."""
        row_dict = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            row_dict[column.name] = value

        return row_dict

    def __repr__(self):
        return "<Malware('{0}','{1}')>".format(self.id, self.md5)

    def __init__(self,
                 md5,
                 crc32,
                 sha1,
                 sha256,
                 sha512,
                 size,
                 type=None,
                 mime=None,
                 ssdeep=None,
                 name=None,
                 parent=None):
        self.md5 = md5
        self.sha1 = sha1
        self.crc32 = crc32
        self.sha256 = sha256
        self.sha512 = sha512
        self.size = size
        self.type = type
        self.mime = mime
        self.ssdeep = ssdeep
        self.name = name
        self.parent = parent
class Tag(Base):
    """ORM model for a tag; tags are unique and shared across samples."""

    __tablename__ = 'tag'

    id = Column(Integer(), primary_key=True)
    tag = Column(String(255), nullable=False, unique=True, index=True)

    def to_dict(self):
        """Return the row as a plain {column name: value} dict."""
        row_dict = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            row_dict[column.name] = value

        return row_dict

    def __repr__(self):
        # BUG FIX: the original format string "<Tag ('{0}','{1}'>" had
        # unbalanced parentheses; match the repr style used by Malware.
        return "<Tag('{0}','{1}')>".format(self.id, self.tag)

    def __init__(self, tag):
        self.tag = tag
class Note(Base):
    """ORM model for a free-form note attached to a malware sample."""

    __tablename__ = 'note'

    id = Column(Integer(), primary_key=True)
    title = Column(String(255), nullable=True)
    body = Column(Text(), nullable=False)

    def to_dict(self):
        """Return the row as a plain {column name: value} dict."""
        row_dict = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            row_dict[column.name] = value

        return row_dict

    def __repr__(self):
        # BUG FIX: the original format string "<Note ('{0}','{1}'>" had
        # unbalanced parentheses; match the repr style used by Malware.
        return "<Note('{0}','{1}')>".format(self.id, self.title)

    def __init__(self, title, body):
        self.title = title
        self.body = body
class Analysis(Base):
    """ORM model for a stored module/command analysis result (JSON body)."""

    __tablename__ = 'analysis'

    id = Column(Integer(), primary_key=True)
    cmd_line = Column(String(255), nullable=True)
    results = Column(Text(), nullable=False)
    # BUG FIX: pass the callable, not its result, so each row gets its own
    # timestamp instead of the import-time value (same defect as Malware).
    stored_at = Column(DateTime(timezone=False), default=datetime.now, nullable=False)

    def to_dict(self):
        """Return the row as a plain {column name: value} dict."""
        row_dict = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            row_dict[column.name] = value

        return row_dict

    def __repr__(self):
        # BUG FIX: the original said "<Note ..." (copy/paste from the Note
        # class) and had unbalanced parentheses.
        return "<Analysis('{0}','{1}')>".format(self.id, self.cmd_line)

    def __init__(self, cmd_line, results):
        self.cmd_line = cmd_line
        self.results = results
class Database:
    """Persistence layer for the current Viper project: stores samples
    (Malware rows) together with their tags, notes and analysis results.

    NOTE(review): several read-only methods below return ORM objects from a
    session that is never explicitly closed - confirm this is intentional
    (lazy attributes on the returned rows need a live session).
    """
    #__metaclass__ = Singleton

    def __init__(self):
        # Use the configured connection string when present, otherwise fall
        # back to a project-local SQLite database (see _connect_database).
        if hasattr(cfg, "database") and cfg.database.connection:
            self._connect_database(cfg.database.connection)
        else:
            self._connect_database("")
        self.engine.echo = False
        self.engine.pool_timeout = 60

        Base.metadata.create_all(self.engine)
        self.Session = sessionmaker(bind=self.engine)

    def __del__(self):
        # Release pooled connections when this object is garbage-collected.
        self.engine.dispose()

    def _connect_database(self, connection):
        """Create the SQLAlchemy engine for the given connection string."""
        if connection.startswith("mysql+pymysql"):
            self.engine = create_engine(connection)
        elif connection.startswith("mysql"):
            # NOTE(review): "check_same_thread" is an SQLite connect argument;
            # passing it to a MySQL driver looks like a copy/paste slip - confirm.
            self.engine = create_engine(connection, connect_args={"check_same_thread": False})
        elif connection.startswith("postgresql"):
            self.engine = create_engine(connection, connect_args={"sslmode": "disable"})
        else:
            # Default: SQLite file inside the active project directory.
            db_path = os.path.join(__project__.get_path(), 'viper.db')
            self.engine = create_engine('sqlite:///{0}'.format(db_path), poolclass=NullPool)

    def add_tags(self, sha256, tags):
        """Attach one or more tags to the sample identified by *sha256*.

        Silently returns if no sample with that hash exists.
        """
        session = self.Session()

        malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
        if not malware_entry:
            return

        # The tags argument might be a list, a single tag, or a
        # comma-separated list of tags.
        if isinstance(tags, str):
            tags = tags.strip()
            if ',' in tags:
                tags = tags.split(',')
            else:
                tags = tags.split()

        for tag in tags:
            # tags are normalized to lowercase
            tag = tag.strip().lower()
            if tag == '':
                continue

            try:
                malware_entry.tag.append(Tag(tag))
                session.commit()
            except IntegrityError:
                # The tag already exists (unique constraint): re-use the
                # existing row instead of inserting a duplicate.
                session.rollback()
                try:
                    malware_entry.tag.append(session.query(Tag).filter(Tag.tag==tag).first())
                    session.commit()
                except SQLAlchemyError:
                    session.rollback()

    def list_tags(self):
        """Return all Tag rows in the database."""
        session = self.Session()
        rows = session.query(Tag).all()
        return rows

    def delete_tag(self, tag_name, sha256):
        """Detach *tag_name* from the given sample; drop the tag entirely
        if no other sample still references it."""
        session = self.Session()

        try:
            # First remove the tag from the sample
            malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
            tag = session.query(Tag).filter(Tag.tag==tag_name).first()
            try:
                malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
                malware_entry.tag.remove(tag)
                session.commit()
            except:
                # NOTE(review): bare except hides the real failure (including
                # AttributeError when the sample is missing) - consider
                # narrowing to (ValueError, AttributeError).
                print_error("Tag {0} does not exist for this sample".format(tag_name))

            # If tag has no entries drop it
            count = len(self.find('tag', tag_name))
            if count == 0:
                session.delete(tag)
                session.commit()
                print_warning("Tag {0} has no additional entries dropping from Database".format(tag_name))
        except SQLAlchemyError as e:
            print_error("Unable to delete tag: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def add_note(self, sha256, title, body):
        """Attach a new note (title + body) to the sample with *sha256*."""
        session = self.Session()

        malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
        if not malware_entry:
            return

        try:
            malware_entry.note.append(Note(title, body))
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to add note: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def get_note(self, note_id):
        """Return the Note row with the given primary key (or None)."""
        session = self.Session()
        note = session.query(Note).get(note_id)
        return note

    def edit_note(self, note_id, body):
        """Replace the body of an existing note."""
        session = self.Session()

        try:
            session.query(Note).get(note_id).body = body
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to update note: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def delete_note(self, note_id):
        """Delete the note with the given primary key."""
        session = self.Session()

        try:
            note = session.query(Note).get(note_id)
            session.delete(note)
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to delete note: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def add(self, obj, name=None, tags=None, parent_sha=None, notes_body=None, notes_title=None):
        """Store a File object as a Malware row, optionally with tags,
        a parent sample and a note.

        :returns: bool -- False only when the insert itself fails; an
            already-existing sample (IntegrityError) still returns True.
        """
        session = self.Session()

        if not name:
            name = obj.name

        if parent_sha:
            parent_sha = session.query(Malware).filter(Malware.sha256 == parent_sha).first()

        if isinstance(obj, File):
            try:
                malware_entry = Malware(md5=obj.md5,
                                        crc32=obj.crc32,
                                        sha1=obj.sha1,
                                        sha256=obj.sha256,
                                        sha512=obj.sha512,
                                        size=obj.size,
                                        type=obj.type,
                                        mime=obj.mime,
                                        ssdeep=obj.ssdeep,
                                        name=name,
                                        parent=parent_sha)
                session.add(malware_entry)
                session.commit()
            except IntegrityError:
                # Sample already stored: fall through and reuse the row so
                # tags/notes can still be attached below.
                session.rollback()
                malware_entry = session.query(Malware).filter(Malware.md5 == obj.md5).first()
            except SQLAlchemyError as e:
                print_error("Unable to store file: {0}".format(e))
                session.rollback()
                return False

        if tags:
            self.add_tags(sha256=obj.sha256, tags=tags)

        if notes_body and notes_title:
            self.add_note(sha256=obj.sha256, title=notes_title, body=notes_body)

        return True

    def rename(self, id, name):
        """Rename the sample with primary key *id*; returns True on success."""
        session = self.Session()

        if not name:
            return False

        try:
            malware = session.query(Malware).get(id)
            if not malware:
                print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
                return False

            malware.name = name
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to rename file: {}".format(e))
            session.rollback()
            return False
        finally:
            session.close()

        return True

    def delete_file(self, id):
        """Delete the sample with primary key *id*; returns True on success."""
        session = self.Session()

        try:
            malware = session.query(Malware).get(id)
            if not malware:
                print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
                return False

            session.delete(malware)
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to delete file: {0}".format(e))
            session.rollback()
            return False
        finally:
            session.close()

        return True

    def find(self, key, value=None, offset=0):
        """Search stored samples.

        :param key: search mode - one of 'all', 'ssdeep', 'any', 'latest',
            'md5', 'sha1', 'sha256', 'tag', 'name', 'note', 'type', 'mime'.
        :param value: search term (meaning depends on *key*).
        :param offset: pagination offset (only used by 'latest').
        :returns: list of Malware rows, or None on invalid input.
        """
        session = self.Session()
        # NOTE(review): offset is only applied in the 'latest' branch.
        offset = int(offset)
        rows = None

        if key == 'all':
            rows = session.query(Malware).all()
        elif key == 'ssdeep':
            ssdeep_val = str(value)
            rows = session.query(Malware).filter(Malware.ssdeep.contains(ssdeep_val)).all()
        elif key == 'any':
            # prefix match on names/hashes, substring match on type/mime
            prefix_val = str(value)
            rows = session.query(Malware).filter(Malware.name.startswith(prefix_val) |
                                                 Malware.md5.startswith(prefix_val) |
                                                 Malware.sha1.startswith(prefix_val) |
                                                 Malware.sha256.startswith(prefix_val) |
                                                 Malware.type.contains(prefix_val) |
                                                 Malware.mime.contains(prefix_val)).all()
        elif key == 'latest':
            if value:
                try:
                    value = int(value)
                except ValueError:
                    print_error("You need to specify a valid number as a limit for your query")
                    return None
            else:
                # default: the 5 most recently added samples
                value = 5

            rows = session.query(Malware).order_by(Malware.id.desc()).limit(value).offset(offset)
        elif key == 'md5':
            rows = session.query(Malware).filter(Malware.md5 == value).all()
        elif key == 'sha1':
            rows = session.query(Malware).filter(Malware.sha1 == value).all()
        elif key == 'sha256':
            rows = session.query(Malware).filter(Malware.sha256 == value).all()
        elif key == 'tag':
            rows = session.query(Malware).filter(self.tag_filter(value)).all()
        elif key == 'name':
            if not value:
                print_error("You need to specify a valid file name pattern (you can use wildcards)")
                return None

            # translate shell-style wildcards into SQL LIKE syntax
            if '*' in value:
                value = value.replace('*', '%')
            else:
                value = '%{0}%'.format(value)

            rows = session.query(Malware).filter(Malware.name.like(value)).all()
        elif key == 'note':
            value = '%{0}%'.format(value)
            rows = session.query(Malware).filter(Malware.note.any(Note.body.like(value))).all()
        elif key == 'type':
            rows = session.query(Malware).filter(Malware.type.like('%{0}%'.format(value))).all()
        elif key == 'mime':
            rows = session.query(Malware).filter(Malware.mime.like('%{0}%'.format(value))).all()
        else:
            print_error("No valid term specified")

        return rows

    def tag_filter(self, value):
        """Build a SQLAlchemy filter for a tag expression.

        Supports "a|b" (any of) and "a&b" (all of); mixing the two
        operators is rejected. Returns None on invalid input.
        """
        if not value:
            return None
        if "|" in value and "&" in value:
            print_error("Do not use &' and '|' at the same time.")
            return None
        if "|" in value:
            filt = Malware.tag.any(Tag.tag.in_(value.lower().split("|")))
        elif "&" in value:
            tags = []
            for tt in value.lower().split("&"):
                tags.append(Malware.tag.any(Tag.tag == tt))
            filt = and_(*tags)
        else:
            filt = Malware.tag.any(Tag.tag == value.lower())
        return filt

    def get_sample_count(self):
        """Return the total number of stored samples."""
        session = self.Session()
        return session.query(Malware.id).count()

    def add_parent(self, malware_sha256, parent_sha256):
        """Link the sample *malware_sha256* to its parent *parent_sha256*."""
        session = self.Session()

        try:
            malware = session.query(Malware).filter(Malware.sha256 == malware_sha256).first()
            malware.parent = session.query(Malware).filter(Malware.sha256 == parent_sha256).first()
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to add parent: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def delete_parent(self, malware_sha256):
        """Remove the parent link of the sample *malware_sha256*."""
        session = self.Session()

        try:
            malware = session.query(Malware).filter(Malware.sha256 == malware_sha256).first()
            malware.parent = None
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to delete parent: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def get_children(self, parent_id):
        """Return the sha256 hashes of all children of *parent_id* as a
        single comma-separated string (with a trailing comma)."""
        session = self.Session()
        children = session.query(Malware).filter(Malware.parent_id == parent_id).all()
        child_samples = ''
        for child in children:
            child_samples += '{0},'.format(child.sha256)
        return child_samples

    # Store Module / Cmd Output
    def add_analysis(self, sha256, cmd_line, results):
        """Store a module/command result (JSON-serialized) for a sample."""
        results = json.dumps(results)
        session = self.Session()

        malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
        if not malware_entry:
            return

        try:
            malware_entry.analysis.append(Analysis(cmd_line, results))
            session.commit()
        except SQLAlchemyError as e:
            print_error("Unable to store analysis: {0}".format(e))
            session.rollback()
        finally:
            session.close()

    def get_analysis(self, analysis_id):
        """Return the Analysis row with the given primary key (or None)."""
        session = self.Session()
        analysis = session.query(Analysis).get(analysis_id)
        return analysis
|
paxchristos/Semc-ICS-kernel | refs/heads/master | arch/ia64/scripts/unwcheck.py | 916 | #!/usr/bin/env python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# NOTE: this script is Python 2 (print statements, long literals below).
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# The readelf binary can be overridden via the READELF environment variable.
readelf = os.getenv("READELF", "readelf")

# Matches "readelf -u" output: "<func>: [0xSTART-0xEND]" and "... rlen=N".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # A function's unwind info is consistent when the number of instruction
    # slots in its address range equals the sum of its region lengths.
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # 'start' and 'end' are module-level globals set by the parsing
        # loop below; used when the function had no symbolic name.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
# Parse "readelf -u" output: each function header line starts a new record;
# the previous record is validated before the new one begins.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        check_func(func, slots, rlen_sum)

        func  = m.group(1)
        start = long(m.group(2), 16)
        end   = long(m.group(3), 16)
        # IA-64 bundles: 3 instruction slots per 16 bytes.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# validate the final function (the loop only checks on the NEXT header)
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
|
cilekagaci/engerek | refs/heads/master | src/engerek/zemberek.py | 1 | # coding=utf-8
import jnius
from base import Stemmer
from common import infinitive_form
# Java classes bridged in via pyjnius; requires a JVM with the Zemberek
# jar on the class path.
TurkiyeTurkcesi = jnius.autoclass('net.zemberek.tr.yapi.TurkiyeTurkcesi')
Zemberek = jnius.autoclass('net.zemberek.erisim.Zemberek')
class ZemberekStemmer(Stemmer):
    """Turkish stemmer based on Zemberek library."""

    # Shared, class-level Zemberek objects: one JVM-side engine instance
    # serves all ZemberekStemmer objects.
    LANGUAGE = TurkiyeTurkcesi()
    ZEMBEREK = Zemberek(LANGUAGE)
    ROOT_FINDER = ZEMBEREK.kokBulucu()

    def __init__(self, prefer_infinitive_form=False):
        """Creates a ZemberekStemmer.

        :param prefer_infinitive_form: return verb stems in infinitive form if
            ``True``; otherwise return only the root (default value is
            ``False``)
        :type prefer_infinitive_form: bool
        """
        self.prefer_infinitive_form = prefer_infinitive_form

    def stem(self, word):
        """Finds the stem of the given word.

        :param word: word to be stemmed
        :type word: unicode
        :return: list of possible stems of the word; the word itself when
            Zemberek finds no root
        :rtype: list of unicode
        """
        roots = self.ROOT_FINDER.kokBul(word)
        if len(roots) == 0:
            return [word]
        stems = []
        for root in roots:
            stem = root.icerik().decode('utf-8')
            # FIX: renamed local from "type" to avoid shadowing the builtin.
            # 'FIIL' is Zemberek's tag for verbs.
            root_type = root.tip().toString()
            if self.prefer_infinitive_form and root_type == 'FIIL':
                stems.append(infinitive_form(stem))
            else:
                stems.append(stem)
        return stems
|
GoodgameStudios/crossbar | refs/heads/master | crossbar/controller/process.py | 1 | #####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
import os
import sys
import pkg_resources
# backport of shutil.which
import shutilwhich # noqa
import shutil
from datetime import datetime
from twisted.internet.defer import Deferred, DeferredList, returnValue, inlineCallbacks
from twisted.internet.error import ProcessExitedAlready
from twisted.internet.threads import deferToThread
from autobahn.util import utcnow, utcstr
from autobahn.wamp.exception import ApplicationError
from autobahn.wamp.types import PublishOptions, RegisterOptions
from autobahn.twisted.util import sleep
from crossbar.common import checkconfig
from crossbar.twisted.processutil import WorkerProcessEndpoint
from crossbar.controller.native import create_native_worker_client_factory
from crossbar.controller.guest import create_guest_worker_client_factory
from crossbar.controller.processtypes import RouterWorkerProcess, \
ContainerWorkerProcess, \
GuestWorkerProcess
from crossbar.common.process import NativeProcessSession
from twisted.internet import reactor
from crossbar.twisted.endpoint import create_listening_port_from_config
from autobahn.twisted.websocket import WampWebSocketServerFactory
from crossbar.platform import HAS_FSNOTIFY, DirWatcher
from crossbar._logging import make_logger
__all__ = ('NodeControllerSession', 'create_process_env')
def check_executable(fn):
    """
    Check whether the given path is an executable.
    """
    # A directory can carry the execute bit but is not a runnable program.
    if os.path.isdir(fn):
        return False
    return os.path.exists(fn) and os.access(fn, os.F_OK | os.X_OK)
class ManagementTransport:
    """
    Local management service running inside node controller.
    """

    def __init__(self, config, who):
        """
        Ctor.

        :param config: The configuration the manhole service was started with.
        :type config: dict
        :param who: Who triggered creation of this service.
        :type who: str
        """
        self.config = config
        self.who = who
        # Lifecycle bookkeeping: creation time is recorded immediately;
        # start time and listening port are filled in once running.
        self.status = 'starting'
        self.created = datetime.utcnow()
        self.started = None
        self.port = None

    def marshal(self):
        """
        Marshal object information for use with WAMP calls/events.

        :returns: dict -- The marshalled information.
        """
        now = datetime.utcnow()
        if self.started:
            started = utcstr(self.started)
            uptime = (now - self.started).total_seconds()
        else:
            started = None
            uptime = None
        return {
            'created': utcstr(self.created),
            'status': self.status,
            'started': started,
            'uptime': uptime,
            'config': self.config,
        }
class NodeControllerSession(NativeProcessSession):
"""
Singleton node WAMP session hooked up to the node management router.
This class exposes the node's management API.
"""
log = make_logger()
    def __init__(self, node):
        """
        :param node: The node singleton for this node controller session.
        :type node: obj
        """
        NativeProcessSession.__init__(self)

        # associated node
        self._node = node
        self._node_id = node._node_id
        self._realm = node._realm
        self.cbdir = self._node._cbdir

        self._created = utcnow()
        self._pid = os.getpid()

        # map of worker processes: worker_id -> NativeWorkerProcess
        self._workers = {}

        # local management transport, created on demand (see
        # start_management_transport)
        self._management_transport = None

    def onConnect(self):
        """
        Transport established: join the node's management realm.
        """
        # self._uri_prefix = 'crossbar.node.{}'.format(self.config.extra.node)
        self._uri_prefix = 'crossbar.node.{}'.format(self._node_id)

        NativeProcessSession.onConnect(self, False)

        # self.join(self.config.realm)
        self.join(self._realm)
    @inlineCallbacks
    def onJoin(self, details):
        """
        Session joined: subscribe for worker-ready signals and register
        the node controller's management API procedures.
        """
        # When a (native) worker process has connected back to the router of
        # the node controller, the worker will publish this event
        # to signal its readiness.
        #
        def on_worker_ready(res):
            id = res['id']
            if id in self._workers:
                ready = self._workers[id].ready
                if not ready.called:
                    # fire the Deferred previously stored for
                    # signaling "worker ready"
                    ready.callback(id)
                else:
                    self.log.error("INTERNAL ERROR: on_worker_ready() fired for process {process} - ready already called",
                                   process=id)
            else:
                self.log.error("INTERNAL ERROR: on_worker_ready() fired for process {process} - no process with that ID",
                               process=id)

        self.subscribe(on_worker_ready, 'crossbar.node.{}.on_worker_ready'.format(self._node_id))

        yield NativeProcessSession.onJoin(self, details)

        # register node controller procedures: 'crossbar.node.<ID>.<PROCEDURE>'
        #
        procs = [
            'shutdown',
            'start_management_transport',
            'get_info',
            'get_workers',
            'get_worker_log',
            'start_router',
            'stop_router',
            'start_container',
            'stop_container',
            'start_guest',
            'stop_guest',
        ]

        dl = []
        for proc in procs:
            uri = '{}.{}'.format(self._uri_prefix, proc)
            self.log.debug("Registering procedure '{uri}'", uri=uri)
            # details_arg='details' injects call details into each endpoint
            dl.append(self.register(getattr(self, proc), uri, options=RegisterOptions(details_arg='details')))

        regs = yield DeferredList(dl)

        self.log.debug("{me} registered {registers} procedures",
                       me=self.__class__.__name__, registers=len(regs))

        # FIXME: publish node ready event
    @inlineCallbacks
    def shutdown(self, restart=False, details=None):
        """
        Stop this node.

        :param restart: NOTE(review): currently unused here - confirm
            whether restart handling lives elsewhere.
        """
        self.log.warn("Shutting down node...")

        shutdown_topic = 'crossbar.node.{}.on_shutdown'.format(self._node_id)

        shutdown_info = {
        }

        yield self.publish(shutdown_topic, shutdown_info, options=PublishOptions(acknowledge=True))

        # give subscribers a moment to receive the shutdown event before
        # the reactor is stopped
        yield sleep(3)

        self._node._reactor.stop()
    @inlineCallbacks
    def start_management_transport(self, config, details=None):
        """
        Start transport for local management router.

        :param config: Transport configuration.
        :type config: obj

        :raises ApplicationError: ``crossbar.error.already_started``,
            ``crossbar.error.invalid_configuration`` or
            ``crossbar.error.cannot_listen``.
        :returns: dict -- marshalled transport info (also published as the
            ``on_management_transport_started`` event).
        """
        if self.debug:
            self.log.debug("{me}.start_management_transport",
                           me=self.__class__.__name__, config=config)

        # only one management transport may exist at a time
        if self._management_transport:
            emsg = "ERROR: could not start management transport - already running (or starting)"
            # NOTE(review): log.failure() is normally given a Failure, not a
            # plain string - confirm intended usage here and below.
            self.log.failure(emsg)
            raise ApplicationError("crossbar.error.already_started", emsg)

        try:
            checkconfig.check_listening_transport_websocket(config)
        except Exception as e:
            emsg = "ERROR: could not start management transport - invalid configuration ({})".format(e)
            self.log.failure(emsg)
            raise ApplicationError('crossbar.error.invalid_configuration', emsg)

        self._management_transport = ManagementTransport(config, details.caller)

        factory = WampWebSocketServerFactory(self._node._router_session_factory, debug=False)
        factory.setProtocolOptions(failByDrop=False)
        factory.noisy = False

        starting_topic = '{}.on_management_transport_starting'.format(self._uri_prefix)
        starting_info = self._management_transport.marshal()

        # the caller gets a progressive result ..
        if details.progress:
            details.progress(starting_info)

        # .. while all others get an event
        self.publish(starting_topic, starting_info, options=PublishOptions(exclude=[details.caller]))

        try:
            self._management_transport.port = yield create_listening_port_from_config(config['endpoint'], factory, self.cbdir, reactor)
        except Exception as e:
            # endpoint could not listen: reset state so a retry is possible
            self._management_transport = None
            emsg = "ERROR: local management service endpoint cannot listen - {}".format(e)
            self.log.failure(emsg)
            raise ApplicationError("crossbar.error.cannot_listen", emsg)

        # alright, manhole has started
        self._management_transport.started = datetime.utcnow()
        self._management_transport.status = 'started'

        started_topic = '{}.on_management_transport_started'.format(self._uri_prefix)
        started_info = self._management_transport.marshal()
        self.publish(started_topic, started_info, options=PublishOptions(exclude=[details.caller]))

        returnValue(started_info)
    def get_info(self, details=None):
        """
        Return node information.

        :returns: dict -- creation time, PID, worker count, node directory
            and installed WAMPlets.
        """
        return {
            'created': self._created,
            'pid': self._pid,
            'workers': len(self._workers),
            'directory': self.cbdir,
            'wamplets': self._get_wamplets()
        }
def _get_wamplets(self):
"""
List installed WAMPlets.
"""
res = []
for entrypoint in pkg_resources.iter_entry_points('autobahn.twisted.wamplet'):
try:
e = entrypoint.load()
except Exception as e:
pass
else:
ep = {}
ep['dist'] = entrypoint.dist.key
ep['version'] = entrypoint.dist.version
ep['location'] = entrypoint.dist.location
ep['name'] = entrypoint.name
ep['module_name'] = entrypoint.module_name
ep['entry_point'] = str(entrypoint)
if hasattr(e, '__doc__') and e.__doc__:
ep['doc'] = e.__doc__.strip()
else:
ep['doc'] = None
ep['meta'] = e(None)
res.append(ep)
return sorted(res)
    def get_workers(self, details=None):
        """
        Returns the list of processes currently running on this node.

        :returns: list -- List of worker processes, ordered by creation
            time, each marshalled as a dict.
        """
        now = datetime.utcnow()
        res = []
        for worker in sorted(self._workers.values(), key=lambda w: w.created):
            res.append({
                'id': worker.id,
                'pid': worker.pid,
                'type': worker.TYPE,
                'status': worker.status,
                'created': utcstr(worker.created),
                'started': utcstr(worker.started),
                'startup_time': (worker.started - worker.created).total_seconds() if worker.started else None,
                'uptime': (now - worker.started).total_seconds() if worker.started else None,
            })
        return res

    def get_worker_log(self, id, limit=None, details=None):
        """
        Get buffered worker log.

        :param id: The ID of the worker to fetch log output for.
        :type id: str
        :param limit: Optionally, limit the amount of log entries returned
           to the last N entries.
        :type limit: None or int

        :raises ApplicationError: ``crossbar.error.no_such_worker`` when no
            worker with the given ID exists.
        :returns: list -- Buffered log.
        """
        if id not in self._workers:
            emsg = "ERROR: no worker with ID '{}'".format(id)
            raise ApplicationError('crossbar.error.no_such_worker', emsg)

        return self._workers[id].getlog(limit)
    def start_router(self, id, options=None, details=None):
        """
        Start a new router worker: a Crossbar.io native worker process
        that runs a WAMP router.

        :param id: The worker ID to start this router with.
        :type id: str
        :param options: The router worker options.
        :type options: dict
        """
        self.log.debug("NodeControllerSession.start_router({id}, options={options})",
                       id=id, options=options)

        # shared implementation for router and container workers
        return self._start_native_worker('router', id, options, details=details)

    def start_container(self, id, options=None, details=None):
        """
        Start a new container worker: a Crossbar.io native worker process
        that can host WAMP application components written in Python.

        :param id: The worker ID to start this container with.
        :type id: str
        :param options: The container worker options.
        :type options: dict
        """
        self.log.debug("NodeControllerSession.start_container(id = {id}, options = {options})",
                       id=id, options=options)

        # shared implementation for router and container workers
        return self._start_native_worker('container', id, options, details=details)
def _start_native_worker(self, wtype, id, options=None, details=None):
    """
    Start a native worker process (the shared implementation behind
    :meth:`start_router` and :meth:`start_container`).

    :param wtype: Worker type, one of ``'router'`` or ``'container'``.
    :type wtype: str
    :param id: The (node-wide unique) worker ID.
    :type id: str
    :param options: Worker options, validated against the config schema.
    :type options: dict or None
    :param details: WAMP call details (caller is excluded from the
        starting/started events and receives progressive results).
    :returns: Deferred (``worker.ready``) that fires with the worker
        startup info dict once the worker has connected back and is ready.
    :raises: ApplicationError -- if a worker with this ID already runs,
        or the options are invalid.
    """
    assert(wtype in ['router', 'container'])

    # prohibit starting a worker twice
    #
    if id in self._workers:
        # FIX: removed stray apostrophe after '{}' (message now matches
        # the equivalent one in start_guest)
        emsg = "ERROR: could not start worker - a worker with ID '{}' is already running (or starting)".format(id)
        self.log.error(emsg)
        raise ApplicationError('crossbar.error.worker_already_running', emsg)

    # check worker options
    #
    options = options or {}
    try:
        if wtype == 'router':
            checkconfig.check_router_options(options)
        elif wtype == 'container':
            checkconfig.check_container_options(options)
        else:
            raise Exception("logic error")
    except Exception as e:
        emsg = "ERROR: could not start native worker - invalid configuration ({})".format(e)
        self.log.error(emsg)
        raise ApplicationError('crossbar.error.invalid_configuration', emsg)

    # allow override Python executable from options
    #
    if 'python' in options:
        exe = options['python']

        # the executable must be an absolute path, e.g. /home/oberstet/pypy-2.2.1-linux64/bin/pypy
        #
        if not os.path.isabs(exe):
            emsg = "ERROR: python '{}' from worker options must be an absolute path".format(exe)
            self.log.error(emsg)
            raise ApplicationError('crossbar.error.invalid_configuration', emsg)

        # of course the path must exist and actually be executable
        #
        if not (os.path.isfile(exe) and os.access(exe, os.X_OK)):
            emsg = "ERROR: python '{}' from worker options does not exist or isn't an executable".format(exe)
            self.log.error(emsg)
            raise ApplicationError('crossbar.error.invalid_configuration', emsg)
    else:
        exe = sys.executable

    # all native workers (routers and containers for now) start from the same script
    #
    filename = pkg_resources.resource_filename('crossbar', 'worker/process.py')

    # assemble command line for forking the worker
    #
    args = [exe, "-u", filename]
    args.extend(["--cbdir", self._node._cbdir])
    args.extend(["--node", str(self._node_id)])
    args.extend(["--worker", str(id)])
    args.extend(["--realm", self._realm])
    args.extend(["--type", wtype])

    # allow override worker process title from options
    #
    if options.get('title', None):
        args.extend(['--title', options['title']])

    # allow overriding debug flag from options
    #
    if options.get('debug', self.debug):
        args.append('--debug')

    # forward explicit reactor selection
    #
    if 'reactor' in options and sys.platform in options['reactor']:
        args.extend(['--reactor', options['reactor'][sys.platform]])
    elif self._node.options.reactor:
        args.extend(['--reactor', self._node.options.reactor])

    # create worker process environment
    #
    worker_env = create_process_env(options)

    # log name of worker
    #
    worker_logname = {'router': 'Router', 'container': 'Container'}.get(wtype, 'Worker')

    # topic URIs used (later)
    #
    if wtype == 'router':
        starting_topic = 'crossbar.node.{}.on_router_starting'.format(self._node_id)
        started_topic = 'crossbar.node.{}.on_router_started'.format(self._node_id)
    elif wtype == 'container':
        starting_topic = 'crossbar.node.{}.on_container_starting'.format(self._node_id)
        started_topic = 'crossbar.node.{}.on_container_started'.format(self._node_id)
    else:
        raise Exception("logic error")

    # add worker tracking instance to the worker map ..
    #
    if wtype == 'router':
        worker = RouterWorkerProcess(self, id, details.caller, keeplog=options.get('traceback', None))
    elif wtype == 'container':
        worker = ContainerWorkerProcess(self, id, details.caller, keeplog=options.get('traceback', None))
    else:
        raise Exception("logic error")

    self._workers[id] = worker

    # create a (custom) process endpoint
    #
    ep = WorkerProcessEndpoint(self._node._reactor, exe, args, env=worker_env, worker=worker)

    # ready handling
    #
    def on_ready_success(id):
        self.log.info("{worker} with ID '{id}' and PID {pid} started",
                      worker=worker_logname, id=worker.id, pid=worker.pid)

        # ensure the subprocess is terminated when the node shuts down
        self._node._reactor.addSystemEventTrigger(
            'before', 'shutdown',
            self._cleanup_worker, self._node._reactor, worker,
        )

        worker.status = 'started'
        worker.started = datetime.utcnow()

        started_info = {
            'id': worker.id,
            'status': worker.status,
            'started': utcstr(worker.started),
            'who': worker.who
        }

        # the caller already gets the result via the returned Deferred;
        # everybody else is notified via the 'started' event
        self.publish(started_topic, started_info, options=PublishOptions(exclude=[details.caller]))

        return started_info

    def on_ready_error(err):
        del self._workers[worker.id]

        emsg = 'ERROR: failed to start native worker - {}'.format(err.value)
        self.log.error(emsg)
        raise ApplicationError("crossbar.error.cannot_start", emsg, worker.getlog())

    worker.ready.addCallbacks(on_ready_success, on_ready_error)

    # remove the worker from tracking as soon as its process ends,
    # regardless of how it ended
    def on_exit_success(res):
        del self._workers[worker.id]

    def on_exit_error(err):
        del self._workers[worker.id]

    worker.exit.addCallbacks(on_exit_success, on_exit_error)

    # create a transport factory for talking WAMP to the native worker
    #
    transport_factory = create_native_worker_client_factory(self._node._router_session_factory, worker.ready, worker.exit)
    transport_factory.noisy = False
    self._workers[id].factory = transport_factory

    # now (immediately before actually forking) signal the starting of the worker
    #
    starting_info = {
        'id': id,
        'status': worker.status,
        'created': utcstr(worker.created),
        'who': worker.who
    }

    # the caller gets a progressive result ..
    if details.progress:
        details.progress(starting_info)

    # .. while all others get an event
    self.publish(starting_topic, starting_info, options=PublishOptions(exclude=[details.caller]))

    # now actually fork the worker ..
    #
    self.log.info("Starting {worker} with ID '{id}'...",
                  worker=worker_logname, id=id)
    self.log.debug("{worker} '{id}' command line is '{cmdline}'",
                   worker=worker_logname, id=id, cmdline=' '.join(args))
    d = ep.connect(transport_factory)

    def on_connect_success(proto):
        # this seems to be called immediately when the child process
        # has been forked. even if it then immediately fails because
        # e.g. the executable doesn't even exist. in other words,
        # I'm not sure under what conditions the deferred will errback ..

        pid = proto.transport.pid
        self.log.debug("Native worker process connected with PID {pid}",
                       pid=pid)

        # note the PID of the worker
        worker.pid = pid

        # proto is an instance of NativeWorkerClientProtocol
        worker.proto = proto

        worker.status = 'connected'
        worker.connected = datetime.utcnow()

    def on_connect_error(err):
        # not sure when this errback is triggered at all ..
        self.log.error("ERROR: Connecting forked native worker failed - {err}", err=err)

        # in any case, forward the error ..
        worker.ready.errback(err)

    d.addCallbacks(on_connect_success, on_connect_error)

    return worker.ready
@staticmethod
def _cleanup_worker(reactor, worker):
    """
    This is called during reactor shutdown and ensures we wait for our
    subprocesses to shut down nicely.

    Sends SIGTERM, then polls once per second; after ~20 polls without
    the worker exiting, escalates to SIGKILL.

    :param reactor: The Twisted reactor being shut down (used to
        schedule the polling calls).
    :param worker: Worker tracking object; must expose ``proto``,
        ``pid`` and the ``exit`` Deferred.
    :returns: Deferred that fires once the subprocess exited (or was
        killed), or None if the process had already exited.
    """
    log = make_logger()
    try:
        log.info("sending TERM to subprocess {pid}", pid=worker.pid)
        worker.proto.transport.signalProcess('TERM')
        # wait for the subprocess to shutdown; could add a timeout
        # after which we send a KILL maybe?
        d = Deferred()

        def protocol_closed(_):
            log.debug("{pid} exited", pid=worker.pid)
            d.callback(None)

        # await worker's timely demise
        worker.exit.addCallback(protocol_closed)

        def timeout(tried):
            # re-entered once per second via callLater until either the
            # worker exits (d fires via protocol_closed) or we give up
            if d.called:
                return
            log.info("waiting for {pid} to exit...", pid=worker.pid)
            reactor.callLater(1, timeout, tried + 1)
            if tried > 20:  # or just wait forever?
                log.info("Sending SIGKILL to {pid}", pid=worker.pid)
                worker.proto.transport.signalProcess('KILL')
                d.callback(None)  # or recurse more?

        timeout(0)
        return d
    except ProcessExitedAlready:
        # signalProcess raised: process is already gone, nothing to wait for
        pass  # ignore; it's already dead
def stop_router(self, id, kill=False, details=None):
    """
    Stop a currently running router worker.

    :param id: The ID of the router worker to stop.
    :type id: str
    :param kill: If `True`, hard-kill the process; otherwise shut the
        worker down gracefully.
    :type kill: bool
    """
    self.log.debug("NodeControllerSession.stop_router({id}, kill={kill})",
                   id=id, kill=kill)
    # Shared teardown path with container workers.
    return self._stop_native_worker('router', id, kill, details=details)
def stop_container(self, id, kill=False, details=None):
    """
    Stop a currently running container worker.

    :param id: The ID of the container worker to stop.
    :type id: str
    :param kill: If `True`, hard-kill the process; otherwise shut the
        worker down gracefully.
    :type kill: bool
    """
    self.log.debug("NodeControllerSession.stop_container({id}, kill={kill})",
                   id=id, kill=kill)
    # Shared teardown path with router workers.
    return self._stop_native_worker('container', id, kill, details=details)
def _stop_native_worker(self, wtype, id, kill, details=None):
    """
    Stop a native worker (shared implementation behind stop_router and
    stop_container).

    :param wtype: Worker type, one of 'router' or 'container'.
    :param id: The ID of the worker to stop.
    :param kill: If True, send SIGKILL immediately; otherwise stop the
        WAMP client factory and send SIGTERM for a graceful shutdown.
    :raises: ApplicationError -- if no such worker of that type is
        running, or it is not in 'started' status.
    """
    assert(wtype in ['router', 'container'])
    # the worker must exist AND be of the requested type
    if id not in self._workers or self._workers[id].TYPE != wtype:
        emsg = "ERROR: no {} worker with ID '{}' currently running".format(wtype, id)
        raise ApplicationError('crossbar.error.worker_not_running', emsg)
    worker = self._workers[id]
    if worker.status != 'started':
        emsg = "ERROR: worker with ID '{}' is not in 'started' status (current status: '{}')".format(id, worker.status)
        raise ApplicationError('crossbar.error.worker_not_running', emsg)
    if kill:
        self.log.info("Killing {wtype} worker with ID '{id}'",
                      wtype=wtype, id=id)
        self._workers[id].proto.transport.signalProcess("KILL")
    else:
        self.log.info("Stopping {wtype} worker with ID '{id}'",
                      wtype=wtype, id=id)
        # stop the factory first so no reconnect is attempted, then TERM
        self._workers[id].factory.stopFactory()
        self._workers[id].proto.transport.signalProcess('TERM')
def start_guest(self, id, config, details=None):
    """
    Start a new guest process on this node.

    :param id: The (node-wide unique) ID of the guest worker.
    :type id: str
    :param config: The guest process configuration (validated via
        ``checkconfig.check_guest``).
    :type config: dict
    :returns: Deferred (``worker.ready``) that fires with the guest
        startup info dict once the guest process is up.  (The previous
        docstring claimed an int PID was returned; the PID is available
        as ``pid`` on the tracking object instead.)
    :raises: ApplicationError -- if a worker with this ID already runs,
        the configuration is invalid, or no executable can be found.
    """
    # prohibit starting a worker twice
    #
    if id in self._workers:
        emsg = "ERROR: could not start worker - a worker with ID '{}' is already running (or starting)".format(id)
        self.log.error(emsg)
        raise ApplicationError('crossbar.error.worker_already_running', emsg)

    try:
        checkconfig.check_guest(config)
    except Exception as e:
        raise ApplicationError('crossbar.error.invalid_configuration', 'invalid guest worker configuration: {}'.format(e))

    options = config.get('options', {})

    # guest process working directory
    #
    workdir = self._node._cbdir
    if 'workdir' in options:
        workdir = os.path.join(workdir, options['workdir'])
    workdir = os.path.abspath(workdir)

    # guest process executable and command line arguments
    #
    # first try to configure the fully qualified path for the guest
    # executable by joining workdir and configured executable ..
    exe = os.path.abspath(os.path.join(workdir, config['executable']))

    if check_executable(exe):
        self.log.info("Using guest worker executable '{exe}' (executable path taken from configuration)",
                      exe=exe)
    else:
        # try to detect the fully qualified path for the guest
        # executable by doing a "which" on the configured executable name
        exe = shutil.which(config['executable'])
        if exe is not None and check_executable(exe):
            self.log.info("Using guest worker executable '{exe}' (executable path detected from environment)",
                          exe=exe)
        else:
            # FIX: error message said "find and executable"
            emsg = "ERROR: could not start worker - could not find an executable for '{}'".format(config['executable'])
            self.log.error(emsg)
            raise ApplicationError('crossbar.error.invalid_configuration', emsg)

    # guest process command line arguments
    #
    args = [exe]
    args.extend(config.get('arguments', []))

    # guest process environment
    #
    worker_env = create_process_env(options)

    # log name of worker
    #
    worker_logname = 'Guest'

    # topic URIs used (later)
    #
    starting_topic = 'crossbar.node.{}.on_guest_starting'.format(self._node_id)
    started_topic = 'crossbar.node.{}.on_guest_started'.format(self._node_id)

    # add worker tracking instance to the worker map ..
    #
    worker = GuestWorkerProcess(self, id, details.caller, keeplog=options.get('traceback', None))
    self._workers[id] = worker

    # create a (custom) process endpoint
    #
    ep = WorkerProcessEndpoint(self._node._reactor, exe, args, path=workdir, env=worker_env, worker=worker)

    # ready handling
    #
    def on_ready_success(proto):
        worker.pid = proto.transport.pid
        worker.status = 'started'
        worker.started = datetime.utcnow()

        self.log.info("{worker} with ID '{id}' and PID {pid} started",
                      worker=worker_logname, id=worker.id, pid=worker.pid)

        # ensure the guest is terminated when the node shuts down
        self._node._reactor.addSystemEventTrigger(
            'before', 'shutdown',
            self._cleanup_worker, self._node._reactor, worker,
        )

        # directory watcher
        #
        if 'watch' in options:
            if HAS_FSNOTIFY:
                # assemble list of watched directories
                watched_dirs = []
                for d in options['watch'].get('directories', []):
                    watched_dirs.append(os.path.abspath(os.path.join(self._node._cbdir, d)))

                # create a directory watcher
                worker.watcher = DirWatcher(dirs=watched_dirs, notify_once=True)

                # make sure to stop the background thread running inside the
                # watcher upon Twisted being shut down
                def on_shutdown():
                    worker.watcher.stop()

                reactor.addSystemEventTrigger('before', 'shutdown', on_shutdown)

                # this handler will get fired by the watcher upon detecting an FS event
                def on_fsevent(evt):
                    worker.watcher.stop()
                    proto.signal('TERM')

                    if options['watch'].get('action', None) == 'restart':
                        self.log.info("Restarting guest ..")
                        reactor.callLater(0.1, self.start_guest, id, config, details)

                # now run the watcher on a background thread
                deferToThread(worker.watcher.loop, on_fsevent)
            else:
                self.log.warn("Warning: cannot watch directory for changes - feature DirWatcher unavailable")

        # assemble guest worker startup information
        #
        started_info = {
            'id': worker.id,
            'status': worker.status,
            'started': utcstr(worker.started),
            'who': worker.who
        }

        self.publish(started_topic, started_info, options=PublishOptions(exclude=[details.caller]))

        return started_info

    def on_ready_error(err):
        del self._workers[worker.id]

        emsg = 'ERROR: failed to start guest worker - {}'.format(err.value)
        self.log.error(emsg)
        raise ApplicationError("crossbar.error.cannot_start", emsg, ep.getlog())

    worker.ready.addCallbacks(on_ready_success, on_ready_error)

    # remove the worker from tracking as soon as its process ends
    def on_exit_success(res):
        self.log.info("Guest {id} exited with success", id=worker.id)
        del self._workers[worker.id]

    def on_exit_error(err):
        self.log.error("Guest {id} exited with error {err.value}",
                       id=worker.id, err=err)
        del self._workers[worker.id]

    worker.exit.addCallbacks(on_exit_success, on_exit_error)

    # create a transport factory for talking WAMP to the native worker
    #
    transport_factory = create_guest_worker_client_factory(config, worker.ready, worker.exit)
    transport_factory.noisy = False
    self._workers[id].factory = transport_factory

    # now (immediately before actually forking) signal the starting of the worker
    #
    starting_info = {
        'id': id,
        'status': worker.status,
        'created': utcstr(worker.created),
        'who': worker.who
    }

    # the caller gets a progressive result ..
    if details.progress:
        details.progress(starting_info)

    # .. while all others get an event
    self.publish(starting_topic, starting_info, options=PublishOptions(exclude=[details.caller]))

    # now actually fork the worker ..
    #
    self.log.info("Starting {worker} with ID '{id}'...",
                  worker=worker_logname, id=id)
    self.log.debug("{worker} '{id}' using command line '{cli}'...",
                   worker=worker_logname, id=id, cli=' '.join(args))
    d = ep.connect(transport_factory)

    def on_connect_success(proto):
        # this seems to be called immediately when the child process
        # has been forked. even if it then immediately fails because
        # e.g. the executable doesn't even exist. in other words,
        # I'm not sure under what conditions the deferred will
        # errback - probably only if the forking of a new process fails
        # at OS level due to out of memory conditions or such.

        pid = proto.transport.pid
        self.log.debug("Guest worker process connected with PID {pid}",
                       pid=pid)

        worker.pid = pid

        # proto is an instance of GuestWorkerClientProtocol
        worker.proto = proto

        worker.status = 'connected'
        worker.connected = datetime.utcnow()

    def on_connect_error(err):
        # not sure when this errback is triggered at all .. see above.
        self.log.error("ERROR: Connecting forked guest worker failed - {}".format(err))

        # in any case, forward the error ..
        worker.ready.errback(err)

    d.addCallbacks(on_connect_success, on_connect_error)

    return worker.ready
def stop_guest(self, id, kill=False, details=None):
    """
    Stops a currently running guest worker.

    :param id: The ID of the guest worker to stop.
    :type id: str
    :param kill: If True, SIGKILL the process; otherwise close its
        stdio transport so it can exit gracefully.
    :type kill: bool
    :raises: ApplicationError -- if no guest worker with this ID is
        running, or stopping it fails.
    """
    self.log.debug("NodeControllerSession.stop_guest({id}, kill={kill})",
                   id=id, kill=kill)
    # NOTE(review): native workers are filtered via ``.TYPE`` in
    # _stop_native_worker, but guests via ``.worker_type`` here --
    # confirm GuestWorkerProcess actually exposes ``worker_type``.
    if id not in self._workers or self._workers[id].worker_type != 'guest':
        emsg = "ERROR: no guest worker with ID '{}' currently running".format(id)
        raise ApplicationError('crossbar.error.worker_not_running', emsg)
    try:
        if kill:
            self._workers[id].proto.transport.signalProcess("KILL")
        else:
            self._workers[id].proto.transport.loseConnection()
    except Exception as e:
        emsg = "ERROR: could not stop guest worker '{}' - {}".format(id, e)
        raise ApplicationError('crossbar.error.stop_worker_failed', emsg)
    else:
        # only forget the worker once the stop request actually succeeded
        del self._workers[id]
def create_process_env(options):
    """
    Create worker process environment dictionary.

    The ``options['env']`` mapping controls the result:

    * ``inherit`` -- ``True`` (default) inherits the complete parent
      environment, ``False`` inherits nothing, and a list inherits only
      the named variables (those actually present in ``os.environ``).
    * ``vars`` -- explicit variables merged in last, overriding any
      inherited value.

    :param options: Worker/guest options (may contain an ``env`` key).
    :returns: dict -- The environment for the child process.
    """
    child_env = {}
    env_cfg = options.get('env', {})

    # decide what to inherit from the parent process environment
    inherit_everything = True
    if 'inherit' in env_cfg:
        inherit_spec = env_cfg['inherit']
        if isinstance(inherit_spec, bool):
            inherit_everything = inherit_spec
        elif isinstance(inherit_spec, list):
            inherit_everything = False
            child_env.update({name: os.environ[name]
                              for name in inherit_spec if name in os.environ})

    if inherit_everything:
        # copy item-by-item (os.environ is a "special" mapping)
        child_env.update(os.environ)

    # explicit environment vars from config win over inherited ones
    child_env.update(env_cfg.get('vars', {}))

    return child_env
|
alexallah/django | refs/heads/master | django/contrib/staticfiles/utils.py | 41 | import fnmatch
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def matches_patterns(path, patterns=None):
    """
    Return True if ``path`` matches (case-sensitively) any of the
    glob-style ``patterns``, False otherwise -- including when
    ``patterns`` is None or empty.

    (The previous docstring referred to a nonexistent ``ignore_patterns``
    parameter.)
    """
    return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns or [])
def get_files(storage, ignore_patterns=None, location=''):
    """
    Recursively walk the storage directories, yielding the
    (storage-relative) paths of all files that should be copied, i.e.
    those not matching any of ``ignore_patterns``.
    """
    ignore = [] if ignore_patterns is None else ignore_patterns
    directories, files = storage.listdir(location)
    for filename in files:
        if matches_patterns(filename, ignore):
            continue
        yield os.path.join(location, filename) if location else filename
    for directory in directories:
        if matches_patterns(directory, ignore):
            continue
        subdir = os.path.join(location, directory) if location else directory
        yield from get_files(storage, ignore, subdir)
def check_settings(base_url=None):
    """
    Check if the staticfiles settings have sane values.

    Raises ImproperlyConfigured when STATIC_URL is unset, when it equals
    MEDIA_URL, or when STATIC_ROOT and MEDIA_ROOT point at the same
    directory.
    """
    base_url = settings.STATIC_URL if base_url is None else base_url
    if not base_url:
        raise ImproperlyConfigured(
            "You're using the staticfiles app "
            "without having set the required STATIC_URL setting.")
    if settings.MEDIA_URL == base_url:
        raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
                                   "settings must have different values")
    media_root, static_root = settings.MEDIA_ROOT, settings.STATIC_ROOT
    if media_root and static_root and media_root == static_root:
        raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
                                   "settings must have different values")
|
andreabedini/PyTables | refs/heads/develop | tables/tests/test_carray.py | 5 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
import numpy
import tables
from tables import (
Atom, StringAtom, IntAtom, Int8Atom, Int16Atom, Int32Atom, Int64Atom,
FloatAtom, Float32Atom,
)
from tables.tests import common
from tables.tests.common import allequal
from tables.tests.common import unittest
from tables.tests.common import PyTablesTestCase as TestCase
from six.moves import range
class BasicTestCase(common.TempFileMixin, TestCase):
# Default values
obj = None
flavor = "numpy"
type = 'int32'
shape = (2, 2)
start = 0
stop = 10
step = 1
length = 1
chunkshape = (5, 5)
compress = 0
complib = "zlib" # Default compression library
shuffle = 0
fletcher32 = 0
reopen = 1 # Tells whether the file has to be reopened on each test or not
def setUp(self):
    """Create the HDF5 test file and populate it with a '/carray1' node."""
    super(BasicTestCase, self).setUp()

    # Create an instance of an HDF5 Table
    self.rootgroup = self.h5file.root
    self.populateFile()

    if self.reopen:
        # Close the file so each test reopens it fresh
        self.h5file.close()
def populateFile(self):
    """Create the '/carray1' CArray and fill it with test data.

    The atom is derived from ``self.type``/``self.length`` unless a
    sample object ``self.obj`` is given, in which case atom and shape
    are inferred by ``create_carray`` from ``obj``.
    """
    group = self.rootgroup
    obj = self.obj
    if obj is None:
        if self.type == "string":
            atom = StringAtom(itemsize=self.length)
        else:
            atom = Atom.from_type(self.type)
    else:
        # let create_carray derive atom/shape from obj
        atom = None
    title = self.__class__.__name__
    filters = tables.Filters(complevel=self.compress,
                             complib=self.complib,
                             shuffle=self.shuffle,
                             fletcher32=self.fletcher32)
    carray = self.h5file.create_carray(group, 'carray1',
                                       atom=atom, shape=self.shape,
                                       title=title, filters=filters,
                                       chunkshape=self.chunkshape, obj=obj)
    carray.flavor = self.flavor

    # Fill it with data
    self.rowshape = list(carray.shape)
    # total number of scalar items (length > 1 only for string atoms)
    self.objsize = self.length * numpy.prod(carray.shape)
    if self.flavor == "numpy":
        if self.type == "string":
            object = numpy.ndarray(buffer=b"a"*self.objsize,
                                   shape=self.shape,
                                   dtype="S%s" % carray.atom.itemsize)
        else:
            object = numpy.arange(self.objsize, dtype=carray.atom.dtype)
            object.shape = carray.shape
    if common.verbose:
        print("Object to append -->", repr(object))

    carray[...] = object
def _get_shape(self):
    """Return the expected array shape: the explicit ``shape`` attribute
    if set, otherwise the shape derived from the sample object ``obj``."""
    if self.shape is not None:
        return self.shape
    return numpy.asarray(self.obj).shape
def test00_attributes(self):
    """Check the stored node attributes (flavor, shape, chunkshape, atom)."""
    if self.reopen:
        self.h5file = tables.open_file(self.h5fname, "r")
    obj = self.h5file.get_node("/carray1")

    shape = self._get_shape()
    self.assertEqual(obj.flavor, self.flavor)
    self.assertEqual(obj.shape, shape)
    self.assertEqual(obj.ndim, len(shape))
    self.assertEqual(obj.chunkshape, self.chunkshape)
    self.assertEqual(obj.nrows, shape[0])
    self.assertEqual(obj.atom.type, self.type)
def test01_readCArray(self):
    """Checking read() of chunked layout arrays."""

    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test01_readCArray..." % self.__class__.__name__)

    # Create an instance of an HDF5 Table
    if self.reopen:
        self.h5file = tables.open_file(self.h5fname, "r")
    carray = self.h5file.get_node("/carray1")

    # Choose a small value for buffer size
    carray.nrowsinbuf = 3
    if common.verbose:
        print("CArray descr:", repr(carray))
        print("shape of read array ==>", carray.shape)
        print("reopening?:", self.reopen)

    shape = self._get_shape()

    # Build the reference array to do comparisons (mirrors populateFile)
    if self.flavor == "numpy":
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.shape,
                                    dtype="S%s" % carray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=carray.atom.dtype)
            object_.shape = shape

    stop = self.stop
    # stop == None means read only the element designed by start
    # (in read() contexts)
    if self.stop is None:
        if self.start == -1:  # corner case
            stop = carray.nrows
        else:
            stop = self.start + 1
    # Protection against number of elements less than existing
    # if rowshape[self.extdim] < self.stop or self.stop == 0:
    if carray.nrows < stop:
        # self.stop == 0 means last row only in read()
        # and not in [::] slicing notation
        stop = int(carray.nrows)
    # do a copy() in order to ensure that len(object._data)
    # actually do a measure of its length
    obj = object_[self.start:stop:self.step].copy()

    # Read all the array
    try:
        data = carray.read(self.start, stop, self.step)
    except IndexError:
        # NOTE(review): both branches are identical; the flavor
        # distinction here looks vestigial.
        if self.flavor == "numpy":
            data = numpy.empty(shape=self.shape, dtype=self.type)
        else:
            data = numpy.empty(shape=self.shape, dtype=self.type)

    if common.verbose:
        if hasattr(obj, "shape"):
            print("shape should look as:", obj.shape)
        print("Object read ==>", repr(data))
        print("Should look like ==>", repr(obj))

    if hasattr(data, "shape"):
        self.assertEqual(len(data.shape), len(shape))
    else:
        # Scalar case
        self.assertEqual(len(self.shape), 1)
    self.assertEqual(carray.chunkshape, self.chunkshape)
    self.assertTrue(allequal(data, obj, self.flavor))
def test01_readCArray_out_argument(self):
    """Checking read() of chunked layout arrays (with an ``out`` buffer)."""

    # Create an instance of an HDF5 Table
    if self.reopen:
        self.h5file = tables.open_file(self.h5fname, "r")
    carray = self.h5file.get_node("/carray1")

    shape = self._get_shape()

    # Choose a small value for buffer size
    carray.nrowsinbuf = 3

    # Build the reference array to do comparisons (mirrors populateFile)
    if self.flavor == "numpy":
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.shape,
                                    dtype="S%s" % carray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=carray.atom.dtype)
            object_.shape = shape

    stop = self.stop
    # stop == None means read only the element designed by start
    # (in read() contexts)
    if self.stop is None:
        if self.start == -1:  # corner case
            stop = carray.nrows
        else:
            stop = self.start + 1
    # Protection against number of elements less than existing
    # if rowshape[self.extdim] < self.stop or self.stop == 0:
    if carray.nrows < stop:
        # self.stop == 0 means last row only in read()
        # and not in [::] slicing notation
        stop = int(carray.nrows)
    # do a copy() in order to ensure that len(object._data)
    # actually do a measure of its length
    obj = object_[self.start:stop:self.step].copy()

    # Read all the array into a pre-allocated out= buffer of the
    # right slice shape
    try:
        data = numpy.empty(shape, dtype=carray.atom.dtype)
        data = data[self.start:stop:self.step].copy()
        carray.read(self.start, stop, self.step, out=data)
    except IndexError:
        # NOTE(review): both branches are identical; the flavor
        # distinction here looks vestigial.
        if self.flavor == "numpy":
            data = numpy.empty(shape=shape, dtype=self.type)
        else:
            data = numpy.empty(shape=shape, dtype=self.type)

    if hasattr(data, "shape"):
        self.assertEqual(len(data.shape), len(shape))
    else:
        # Scalar case
        self.assertEqual(len(shape), 1)
    self.assertEqual(carray.chunkshape, self.chunkshape)
    self.assertTrue(allequal(data, obj, self.flavor))
def test02_getitemCArray(self):
    """Checking chunked layout array __getitem__ special method."""

    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test02_getitemCArray..." %
              self.__class__.__name__)

    if not hasattr(self, "slices"):
        # If there is not a slices attribute, create it
        self.slices = (slice(self.start, self.stop, self.step),)

    # Create an instance of an HDF5 Table
    if self.reopen:
        self.h5file = tables.open_file(self.h5fname, "r")
    carray = self.h5file.get_node("/carray1")

    if common.verbose:
        print("CArray descr:", repr(carray))
        print("shape of read array ==>", carray.shape)
        print("reopening?:", self.reopen)

    shape = self._get_shape()

    # Build the reference array to do comparisons (mirrors populateFile)
    if self.type == "string":
        object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                shape=self.shape,
                                dtype="S%s" % carray.atom.itemsize)
    else:
        object_ = numpy.arange(self.objsize, dtype=carray.atom.dtype)
        object_.shape = shape

    # do a copy() in order to ensure that len(object._data)
    # actually do a measure of its length
    obj = object_.__getitem__(self.slices).copy()

    # Read data from the array
    try:
        data = carray.__getitem__(self.slices)
    except IndexError:
        print("IndexError!")
        if self.flavor == "numpy":
            data = numpy.empty(shape=self.shape, dtype=self.type)
        else:
            data = numpy.empty(shape=self.shape, dtype=self.type)

    if common.verbose:
        print("Object read:\n", repr(data))  # , data.info()
        print("Should look like:\n", repr(obj))  # , object.info()
        if hasattr(obj, "shape"):
            print("Original object shape:", self.shape)
            print("Shape read:", data.shape)
            print("shape should look as:", obj.shape)

    if not hasattr(data, "shape"):
        # Scalar case
        self.assertEqual(len(self.shape), 1)
    self.assertEqual(carray.chunkshape, self.chunkshape)
    self.assertTrue(allequal(data, obj, self.flavor))
def test03_setitemCArray(self):
    """Checking chunked layout array __setitem__ special method."""

    if common.verbose:
        print('\n', '-=' * 30)
        print("Running %s.test03_setitemCArray..." %
              self.__class__.__name__)

    if not hasattr(self, "slices"):
        # If there is not a slices attribute, create it
        self.slices = (slice(self.start, self.stop, self.step),)

    # Create an instance of an HDF5 Table
    if self.reopen:
        self.h5file = tables.open_file(self.h5fname, "a")
    carray = self.h5file.get_node("/carray1")

    if common.verbose:
        print("CArray descr:", repr(carray))
        print("shape of read array ==>", carray.shape)
        print("reopening?:", self.reopen)

    shape = self._get_shape()

    # Build the reference array to do comparisons (mirrors populateFile)
    if self.type == "string":
        object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                shape=self.shape,
                                dtype="S%s" % carray.atom.itemsize)
    else:
        object_ = numpy.arange(self.objsize, dtype=carray.atom.dtype)
        object_.shape = shape

    # do a copy() in order to ensure that len(object._data)
    # actually do a measure of its length
    obj = object_.__getitem__(self.slices).copy()

    if self.type == "string":
        if hasattr(self, "wslice"):
            # BUG FIX: this used to read ``self.wslize`` (a typo), which
            # raised AttributeError instead of exercising this write path.
            obj[self.wslice] = "xXx"
            carray[self.wslice] = "xXx"
        elif sum(obj[self.slices].shape) != 0:
            obj[:] = "xXx"
            if obj.size > 0:
                carray[self.slices] = obj
    else:
        if hasattr(self, "wslice"):
            obj[self.wslice] = obj[self.wslice] * 2 + 3
            carray[self.wslice] = carray[self.wslice] * 2 + 3
        elif sum(obj[self.slices].shape) != 0:
            obj = obj * 2 + 3
            if numpy.prod(obj.shape) > 0:
                carray[self.slices] = carray[self.slices] * 2 + 3
    # Cast again object to its original type
    obj = numpy.array(obj, dtype=carray.atom.dtype)

    # Read data from the array
    try:
        data = carray.__getitem__(self.slices)
    except IndexError:
        print("IndexError!")
        if self.flavor == "numpy":
            data = numpy.empty(shape=self.shape, dtype=self.type)
        else:
            data = numpy.empty(shape=self.shape, dtype=self.type)

    if common.verbose:
        print("Object read:\n", repr(data))
        print("Should look like:\n", repr(obj))
        if hasattr(obj, "shape"):
            print("Original object shape:", self.shape)
            print("Shape read:", data.shape)
            print("shape should look as:", obj.shape)

    if not hasattr(data, "shape"):
        # Scalar case
        self.assertEqual(len(self.shape), 1)
    self.assertEqual(carray.chunkshape, self.chunkshape)
    self.assertTrue(allequal(data, obj, self.flavor))
class BasicWriteTestCase(BasicTestCase):
    """1-D int32 CArray; write via a single-element slice; file reopened."""
    type = 'int32'
    shape = (2,)
    chunkshape = (5,)
    step = 1
    wslice = 1  # single element case
class BasicWrite2TestCase(BasicTestCase):
    """1-D int32 CArray; write via a range slice; file kept open."""
    type = 'int32'
    shape = (2,)
    chunkshape = (5,)
    step = 1
    wslice = slice(shape[0]-2, shape[0], 2)  # range of elements
    reopen = 0  # This case does not reopen files
class BasicWrite3TestCase(BasicTestCase):
    """CArray created from a Python list sample object; file kept open."""
    obj = [1, 2]
    type = numpy.asarray(obj).dtype.name
    shape = None  # derived from obj
    chunkshape = (5,)
    step = 1
    reopen = 0  # This case does not reopen files
class BasicWrite4TestCase(BasicTestCase):
    """CArray created from a numpy sample object; file kept open."""
    obj = numpy.array([1, 2])
    type = obj.dtype.name
    shape = None  # derived from obj
    chunkshape = (5,)
    step = 1
    reopen = 0  # This case does not reopen files
class BasicWrite5TestCase(BasicTestCase):
    """CArray created from a nested-list sample object; file kept open."""
    obj = [[1, 2], [3, 4]]
    type = numpy.asarray(obj).dtype.name
    shape = None  # derived from obj
    chunkshape = (5, 1)
    step = 1
    reopen = 0  # This case does not reopen files
class BasicWrite6TestCase(BasicTestCase):
    """Like BasicWrite3TestCase, but with the file reopened per test."""
    obj = [1, 2]
    type = numpy.asarray(obj).dtype.name
    shape = None  # derived from obj
    chunkshape = (5,)
    step = 1
    reopen = 1  # This case does reopen files
class BasicWrite7TestCase(BasicTestCase):
    """Like BasicWrite4TestCase, but with the file reopened per test."""
    obj = numpy.array([1, 2])
    type = obj.dtype.name
    shape = None  # derived from obj
    chunkshape = (5,)
    step = 1
    reopen = 1  # This case does reopen files
class BasicWrite8TestCase(BasicTestCase):
    """Like BasicWrite5TestCase, but with the file reopened per test."""
    obj = [[1, 2], [3, 4]]
    type = numpy.asarray(obj).dtype.name
    shape = None  # derived from obj
    chunkshape = (5, 1)
    step = 1
    reopen = 1  # This case does reopen files
class EmptyCArrayTestCase(BasicTestCase):
    """2-D int32 CArray read with a stop beyond the array bounds."""
    type = 'int32'
    shape = (2, 2)
    chunkshape = (5, 5)
    start = 0
    stop = 10  # past the end; read() clamps to nrows
    step = 1
class EmptyCArray2TestCase(BasicTestCase):
    """Same as EmptyCArrayTestCase, but without reopening the file."""
    type = 'int32'
    shape = (2, 2)
    chunkshape = (5, 5)
    start = 0
    stop = 10  # past the end; read() clamps to nrows
    step = 1
    reopen = 0  # This case does not reopen files
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class SlicesCArrayTestCase(BasicTestCase):
    """2-D LZO-compressed CArray accessed via a tuple of slices."""
    compress = 1
    complib = "lzo"
    type = 'int32'
    shape = (2, 2)
    chunkshape = (5, 5)
    slices = (slice(1, 2, 1), slice(1, 3, 1))
class EllipsisCArrayTestCase(BasicTestCase):
    """2-D CArray accessed with an Ellipsis in the slice tuple."""
    type = 'int32'
    shape = (2, 2)
    chunkshape = (5, 5)
    # slices = (slice(1,2,1), Ellipsis)
    slices = (Ellipsis, slice(1, 2, 1))
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class Slices2CArrayTestCase(BasicTestCase):
    """3-D LZO-compressed CArray with a strided 3-slice selection."""
    compress = 1
    complib = "lzo"
    type = 'int32'
    shape = (2, 2, 4)
    chunkshape = (5, 5, 5)
    slices = (slice(1, 2, 1), slice(None, None, None), slice(1, 4, 2))
class Ellipsis2CArrayTestCase(BasicTestCase):
    """3-D CArray with an Ellipsis in the middle of the selection."""
    type = 'int32'
    shape = (2, 2, 4)
    chunkshape = (5, 5, 5)
    slices = (slice(1, 2, 1), Ellipsis, slice(1, 4, 2))
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class Slices3CArrayTestCase(BasicTestCase):
    """4-D LZO-compressed CArray; several experimental slice selections.

    NOTE(review): only the LAST ``slices`` assignment below is effective;
    the earlier ones (and the commented-out variants) document selections
    that were tried and, per the annotations, did not all work.
    """
    compress = 1  # To show the chunks id DEBUG is on
    complib = "lzo"
    type = 'int32'
    shape = (2, 3, 4, 2)
    chunkshape = (5, 5, 5, 5)
    slices = (slice(1, 2, 1), slice(
        0, None, None), slice(1, 4, 2))  # Don't work
    # slices = (slice(None, None, None), slice(0, None, None),
    #           slice(1,4,1)) # W
    # slices = (slice(None, None, None), slice(None, None, None),
    #           slice(1,4,2)) # N
    # slices = (slice(1,2,1), slice(None, None, None), slice(1,4,2)) # N
    # Disable the failing test temporarily with a working test case
    slices = (slice(1, 2, 1), slice(1, 4, None), slice(1, 4, 2))  # Y
    # slices = (slice(1,2,1), slice(0, 4, None), slice(1,4,1)) # Y
    slices = (slice(1, 2, 1), slice(0, 4, None), slice(1, 4, 2))  # N
    # slices = (slice(1,2,1), slice(0, 4, None), slice(1,4,2),
    #           slice(0,100,1)) # N
class Slices4CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 2, 5, 6)
chunkshape = (5, 5, 5, 5, 5, 5)
slices = (slice(1, 2, 1), slice(0, None, None), slice(1, 4, 2),
slice(0, 4, 2), slice(3, 5, 2), slice(2, 7, 1))
class Ellipsis3CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 2)
chunkshape = (5, 5, 5, 5)
slices = (Ellipsis, slice(0, 4, None), slice(1, 4, 2))
slices = (slice(1, 2, 1), slice(0, 4, None), slice(1, 4, 2), Ellipsis)
class Ellipsis4CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 5)
chunkshape = (5, 5, 5, 5)
slices = (Ellipsis, slice(0, 4, None), slice(1, 4, 2))
slices = (slice(1, 2, 1), Ellipsis, slice(1, 4, 2))
class Ellipsis5CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 5)
chunkshape = (5, 5, 5, 5)
slices = (slice(1, 2, 1), slice(0, 4, None), Ellipsis)
class Ellipsis6CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 5)
chunkshape = (5, 5, 5, 5)
# The next slices gives problems with setting values (test03)
# This is a problem on the test design, not the Array.__setitem__
# code, though. See # see test_earray.py Ellipsis6EArrayTestCase
slices = (slice(1, 2, 1), slice(0, 4, None), 2, Ellipsis)
class Ellipsis7CArrayTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 4, 5)
chunkshape = (5, 5, 5, 5)
slices = (slice(1, 2, 1), slice(0, 4, None), slice(2, 3), Ellipsis)
class MD3WriteTestCase(BasicTestCase):
type = 'int32'
shape = (2, 2, 3)
chunkshape = (4, 4, 4)
step = 2
class MD5WriteTestCase(BasicTestCase):
type = 'int32'
shape = (2, 2, 3, 4, 5) # ok
# shape = (1, 1, 2, 1) # Minimum shape that shows problems with HDF5 1.6.1
# shape = (2, 3, 2, 4, 5) # Floating point exception (HDF5 1.6.1)
# shape = (2, 3, 3, 2, 5, 6) # Segmentation fault (HDF5 1.6.1)
chunkshape = (1, 1, 1, 1, 1)
start = 1
stop = 10
step = 10
class MD6WriteTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 3, 2, 5, 6)
chunkshape = (1, 1, 1, 1, 5, 6)
start = 1
stop = 10
step = 3
class MD6WriteTestCase__(BasicTestCase):
type = 'int32'
shape = (2, 2)
chunkshape = (1, 1)
start = 1
stop = 3
step = 1
class MD7WriteTestCase(BasicTestCase):
type = 'int32'
shape = (2, 3, 3, 4, 5, 2, 3)
chunkshape = (10, 10, 10, 10, 10, 10, 10)
start = 1
stop = 10
step = 2
class MD10WriteTestCase(BasicTestCase):
type = 'int32'
shape = (1, 2, 3, 4, 5, 5, 4, 3, 2, 2)
chunkshape = (5, 5, 5, 5, 5, 5, 5, 5, 5, 5)
start = -1
stop = -1
step = 10
# ---------------------------------------------------------------------
# Compression / filter parameterizations of BasicTestCase.  Each class
# below is skipped when the corresponding compression library (or blosc
# sub-compressor) is not available in the running HDF5/blosc build.
# ---------------------------------------------------------------------
class ZlibComprTestCase(BasicTestCase):
    compress = 1
    complib = "zlib"
    start = 3
    # stop = 0 # means last row
    stop = None # means last row from 0.8 on
    step = 10
class ZlibShuffleTestCase(BasicTestCase):
    shuffle = 1
    compress = 1
    complib = "zlib"
    # case start > stop, i.e. no rows read
    start = 3
    stop = 1
    step = 10
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscComprTestCase(BasicTestCase):
    compress = 1 # sss
    complib = "blosc"
    chunkshape = (10, 10)
    start = 3
    stop = 10
    step = 3
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscShuffleTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscFletcherTestCase(BasicTestCase):
    # see gh-21
    shape = (200, 300)
    compress = 1
    shuffle = 1
    fletcher32 = 1
    complib = "blosc"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscBloscLZTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc:blosclz"
    chunkshape = (200, 100)
    start = 2
    stop = 11
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
@unittest.skipIf('lz4' not in tables.blosc_compressor_list(), 'lz4 required')
class BloscLZ4TestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc:lz4"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
@unittest.skipIf('lz4' not in tables.blosc_compressor_list(), 'lz4 required')
class BloscLZ4HCTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc:lz4hc"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
@unittest.skipIf('snappy' not in tables.blosc_compressor_list(),
                 'snappy required')
class BloscSnappyTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc:snappy"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
@unittest.skipIf('zlib' not in tables.blosc_compressor_list(), 'zlib required')
class BloscZlibTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "blosc:zlib"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class LZOComprTestCase(BasicTestCase):
    compress = 1 # sss
    complib = "lzo"
    chunkshape = (10, 10)
    start = 3
    stop = 10
    step = 3
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class LZOShuffleTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "lzo"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.bzip2_avail,
                 'BZIP2 compression library not available')
class Bzip2ComprTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    complib = "bzip2"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 8
@unittest.skipIf(not common.bzip2_avail,
                 'BZIP2 compression library not available')
class Bzip2ShuffleTestCase(BasicTestCase):
    shape = (20, 30)
    compress = 1
    shuffle = 1
    complib = "bzip2"
    chunkshape = (100, 100)
    start = 3
    stop = 10
    step = 6
class Fletcher32TestCase(BasicTestCase):
    # checksum filter only, no compression
    shape = (60, 50)
    compress = 0
    fletcher32 = 1
    chunkshape = (50, 50)
    start = 4
    stop = 20
    step = 7
class AllFiltersTestCase(BasicTestCase):
    # compression + shuffle + fletcher32 combined
    compress = 1
    shuffle = 1
    fletcher32 = 1
    complib = "zlib"
    chunkshape = (20, 20) # sss
    start = 2
    stop = 99
    step = 6
# ---------------------------------------------------------------------
# Dtype parameterizations of BasicTestCase.  Extended-precision float
# and complex cases are skipped when the corresponding Atom class is not
# provided by this platform's build of tables.
# ---------------------------------------------------------------------
class FloatTypeTestCase(BasicTestCase):
    type = 'float64'
    shape = (2, 2)
    chunkshape = (5, 5)
    start = 3
    stop = 10
    step = 20
class ComplexTypeTestCase(BasicTestCase):
    type = 'complex128'
    shape = (2, 2)
    chunkshape = (5, 5)
    start = 3
    stop = 10
    step = 20
class StringTestCase(BasicTestCase):
    type = "string"
    length = 20
    shape = (2, 2)
    # shape = (2,2,20)
    chunkshape = (5, 5)
    start = 3
    stop = 10
    step = 20
    slices = (slice(0, 1), slice(1, 2))
class String2TestCase(BasicTestCase):
    type = "string"
    length = 20
    shape = (2, 20)
    chunkshape = (5, 5)
    start = 1
    stop = 10
    step = 2
class StringComprTestCase(BasicTestCase):
    type = "string"
    length = 20
    shape = (20, 2, 10)
    # shape = (20,0,10,20)
    # NOTE(review): `compr` looks like a typo for `compress` (the
    # attribute every other case sets) — confirm against BasicTestCase.
    compr = 1
    # shuffle = 1 # this shouldn't do anything on chars
    chunkshape = (50, 50, 2)
    start = -1
    stop = 100
    step = 20
class Int8TestCase(BasicTestCase):
    type = "int8"
    shape = (2, 2)
    compress = 1
    shuffle = 1
    chunkshape = (50, 50)
    start = -1
    stop = 100
    step = 20
class Int16TestCase(BasicTestCase):
    type = "int16"
    shape = (2, 2)
    compress = 1
    shuffle = 1
    chunkshape = (50, 50)
    start = 1
    stop = 100
    step = 1
class Int32TestCase(BasicTestCase):
    type = "int32"
    shape = (2, 2)
    compress = 1
    shuffle = 1
    chunkshape = (50, 50)
    start = -1
    stop = 100
    step = 20
@unittest.skipUnless(hasattr(tables, 'Float16Atom'),
                     'Float16Atom not available')
class Float16TestCase(BasicTestCase):
    type = "float16"
    shape = (200,)
    compress = 1
    shuffle = 1
    chunkshape = (20,)
    start = -1
    stop = 100
    step = 20
class Float32TestCase(BasicTestCase):
    type = "float32"
    shape = (200,)
    compress = 1
    shuffle = 1
    chunkshape = (20,)
    start = -1
    stop = 100
    step = 20
class Float64TestCase(BasicTestCase):
    type = "float64"
    shape = (200,)
    compress = 1
    shuffle = 1
    chunkshape = (20,)
    start = -1
    stop = 100
    step = 20
@unittest.skipUnless(hasattr(tables, 'Float96Atom'),
                     'Float96Atom not available')
class Float96TestCase(BasicTestCase):
    type = "float96"
    shape = (200,)
    compress = 1
    shuffle = 1
    chunkshape = (20,)
    start = -1
    stop = 100
    step = 20
@unittest.skipUnless(hasattr(tables, 'Float128Atom'),
                     'Float128Atom not available')
class Float128TestCase(BasicTestCase):
    type = "float128"
    shape = (200,)
    compress = 1
    shuffle = 1
    chunkshape = (20,)
    start = -1
    stop = 100
    step = 20
class Complex64TestCase(BasicTestCase):
    type = "complex64"
    shape = (4,)
    compress = 1
    shuffle = 1
    chunkshape = (2,)
    start = -1
    stop = 100
    step = 20
class Complex128TestCase(BasicTestCase):
    type = "complex128"
    shape = (20,)
    compress = 1
    shuffle = 1
    chunkshape = (2,)
    start = -1
    stop = 100
    step = 20
@unittest.skipUnless(hasattr(tables, 'Complex192Atom'),
                     'Complex192Atom not available')
class Complex192TestCase(BasicTestCase):
    type = "complex192"
    shape = (20,)
    compress = 1
    shuffle = 1
    chunkshape = (2,)
    start = -1
    stop = 100
    step = 20
@unittest.skipUnless(hasattr(tables, 'Complex256Atom'),
                     'Complex256Atom not available')
class Complex256TestCase(BasicTestCase):
    type = "complex256"
    shape = (20,)
    compress = 1
    shuffle = 1
    chunkshape = (2,)
    start = -1
    stop = 100
    step = 20
class ComprTestCase(BasicTestCase):
    type = "float64"
    compress = 1
    shuffle = 1
    shape = (200,)
    # NOTE(review): `compr` is probably a leftover typo for `compress`
    # (which is already set above) — confirm against BasicTestCase.
    compr = 1
    chunkshape = (21,)
    start = 51
    stop = 100
    step = 7
# this is a subset of the tests in test_array.py, mostly to verify that errors
# are handled in the same way
class ReadOutArgumentTests(common.TempFileMixin, TestCase):
    # Verifies error handling of CArray.read(out=...): the out buffer
    # must be C-contiguous and at least as large as the requested range.

    def setUp(self):
        super(ReadOutArgumentTests, self).setUp()
        # 1000 int64 elements, blosc-compressed, shared by all tests
        self.size = 1000
        self.filters = tables.Filters(complevel=1, complib='blosc')

    def create_array(self):
        # Returns both the in-memory reference array and the CArray on disk.
        array = numpy.arange(self.size, dtype='i8')
        disk_array = self.h5file.create_carray('/', 'array', atom=Int64Atom(),
                                               shape=(self.size, ),
                                               filters=self.filters)
        disk_array[:] = array
        return array, disk_array

    def test_read_entire_array(self):
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size, ), 'i8')
        disk_array.read(out=out_buffer)
        numpy.testing.assert_equal(out_buffer, array)

    def test_read_non_contiguous_buffer(self):
        # A strided view is not C-contiguous, so read() must reject it.
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size, ), 'i8')
        out_buffer_slice = out_buffer[0:self.size:2]
        # once Python 2.6 support is dropped, this could change
        # to assertRaisesRegexp to check exception type and message at once
        self.assertRaises(ValueError, disk_array.read, 0, self.size, 2,
                          out_buffer_slice)
        try:
            disk_array.read(0, self.size, 2, out_buffer_slice)
        except ValueError as exc:
            self.assertEqual('output array not C contiguous', str(exc))

    def test_buffer_too_small(self):
        # Half-sized buffer cannot hold the full requested range.
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size // 2, ), 'i8')
        self.assertRaises(ValueError, disk_array.read, 0, self.size, 1,
                          out_buffer)
        try:
            disk_array.read(0, self.size, 1, out_buffer)
        except ValueError as exc:
            self.assertTrue('output array size invalid, got' in str(exc))
class SizeOnDiskInMemoryPropertyTestCase(common.TempFileMixin, TestCase):
    # Checks the size_on_disk / size_in_memory properties of CArray for
    # empty, uncompressed, highly-compressible and random data.

    def setUp(self):
        super(SizeOnDiskInMemoryPropertyTestCase, self).setUp()
        self.array_size = (10000, 10)
        # set chunkshape so it divides evenly into array_size, to avoid
        # partially filled chunks
        self.chunkshape = (1000, 10)
        # approximate size (in bytes) of non-data portion of hdf5 file
        self.hdf_overhead = 6000

    def create_array(self, complevel):
        """Create self.array as an int16 CArray with the given blosc level."""
        filters = tables.Filters(complevel=complevel, complib='blosc')
        self.array = self.h5file.create_carray('/', 'somearray',
                                               atom=Int16Atom(),
                                               shape=self.array_size,
                                               filters=filters,
                                               chunkshape=self.chunkshape)

    def test_no_data(self):
        # Nothing written yet: no chunks allocated on disk.
        complevel = 0
        self.create_array(complevel)
        self.assertEqual(self.array.size_on_disk, 0)
        self.assertEqual(self.array.size_in_memory, 10000 * 10 * 2)

    def test_data_no_compression(self):
        # Uncompressed: disk size equals logical (in-memory) size.
        complevel = 0
        self.create_array(complevel)
        self.array[:] = 1
        self.assertEqual(self.array.size_on_disk, 10000 * 10 * 2)
        self.assertEqual(self.array.size_in_memory, 10000 * 10 * 2)

    def test_highly_compressible_data(self):
        complevel = 1
        self.create_array(complevel)
        self.array[:] = 1
        self.h5file.flush()
        file_size = os.stat(self.h5fname).st_size
        self.assertTrue(
            abs(self.array.size_on_disk - file_size) <= self.hdf_overhead)
        self.assertTrue(self.array.size_on_disk < self.array.size_in_memory)
        self.assertEqual(self.array.size_in_memory, 10000 * 10 * 2)

    # XXX
    def test_random_data(self):
        complevel = 1
        self.create_array(complevel)
        # NOTE: `high` must be an int; passing the float 1e6 is rejected
        # by modern NumPy versions of random.randint.
        self.array[:] = numpy.random.randint(0, 10 ** 6, self.array_size)
        self.h5file.flush()
        file_size = os.stat(self.h5fname).st_size
        self.assertTrue(
            abs(self.array.size_on_disk - file_size) <= self.hdf_overhead)
        # XXX: check. The test fails if blosc is not available
        if tables.which_lib_version('blosc') is not None:
            self.assertAlmostEqual(self.array.size_on_disk, 10000 * 10 * 2)
        else:
            self.assertTrue(
                abs(self.array.size_on_disk - 10000 * 10 * 2) < 200)
class OffsetStrideTestCase(common.TempFileMixin, TestCase):
    # Verifies that assigning offset (non-zero-based) and strided NumPy
    # views into a CArray stores the correct elements, for both string
    # and integer atoms.
    compress = 0
    complib = "zlib" # Default compression library

    def setUp(self):
        super(OffsetStrideTestCase, self).setUp()
        # Create an instance of an HDF5 Table
        self.rootgroup = self.h5file.root

    def test01a_String(self):
        """Checking carray with offseted NumPy strings appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01a_String..." % self.__class__.__name__)
        shape = (3, 2, 2)
        # Create an string atom
        carray = self.h5file.create_carray(root, 'strings',
                                           atom=StringAtom(itemsize=3),
                                           shape=shape,
                                           title="Array of strings",
                                           chunkshape=(1, 2, 2))
        a = numpy.array([[["a", "b"], [
            "123", "45"], ["45", "123"]]], dtype="S3")
        # a[0, 1:] is an offset view: rows 1 and 2 of a[0]
        carray[0] = a[0, 1:]
        a = numpy.array([[["s", "a"], [
            "ab", "f"], ["s", "abc"], ["abc", "f"]]])
        carray[1] = a[0, 2:]
        # Read all the data:
        data = carray.read()
        if common.verbose:
            print("Object read:", data)
            print("Nrows in", carray._v_pathname, ":", carray.nrows)
            print("Second row in carray ==>", data[1].tolist())
        self.assertEqual(carray.nrows, 3)
        self.assertEqual(data[0].tolist(), [[b"123", b"45"], [b"45", b"123"]])
        self.assertEqual(data[1].tolist(), [[b"s", b"abc"], [b"abc", b"f"]])
        self.assertEqual(len(data[0]), 2)
        self.assertEqual(len(data[1]), 2)

    def test01b_String(self):
        """Checking carray with strided NumPy strings appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01b_String..." % self.__class__.__name__)
        shape = (3, 2, 2)
        # Create an string atom
        carray = self.h5file.create_carray(root, 'strings',
                                           atom=StringAtom(itemsize=3),
                                           shape=shape,
                                           title="Array of strings",
                                           chunkshape=(1, 2, 2))
        a = numpy.array([[["a", "b"], [
            "123", "45"], ["45", "123"]]], dtype="S3")
        # a[0, ::2] is a strided view: every other row of a[0]
        carray[0] = a[0, ::2]
        a = numpy.array([[["s", "a"], [
            "ab", "f"], ["s", "abc"], ["abc", "f"]]])
        carray[1] = a[0, ::2]
        # Read all the rows:
        data = carray.read()
        if common.verbose:
            print("Object read:", data)
            print("Nrows in", carray._v_pathname, ":", carray.nrows)
            print("Second row in carray ==>", data[1].tolist())
        self.assertEqual(carray.nrows, 3)
        self.assertEqual(data[0].tolist(), [[b"a", b"b"], [b"45", b"123"]])
        self.assertEqual(data[1].tolist(), [[b"s", b"a"], [b"s", b"abc"]])
        self.assertEqual(len(data[0]), 2)
        self.assertEqual(len(data[1]), 2)

    def test02a_int(self):
        """Checking carray with offseted NumPy ints appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02a_int..." % self.__class__.__name__)
        shape = (3, 3)
        # Create an string atom
        carray = self.h5file.create_carray(root, 'CAtom',
                                           atom=Int32Atom(), shape=shape,
                                           title="array of ints",
                                           chunkshape=(1, 3))
        a = numpy.array([(0, 0, 0), (1, 0, 3), (
            1, 1, 1), (0, 0, 0)], dtype='int32')
        carray[0:2] = a[2:] # Introduce an offset
        a = numpy.array([(1, 1, 1), (-1, 0, 0)], dtype='int32')
        carray[2:3] = a[1:] # Introduce an offset
        # Read all the rows:
        data = carray.read()
        if common.verbose:
            print("Object read:", data)
            print("Nrows in", carray._v_pathname, ":", carray.nrows)
            print("Third row in carray ==>", data[2])
        self.assertEqual(carray.nrows, 3)
        self.assertTrue(allequal(data[
            0], numpy.array([1, 1, 1], dtype='int32')))
        self.assertTrue(allequal(data[
            1], numpy.array([0, 0, 0], dtype='int32')))
        self.assertTrue(allequal(data[
            2], numpy.array([-1, 0, 0], dtype='int32')))

    def test02b_int(self):
        """Checking carray with strided NumPy ints appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02b_int..." % self.__class__.__name__)
        shape = (3, 3)
        # Create an string atom
        carray = self.h5file.create_carray(root, 'CAtom',
                                           atom=Int32Atom(), shape=shape,
                                           title="array of ints",
                                           chunkshape=(1, 3))
        a = numpy.array([(0, 0, 0), (1, 0, 3), (
            1, 1, 1), (3, 3, 3)], dtype='int32')
        carray[0:2] = a[::3] # Create an offset
        a = numpy.array([(1, 1, 1), (-1, 0, 0)], dtype='int32')
        carray[2:3] = a[::2] # Create an offset
        # Read all the rows:
        data = carray.read()
        if common.verbose:
            print("Object read:", data)
            print("Nrows in", carray._v_pathname, ":", carray.nrows)
            print("Third row in carray ==>", data[2])
        self.assertEqual(carray.nrows, 3)
        self.assertTrue(allequal(data[
            0], numpy.array([0, 0, 0], dtype='int32')))
        self.assertTrue(allequal(data[
            1], numpy.array([3, 3, 3], dtype='int32')))
        self.assertTrue(allequal(data[
            2], numpy.array([1, 1, 1], dtype='int32')))
class CopyTestCase(common.TempFileMixin, TestCase):
    # Tests for CArray.copy(): metadata, flavors, titles and user
    # attributes.  Subclasses must set the `close` class attribute
    # (see CloseCopyTestCase / OpenCopyTestCase); when true, the file is
    # reopened around each operation so the on-disk state is exercised.
    #
    # Fix in this revision: the verbose "Running ..." messages of
    # test03a/test03b/test03c printed the wrong test names
    # (test03c/test03d/test03e), which made verbose output misleading.

    def test01a_copy(self):
        """Checking CArray.copy() method."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01a_copy..." % self.__class__.__name__)

        # Create an CArray
        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[...] = numpy.array([[456, 2], [3, 457]], dtype='int16')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test01b_copy(self):
        """Checking CArray.copy() method."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01b_copy..." % self.__class__.__name__)

        # Create an CArray.  The chunkshape (5, 5) is larger than the
        # array shape (2, 2) on purpose.
        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(5, 5))
        array1[...] = numpy.array([[456, 2], [3, 457]], dtype='int16')

        if self.close:
            if common.verbose:
                print("(closing h5fname version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing h5fname version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # By default, the chunkshape should be the same
        self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test01c_copy(self):
        """Checking CArray.copy() method."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01c_copy..." % self.__class__.__name__)

        # Create an CArray with only part of it initialized
        shape = (5, 5)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[:2, :2] = numpy.array([[456, 2], [3, 457]], dtype='int16')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test02_copy(self):
        """Checking CArray.copy() method (where specified)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_copy..." % self.__class__.__name__)

        # Create an CArray
        shape = (5, 5)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[:2, :2] = numpy.array([[456, 2], [3, 457]], dtype='int16')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy to another location, inside a newly created group
        group1 = self.h5file.create_group("/", "group1")
        array2 = array1.copy(group1, 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.group1.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test03a_copy(self):
        """Checking CArray.copy() method (python flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # BUG FIX: used to print "test03c_copy"
            print("Running %s.test03a_copy..." % self.__class__.__name__)

        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1.flavor = "python"
        array1[...] = [[456, 2], [3, 457]]

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal
        self.assertEqual(array1.read(), array2.read())
        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor) # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test03b_copy(self):
        """Checking CArray.copy() method (string python flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # BUG FIX: used to print "test03d_copy"
            print("Running %s.test03b_copy..." % self.__class__.__name__)

        shape = (2, 2)
        atom = StringAtom(itemsize=4)
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1.flavor = "python"
        array1[...] = [["456", "2"], ["3", "457"]]

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("type value-->", type(array2[:][0][0]))
            print("value-->", array2[:])
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal
        self.assertEqual(array1.read(), array2.read())

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor) # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test03c_copy(self):
        """Checking CArray.copy() method (chararray flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # BUG FIX: used to print "test03e_copy"
            print("Running %s.test03c_copy..." % self.__class__.__name__)

        shape = (2, 2)
        atom = StringAtom(itemsize=4)
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[...] = numpy.array([["456", "2"], ["3", "457"]], dtype="S4")

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))
        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor) # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # self.assertEqual(array1.chunkshape, array2.chunkshape)

    def test04_copy(self):
        """Checking CArray.copy() method (checking title copying)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04_copy..." % self.__class__.__name__)

        # Create an CArray
        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[...] = numpy.array([[456, 2], [3, 457]], dtype='int16')
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another Array, overriding the title
        array2 = array1.copy('/', 'array2', title="title array2")

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        # Assert user attributes
        if common.verbose:
            print("title of destination array-->", array2.title)
        self.assertEqual(array2.title, "title array2")

    def test05_copy(self):
        """Checking CArray.copy() method (user attributes copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test05_copy..." % self.__class__.__name__)

        # Create an CArray
        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[...] = numpy.array([[456, 2], [3, 457]], dtype='int16')
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another Array, asking for user attributes
        array2 = array1.copy('/', 'array2', copyuserattrs=1)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert user attributes
        self.assertEqual(array2.attrs.attr1, "attr1")
        self.assertEqual(array2.attrs.attr2, 2)

    def test05b_copy(self):
        """Checking CArray.copy() method (user attributes not copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test05b_copy..." % self.__class__.__name__)

        # Create an Array
        shape = (2, 2)
        atom = Int16Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        array1[...] = numpy.array([[456, 2], [3, 457]], dtype='int16')
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode="a")
            array1 = self.h5file.root.array1

        # Copy it to another Array, refusing user attributes
        array2 = array1.copy('/', 'array2', copyuserattrs=0)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert user attributes
        self.assertEqual(hasattr(array2.attrs, "attr1"), 0)
        self.assertEqual(hasattr(array2.attrs, "attr2"), 0)
class CloseCopyTestCase(CopyTestCase):
    # Run the copy tests with a file close/reopen cycle between steps.
    close = 1
class OpenCopyTestCase(CopyTestCase):
    # Run the copy tests with the file kept open the whole time.
    close = 0
class CopyIndexTestCase(common.TempFileMixin, TestCase):
    """Check CArray.copy() honoring start/stop/step slicing parameters.

    Subclasses below override ``nrowsinbuf``/``start``/``stop``/``step``
    to exercise many slicing combinations.
    """

    # Internal buffer size (in rows) used while copying.
    nrowsinbuf = 2

    def test01_index(self):
        """Checking CArray.copy() method with indexes."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_index..." % self.__class__.__name__)

        # Create an CArray filled with 0..199 reshaped to (100, 2).
        shape = (100, 2)
        atom = Int32Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        r = numpy.arange(200, dtype='int32')
        r.shape = shape
        array1[...] = r

        # Select a different buffer size:
        array1.nrowsinbuf = self.nrowsinbuf

        # Copy only the [start:stop:step] slice to another array.
        array2 = array1.copy("/", 'array2',
                             start=self.start,
                             stop=self.stop,
                             step=self.step)
        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        r2 = r[self.start:self.stop:self.step]
        self.assertTrue(allequal(r2, array2.read()))

        # Assert the number of rows in array
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", r2.shape[0])
        # The next line is commented out because a copy should not
        # keep the same chunkshape anymore.
        # F. Alted 2006-11-27
        # assert array1.chunkshape == array2.chunkshape
        self.assertEqual(r2.shape[0], array2.nrows)

    def _test02_indexclosef(self):
        """Checking CArray.copy() method with indexes (close file version)"""
        # NOTE(review): the leading underscore keeps this variant out of test
        # discovery; presumably disabled on purpose (it asserts that the
        # chunkshape survives the copy, which contradicts the note above) --
        # confirm before re-enabling.

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_indexclosef..." % self.__class__.__name__)

        # Create an CArray
        shape = (100, 2)
        atom = Int32Atom()
        array1 = self.h5file.create_carray(
            self.h5file.root, 'array1', atom=atom, shape=shape,
            title="title array1", chunkshape=(2, 2))
        r = numpy.arange(200, dtype='int32')
        r.shape = shape
        array1[...] = r

        # Select a different buffer size:
        array1.nrowsinbuf = self.nrowsinbuf

        # Copy to another array
        array2 = array1.copy("/", 'array2',
                             start=self.start,
                             stop=self.stop,
                             step=self.step)

        # Close and reopen the file
        self._reopen()
        array1 = self.h5file.root.array1
        array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        r2 = r[self.start:self.stop:self.step]
        self.assertEqual(array1.chunkshape, array2.chunkshape)
        self.assertTrue(allequal(r2, array2.read()))

        # Assert the number of rows in array
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", r2.shape[0])
        self.assertEqual(r2.shape[0], array2.nrows)
class CopyIndex1TestCase(CopyIndexTestCase):
    # Slice [0:7:1] copied with a one-row buffer.
    nrowsinbuf = 1
    start = 0
    stop = 7
    step = 1
class CopyIndex2TestCase(CopyIndexTestCase):
    # Slice [0:-1:1] (everything but the last row), two-row buffer.
    nrowsinbuf = 2
    start = 0
    stop = -1
    step = 1
class CopyIndex3TestCase(CopyIndexTestCase):
    # Slice [1:7:1] with a three-row buffer.
    nrowsinbuf = 3
    start = 1
    stop = 7
    step = 1
class CopyIndex4TestCase(CopyIndexTestCase):
    # Slice [0:6:1] with a four-row buffer.
    nrowsinbuf = 4
    start = 0
    stop = 6
    step = 1
class CopyIndex5TestCase(CopyIndexTestCase):
    # Slice [3:7:1] with a two-row buffer.
    nrowsinbuf = 2
    start = 3
    stop = 7
    step = 1
class CopyIndex6TestCase(CopyIndexTestCase):
    # Slice [3:6:2] -- non-unit step.
    nrowsinbuf = 2
    start = 3
    stop = 6
    step = 2
class CopyIndex7TestCase(CopyIndexTestCase):
    # Slice [0:7:10] -- step larger than the slice, yields one row.
    start = 0
    stop = 7
    step = 10
class CopyIndex8TestCase(CopyIndexTestCase):
    # Slice [6:-1:1].
    start = 6
    stop = -1  # Negative values means starting from the end
    step = 1
class CopyIndex9TestCase(CopyIndexTestCase):
    # Single-row slice [3:4:1].
    start = 3
    stop = 4
    step = 1
class CopyIndex10TestCase(CopyIndexTestCase):
    # Single-row slice [3:4:2] with a one-row buffer.
    nrowsinbuf = 1
    start = 3
    stop = 4
    step = 2
class CopyIndex11TestCase(CopyIndexTestCase):
    # Negative start and stop: slice [-3:-1:2].
    start = -3
    stop = -1
    step = 2
class CopyIndex12TestCase(CopyIndexTestCase):
    start = -1  # Should point to the last element
    stop = None  # None should mean the last element (including it)
    step = 1
# The next test should be run only in **heavy** mode
class Rows64bitsTestCase(common.TempFileMixin, TestCase):
    """Check CArrays with more than 2**31-1 rows (64-bit row counts).

    ``close`` is provided by the two subclasses below and controls whether
    the file is reopened before the checks.
    """

    narows = 1000 * 1000   # each array will have 1 million entries
    # narows = 1000        # for testing only
    nanumber = 1000 * 3    # That should account for more than 2**31-1

    def setUp(self):
        super(Rows64bitsTestCase, self).setUp()

        # Create an CArray (int8, LZO-compressed so the huge array stays
        # small on disk).
        shape = (self.narows * self.nanumber,)
        array = self.h5file.create_carray(
            self.h5file.root, 'array',
            atom=Int8Atom(), shape=shape,
            filters=tables.Filters(complib='lzo', complevel=1))

        # Fill the array: only the first and last segments are written (the
        # middle is left at the default); that is enough for the checks below.
        na = numpy.arange(self.narows, dtype='int8')
        #~ for i in xrange(self.nanumber):
        #~     s = slice(i * self.narows, (i + 1)*self.narows)
        #~     array[s] = na
        s = slice(0, self.narows)
        array[s] = na
        s = slice((self.nanumber-1)*self.narows, self.nanumber * self.narows)
        array[s] = na

    def test01_basiccheck(self):
        "Some basic checks for carrays exceeding 2**31 rows"

        array = self.h5file.root.array

        if self.close:
            if common.verbose:
                # Check how many entries there are in the array
                print("Before closing")
                print("Entries:", array.nrows, type(array.nrows))
                print("Entries:", array.nrows / (1000 * 1000), "Millions")
                print("Shape:", array.shape)
            # Re-open the file
            self._reopen()
            array = self.h5file.root.array
            if common.verbose:
                print("After re-open")

        # Check how many entries there are in the array
        if common.verbose:
            print("Entries:", array.nrows, type(array.nrows))
            print("Entries:", array.nrows / (1000 * 1000), "Millions")
            print("Shape:", array.shape)
            print("Last 10 elements-->", array[-10:])
            # int8 values wrap every 256 rows; reproduce the expected tail.
            stop = self.narows % 256
            if stop > 127:
                stop -= 256
            start = stop - 10
            # print("start, stop-->", start, stop)
            print("Should look like:", numpy.arange(start, stop, dtype='int8'))

        nrows = self.narows * self.nanumber
        # check nrows
        self.assertEqual(array.nrows, nrows)
        # Check shape
        self.assertEqual(array.shape, (nrows,))
        # check the 10 first elements
        self.assertTrue(allequal(array[:10], numpy.arange(10, dtype='int8')))
        # check the 10 last elements (accounting for int8 wrap-around)
        stop = self.narows % 256
        if stop > 127:
            stop -= 256
        start = stop - 10
        self.assertTrue(allequal(array[-10:],
                                 numpy.arange(start, stop, dtype='int8')))
class Rows64bitsTestCase1(Rows64bitsTestCase):
    # Keep the file open for the whole test.
    close = 0
class Rows64bitsTestCase2(Rows64bitsTestCase):
    # Close and reopen the file before checking.
    close = 1
class BigArrayTestCase(common.TempFileMixin, TestCase):
    """Regression test for ticket #147: shapes beyond 2**31-1 elements."""

    shape = (3000000000,)  # more than 2**31-1

    def setUp(self):
        super(BigArrayTestCase, self).setUp()
        # This should be fast since disk space isn't actually allocated,
        # so this case is OK for non-heavy test runs.
        self.h5file.create_carray('/', 'array',
                                  atom=Int8Atom(), shape=self.shape)

    def test00_shape(self):
        """Check that the shape doesn't overflow."""
        # See ticket #147.
        self.assertEqual(self.h5file.root.array.shape, self.shape)
        try:
            self.assertEqual(len(self.h5file.root.array), self.shape[0])
        except OverflowError:
            # In python 2.4 calling "len(self.h5file.root.array)" raises
            # an OverflowError also on 64bit platforms::
            #   OverflowError: __len__() should return 0 <= outcome < 2**31
            if sys.version_info[:2] > (2, 4):
                # This can't be avoided in 32-bit platforms.
                self.assertTrue(self.shape[0] > numpy.iinfo(int).max,
                                "Array length overflowed but ``int`` "
                                "is wide enough.")

    def test01_shape_reopen(self):
        """Check that the shape doesn't overflow after reopening."""
        self._reopen('r')
        self.test00_shape()
# Test for default values when creating arrays.
class DfltAtomTestCase(common.TempFileMixin, TestCase):
    """Check that Atom.dflt fills freshly created CArrays.

    ``reopen`` is provided by the subclasses below.
    """

    def test00_dflt(self):
        "Check that Atom.dflt is honored (string version)."

        # Create a CArray with default values
        self.h5file.create_carray('/', 'bar',
                                  atom=StringAtom(itemsize=5, dflt=b"abdef"),
                                  shape=(10, 10))

        if self.reopen:
            self._reopen()

        # Check the values
        values = self.h5file.root.bar[:]
        if common.verbose:
            print("Read values:", values)
        self.assertTrue(
            allequal(values, numpy.array(["abdef"]*100, "S5").reshape(10, 10)))

    def test01_dflt(self):
        "Check that Atom.dflt is honored (int version)."

        # Create a CArray with default values
        self.h5file.create_carray('/', 'bar',
                                  atom=IntAtom(dflt=1), shape=(10, 10))

        if self.reopen:
            self._reopen()

        # Check the values
        values = self.h5file.root.bar[:]
        if common.verbose:
            print("Read values:", values)
        self.assertTrue(allequal(values, numpy.ones((10, 10), "i4")))

    def test02_dflt(self):
        "Check that Atom.dflt is honored (float version)."

        # Create a CArray with default values
        self.h5file.create_carray('/', 'bar',
                                  atom=FloatAtom(dflt=1.134), shape=(10, 10))

        if self.reopen:
            self._reopen()

        # Check the values
        values = self.h5file.root.bar[:]
        if common.verbose:
            print("Read values:", values)
        self.assertTrue(allequal(values, numpy.ones((10, 10), "f8")*1.134))
class DfltAtomNoReopen(DfltAtomTestCase):
    # Check defaults without closing/reopening the file.
    reopen = False
class DfltAtomReopen(DfltAtomTestCase):
    # Check defaults after a close/reopen cycle.
    reopen = True
# Test for representation of defaults in atoms. Ticket #212.
class AtomDefaultReprTestCase(common.TempFileMixin, TestCase):
    """Representation of defaults in atoms.  Ticket #212.

    ``reopen`` is provided by the subclasses below.
    """

    def test00a_zeros(self):
        """Testing default values. Zeros (scalar)."""
        N = ()
        atom = StringAtom(itemsize=3, shape=N, dflt=b"")
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", repr(ca[0]))
            print("Defaults-->", repr(ca.atom.dflt))
        self.assertTrue(allequal(ca[0], numpy.zeros(N, 'S3')))
        self.assertTrue(allequal(ca.atom.dflt, numpy.zeros(N, 'S3')))

    def test00b_zeros(self):
        """Testing default values. Zeros (array)."""
        N = 2
        atom = StringAtom(itemsize=3, shape=N, dflt=b"")
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", ca[0])
            print("Defaults-->", ca.atom.dflt)
        self.assertTrue(allequal(ca[0], numpy.zeros(N, 'S3')))
        self.assertTrue(allequal(ca.atom.dflt, numpy.zeros(N, 'S3')))

    def test01a_values(self):
        """Testing default values. Ones."""
        N = 2
        atom = Int32Atom(shape=N, dflt=1)
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", ca[0])
            print("Defaults-->", ca.atom.dflt)
        self.assertTrue(allequal(ca[0], numpy.ones(N, 'i4')))
        self.assertTrue(allequal(ca.atom.dflt, numpy.ones(N, 'i4')))

    def test01b_values(self):
        """Testing default values. Generic value."""
        N = 2
        generic = 112.32
        atom = Float32Atom(shape=N, dflt=generic)
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", ca[0])
            print("Defaults-->", ca.atom.dflt)
        self.assertTrue(allequal(ca[0], numpy.ones(N, 'f4')*generic))
        self.assertTrue(allequal(ca.atom.dflt, numpy.ones(N, 'f4')*generic))

    def test02a_None(self):
        """Testing default values. None (scalar)."""
        # A None default falls back to zeros; only the dflt attribute is
        # asserted here (the row contents are not checked).
        N = ()
        atom = Int32Atom(shape=N, dflt=None)
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", repr(ca[0]))
            print("Defaults-->", repr(ca.atom.dflt))
        self.assertTrue(allequal(ca.atom.dflt, numpy.zeros(N, 'i4')))

    def test02b_None(self):
        """Testing default values. None (array)."""
        N = 2
        atom = Int32Atom(shape=N, dflt=None)
        ca = self.h5file.create_carray('/', 'test', atom=atom, shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Check the value
        if common.verbose:
            print("First row-->", ca[0])
            print("Defaults-->", ca.atom.dflt)
        self.assertTrue(allequal(ca.atom.dflt, numpy.zeros(N, 'i4')))
class AtomDefaultReprNoReopen(AtomDefaultReprTestCase):
    # Check defaults without closing/reopening the file.
    reopen = False
class AtomDefaultReprReopen(AtomDefaultReprTestCase):
    # Check defaults after a close/reopen cycle.
    reopen = True
class TruncateTestCase(common.TempFileMixin, TestCase):

    def test(self):
        """Test for inability to truncate Array objects."""
        # CArrays have a fixed shape, so truncate() must raise TypeError.
        array1 = self.h5file.create_carray('/', 'array1', IntAtom(), [2, 2])
        self.assertRaises(TypeError, array1.truncate, 0)
# Test for dealing with multidimensional atoms
# Test for dealing with multidimensional atoms
class MDAtomTestCase(common.TempFileMixin, TestCase):
    """Assignment into CArrays whose atom is itself multidimensional.

    The atom shape is appended to the array shape, so a shape-(1,) CArray
    of 2x2 atoms behaves like a (1, 2, 2) array.  ``reopen`` is provided
    by the subclasses below.
    """

    def test01a_assign(self):
        """Assign a row to a (unidimensional) CArray with a MD atom."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2, 2)), shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign one row
        ca[0] = [[1, 3], [4, 5]]
        self.assertEqual(ca.nrows, 1)
        if common.verbose:
            print("First row-->", ca[0])
        self.assertTrue(allequal(ca[0], numpy.array([[1, 3], [4, 5]], 'i4')))

    def test01b_assign(self):
        """Assign several rows to a (unidimensional) CArray with a MD atom."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2, 2)), shape=(3,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign three rows
        ca[:] = [[[1]], [[2]], [[3]]]  # Simple broadcast
        self.assertEqual(ca.nrows, 3)
        if common.verbose:
            print("Third row-->", ca[2])
        self.assertTrue(allequal(ca[2], numpy.array([[3, 3], [3, 3]], 'i4')))

    def test02a_assign(self):
        """Assign a row to a (multidimensional) CArray with a MD atom."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2,)), shape=(1, 3))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign one row
        ca[:] = [[[1, 3], [4, 5], [7, 9]]]
        self.assertEqual(ca.nrows, 1)
        if common.verbose:
            print("First row-->", ca[0])
        self.assertTrue(allequal(ca[0], numpy.array(
            [[1, 3], [4, 5], [7, 9]], 'i4')))

    def test02b_assign(self):
        """Assign several rows to a (multidimensional) CArray with
        a MD atom."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2,)), shape=(3, 3))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign three rows
        ca[:] = [[[1, -3], [4, -5], [-7, 9]],
                 [[-1, 3], [-4, 5], [7, -8]],
                 [[-2, 3], [-5, 5], [7, -9]]]
        self.assertEqual(ca.nrows, 3)
        if common.verbose:
            print("Third row-->", ca[2])
        self.assertTrue(
            allequal(ca[2], numpy.array([[-2, 3], [-5, 5], [7, -9]], 'i4')))

    def test03a_MDMDMD(self):
        """Complex assign of a MD array in a MD CArray with a MD atom."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2, 4)), shape=(3, 2, 3))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign values
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(2 * 3*2*4, dtype='i4').reshape((2, 3, 2, 4))
        ca[:] = [a * 1, a*2, a*3]
        self.assertEqual(ca.nrows, 3)
        if common.verbose:
            print("Third row-->", ca[2])
        self.assertTrue(allequal(ca[2], a * 3))

    def test03b_MDMDMD(self):
        """Complex assign of a MD array in a MD CArray with a MD atom (II)."""

        # Create an CArray
        ca = self.h5file.create_carray(
            '/', 'test', atom=Int32Atom((2, 4)), shape=(2, 3, 3))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign values
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(2 * 3*3*2*4, dtype='i4').reshape((2, 3, 3, 2, 4))
        ca[:] = a
        self.assertEqual(ca.nrows, 2)
        if common.verbose:
            print("Third row-->", ca[:, 2, ...])
        self.assertTrue(allequal(ca[:, 2, ...], a[:, 2, ...]))

    def test03c_MDMDMD(self):
        """Complex assign of a MD array in a MD CArray with a MD atom (III)."""

        # Create an CArray
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom((2, 4)), shape=(3, 1, 2))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test
        # Assign values
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(3 * 1*2*2*4, dtype='i4').reshape((3, 1, 2, 2, 4))
        ca[:] = a
        self.assertEqual(ca.nrows, 3)
        if common.verbose:
            print("Second row-->", ca[:, :, 1, ...])
        self.assertTrue(allequal(ca[:, :, 1, ...], a[:, :, 1, ...]))
class MDAtomNoReopen(MDAtomTestCase):
    # Run the MD-atom tests without closing/reopening the file.
    reopen = False
class MDAtomReopen(MDAtomTestCase):
    # Run the MD-atom tests with a close/reopen cycle after creation.
    reopen = True
# Test for building very large MD atoms without defaults. Ticket #211.
# Test for building very large MD atoms without defaults. Ticket #211.
class MDLargeAtomTestCase(common.TempFileMixin, TestCase):

    def test01_create(self):
        """Create a CArray with a very large MD atom."""
        N = 2**16  # 4x larger than maximum object header size (64 KB)
        ca = self.h5file.create_carray('/', 'test',
                                       atom=Int32Atom(shape=N), shape=(1,))
        if self.reopen:
            self._reopen('a')
            ca = self.h5file.root.test

        # Check the value: with no explicit dflt, the row must be all zeros.
        if common.verbose:
            print("First row-->", ca[0])
        self.assertTrue(allequal(ca[0], numpy.zeros(N, 'i4')))
class MDLargeAtomNoReopen(MDLargeAtomTestCase):
    # Check the large atom without closing/reopening the file.
    reopen = False
class MDLargeAtomReopen(MDLargeAtomTestCase):
    # Check the large atom after a close/reopen cycle.
    reopen = True
class AccessClosedTestCase(common.TempFileMixin, TestCase):
    """Any access to a CArray node of a closed file must raise
    ``tables.ClosedNodeError``."""

    def setUp(self):
        super(AccessClosedTestCase, self).setUp()
        # Keep a reference to the node so it can still be poked at after
        # the file has been closed.
        self.array = self.h5file.create_carray(self.h5file.root, 'array',
                                               atom=Int32Atom(),
                                               shape=(10, 10))
        self.array[...] = numpy.zeros((10, 10))

    def test_read(self):
        self.h5file.close()
        self.assertRaises(tables.ClosedNodeError, lambda: self.array.read())

    def test_getitem(self):
        self.h5file.close()
        self.assertRaises(tables.ClosedNodeError, lambda: self.array[0])

    def test_setitem(self):
        self.h5file.close()

        def assign():
            self.array[0] = 0
        self.assertRaises(tables.ClosedNodeError, assign)
class TestCreateCArrayArgs(common.TempFileMixin, TestCase):
    """Exercise File.create_carray() argument handling.

    Covers positional and keyword forms of atom/shape vs the ``obj``
    shortcut, and checks that inconsistent combinations raise TypeError.
    """

    # Common fixture values reused by every test below.
    obj = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    where = '/'
    name = 'carray'
    atom = Atom.from_dtype(obj.dtype)
    shape = obj.shape
    title = 'title'
    filters = None
    chunkshape = (1, 2)
    byteorder = None
    createparents = False

    def test_positional_args_01(self):
        # atom/shape given positionally; the array is never filled, so it
        # must read back as all zeros.
        self.h5file.create_carray(self.where, self.name,
                                  self.atom, self.shape,
                                  self.title, self.filters, self.chunkshape)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))

    def test_positional_args_02(self):
        # Same as above but the array is filled before closing.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          self.atom, self.shape,
                                          self.title,
                                          self.filters, self.chunkshape)
        ptarr[...] = self.obj
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_positional_args_obj(self):
        # atom/shape passed as None; everything is inferred from ``obj``.
        self.h5file.create_carray(self.where, self.name,
                                  None, None,
                                  self.title,
                                  self.filters, self.chunkshape,
                                  self.byteorder, self.createparents,
                                  self.obj)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_obj(self):
        # Only ``obj`` given as a keyword; atom and shape are inferred.
        self.h5file.create_carray(self.where, self.name, title=self.title,
                                  chunkshape=self.chunkshape,
                                  obj=self.obj)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_atom_shape_01(self):
        # atom/shape as keywords, then filled explicitly.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          atom=self.atom, shape=self.shape)
        ptarr[...] = self.obj
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_atom_shape_02(self):
        # atom/shape as keywords, never filled: reads back as zeros.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          atom=self.atom, shape=self.shape)
        #ptarr[...] = self.obj
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))

    def test_kwargs_obj_atom(self):
        # obj plus a *consistent* atom is accepted.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          obj=self.obj,
                                          atom=self.atom)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_obj_shape(self):
        # obj plus a *consistent* shape is accepted.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          obj=self.obj,
                                          shape=self.shape)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_obj_atom_shape(self):
        # obj plus consistent atom *and* shape is accepted.
        ptarr = self.h5file.create_carray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          obj=self.obj,
                                          atom=self.atom,
                                          shape=self.shape)
        self.h5file.close()

        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()

        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)
        self.assertTrue(allequal(self.obj, nparr))

    def test_kwargs_obj_atom_error(self):
        # obj with a *mismatching* atom dtype must raise TypeError.
        atom = Atom.from_dtype(numpy.dtype('complex'))
        #shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_carray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom)

    def test_kwargs_obj_shape_error(self):
        # obj with a *mismatching* shape must raise TypeError.
        #atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_carray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          shape=shape)

    def test_kwargs_obj_atom_shape_error_01(self):
        # Mismatching atom (shape consistent) must raise TypeError.
        atom = Atom.from_dtype(numpy.dtype('complex'))
        #shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_carray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom,
                          shape=self.shape)

    def test_kwargs_obj_atom_shape_error_02(self):
        # Mismatching shape (atom consistent) must raise TypeError.
        #atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_carray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=self.atom,
                          shape=shape)

    def test_kwargs_obj_atom_shape_error_03(self):
        # Both atom and shape mismatching must raise TypeError.
        atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_carray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom,
                          shape=shape)
def suite():
    """Build and return the TestSuite for this module.

    Heavy (long-running) cases are only added when ``common.heavy`` is set.
    """
    theSuite = unittest.TestSuite()
    niter = 1  # repeat count for the standard cases (loop variable unused)
    # common.heavy = 1  # uncomment this only for testing purposes

    # theSuite.addTest(unittest.makeSuite(BasicTestCase))
    for n in range(niter):
        theSuite.addTest(unittest.makeSuite(BasicWriteTestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite2TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite3TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite4TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite5TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite6TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite7TestCase))
        theSuite.addTest(unittest.makeSuite(BasicWrite8TestCase))
        theSuite.addTest(unittest.makeSuite(EmptyCArrayTestCase))
        theSuite.addTest(unittest.makeSuite(EmptyCArray2TestCase))
        theSuite.addTest(unittest.makeSuite(SlicesCArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Slices2CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(EllipsisCArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis2CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis3CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(ZlibComprTestCase))
        theSuite.addTest(unittest.makeSuite(ZlibShuffleTestCase))
        theSuite.addTest(unittest.makeSuite(BloscComprTestCase))
        theSuite.addTest(unittest.makeSuite(BloscShuffleTestCase))
        theSuite.addTest(unittest.makeSuite(BloscFletcherTestCase))
        theSuite.addTest(unittest.makeSuite(BloscBloscLZTestCase))
        theSuite.addTest(unittest.makeSuite(BloscLZ4TestCase))
        theSuite.addTest(unittest.makeSuite(BloscLZ4HCTestCase))
        theSuite.addTest(unittest.makeSuite(BloscSnappyTestCase))
        theSuite.addTest(unittest.makeSuite(BloscZlibTestCase))
        theSuite.addTest(unittest.makeSuite(LZOComprTestCase))
        theSuite.addTest(unittest.makeSuite(LZOShuffleTestCase))
        theSuite.addTest(unittest.makeSuite(Bzip2ComprTestCase))
        theSuite.addTest(unittest.makeSuite(Bzip2ShuffleTestCase))
        theSuite.addTest(unittest.makeSuite(FloatTypeTestCase))
        theSuite.addTest(unittest.makeSuite(ComplexTypeTestCase))
        theSuite.addTest(unittest.makeSuite(StringTestCase))
        theSuite.addTest(unittest.makeSuite(String2TestCase))
        theSuite.addTest(unittest.makeSuite(StringComprTestCase))
        theSuite.addTest(unittest.makeSuite(Int8TestCase))
        theSuite.addTest(unittest.makeSuite(Int16TestCase))
        theSuite.addTest(unittest.makeSuite(Int32TestCase))
        theSuite.addTest(unittest.makeSuite(Float16TestCase))
        theSuite.addTest(unittest.makeSuite(Float32TestCase))
        theSuite.addTest(unittest.makeSuite(Float64TestCase))
        theSuite.addTest(unittest.makeSuite(Float96TestCase))
        theSuite.addTest(unittest.makeSuite(Float128TestCase))
        theSuite.addTest(unittest.makeSuite(Complex64TestCase))
        theSuite.addTest(unittest.makeSuite(Complex128TestCase))
        theSuite.addTest(unittest.makeSuite(Complex192TestCase))
        theSuite.addTest(unittest.makeSuite(Complex256TestCase))
        theSuite.addTest(unittest.makeSuite(ComprTestCase))
        theSuite.addTest(unittest.makeSuite(OffsetStrideTestCase))
        theSuite.addTest(unittest.makeSuite(Fletcher32TestCase))
        theSuite.addTest(unittest.makeSuite(AllFiltersTestCase))
        theSuite.addTest(unittest.makeSuite(ReadOutArgumentTests))
        theSuite.addTest(unittest.makeSuite(
            SizeOnDiskInMemoryPropertyTestCase))
        theSuite.addTest(unittest.makeSuite(CloseCopyTestCase))
        theSuite.addTest(unittest.makeSuite(OpenCopyTestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex1TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex2TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex3TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex4TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex5TestCase))
        theSuite.addTest(unittest.makeSuite(BigArrayTestCase))
        theSuite.addTest(unittest.makeSuite(DfltAtomNoReopen))
        theSuite.addTest(unittest.makeSuite(DfltAtomReopen))
        theSuite.addTest(unittest.makeSuite(AtomDefaultReprNoReopen))
        theSuite.addTest(unittest.makeSuite(AtomDefaultReprReopen))
        theSuite.addTest(unittest.makeSuite(TruncateTestCase))
        theSuite.addTest(unittest.makeSuite(MDAtomNoReopen))
        theSuite.addTest(unittest.makeSuite(MDAtomReopen))
        theSuite.addTest(unittest.makeSuite(MDLargeAtomNoReopen))
        theSuite.addTest(unittest.makeSuite(MDLargeAtomReopen))
        theSuite.addTest(unittest.makeSuite(AccessClosedTestCase))
        theSuite.addTest(unittest.makeSuite(TestCreateCArrayArgs))
    if common.heavy:
        theSuite.addTest(unittest.makeSuite(Slices3CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Slices4CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis4CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis5CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis6CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(Ellipsis7CArrayTestCase))
        theSuite.addTest(unittest.makeSuite(MD3WriteTestCase))
        theSuite.addTest(unittest.makeSuite(MD5WriteTestCase))
        theSuite.addTest(unittest.makeSuite(MD6WriteTestCase))
        theSuite.addTest(unittest.makeSuite(MD7WriteTestCase))
        theSuite.addTest(unittest.makeSuite(MD10WriteTestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex6TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex7TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex8TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex9TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex10TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex11TestCase))
        theSuite.addTest(unittest.makeSuite(CopyIndex12TestCase))
        theSuite.addTest(unittest.makeSuite(Rows64bitsTestCase1))
        theSuite.addTest(unittest.makeSuite(Rows64bitsTestCase2))

    return theSuite
# Standalone entry point: parse PyTables' common test options, print
# version information, then run the suite defined above.
if __name__ == '__main__':
    common.parse_argv(sys.argv)
    common.print_versions()
    unittest.main(defaultTest='suite')
## Local Variables:
## mode: python
## py-indent-offset: 4
## tab-width: 4
## End:
|
2013Commons/HUE-SHARK | refs/heads/master | desktop/core/ext-py/Django-1.2.3/tests/modeltests/model_package/tests.py | 11 | from django.db import models
class Advertisment(models.Model):
    # NOTE: "Advertisment" (sic) -- the misspelled name is referenced from
    # the doctests below and therefore must not be renamed.
    customer = models.CharField(max_length=100)
    # NOTE(review): null=True has no effect on a ManyToManyField -- confirm
    # it can be dropped.
    publications = models.ManyToManyField("model_package.Publication", null=True, blank=True)

    class Meta:
        # Model lives in the test package, not an app module (see #12245).
        app_label = 'model_package'
__test__ = {'API_TESTS': """
>>> from models.publication import Publication
>>> from models.article import Article
>>> from django.contrib.auth.views import Site
>>> p = Publication(title="FooBar")
>>> p.save()
>>> p
<Publication: Publication object>
>>> from django.contrib.sites.models import Site
>>> current_site = Site.objects.get_current()
>>> current_site
<Site: example.com>
# Regression for #12168: models split into subpackages still get M2M tables
>>> a = Article(headline="a foo headline")
>>> a.save()
>>> a.publications.add(p)
>>> a.sites.add(current_site)
>>> a = Article.objects.get(id=1)
>>> a
<Article: Article object>
>>> a.id
1
>>> a.sites.count()
1
# Regression for #12245 - Models can exist in the test package, too
>>> ad = Advertisment(customer="Lawrence Journal-World")
>>> ad.save()
>>> ad.publications.add(p)
>>> ad = Advertisment.objects.get(id=1)
>>> ad
<Advertisment: Advertisment object>
>>> ad.publications.count()
1
# Regression for #12386 - field names on the autogenerated intermediate class
# that are specified as dotted strings don't retain any path component for the
# field or column name
>>> Article.publications.through._meta.fields[1].name
'article'
>>> Article.publications.through._meta.fields[1].get_attname_column()
('article_id', 'article_id')
>>> Article.publications.through._meta.fields[2].name
'publication'
>>> Article.publications.through._meta.fields[2].get_attname_column()
('publication_id', 'publication_id')
# The oracle backend truncates the name to 'model_package_article_publ233f'.
>>> Article._meta.get_field('publications').m2m_db_table() \\
... in ('model_package_article_publications', 'model_package_article_publ233f')
True
>>> Article._meta.get_field('publications').m2m_column_name()
'article_id'
>>> Article._meta.get_field('publications').m2m_reverse_name()
'publication_id'
"""}
|
TRESCLOUD/odoopub | refs/heads/master | openerp/addons/test_impex/tests/test_export.py | 158 | # -*- coding: utf-8 -*-
import itertools
import openerp.modules.registry
import openerp
from openerp.tests import common
class CreatorCase(common.TransactionCase):
    """Base TransactionCase for export tests: creates one record of
    ``model_name`` with a given ``value`` and exports it through
    ``BaseModel.__export_rows``.
    """
    # subclasses must override with the model to create/export
    model_name = False

    def __init__(self, *args, **kwargs):
        super(CreatorCase, self).__init__(*args, **kwargs)
        self.model = None

    def setUp(self):
        super(CreatorCase, self).setUp()
        self.model = self.registry(self.model_name)

    def make(self, value):
        """Create a record with field ``value`` and return its browse record."""
        # renamed from ``id`` to avoid shadowing the builtin
        record_id = self.model.create(self.cr, openerp.SUPERUSER_ID, {'value': value})
        return self.model.browse(self.cr, openerp.SUPERUSER_ID, [record_id])[0]

    def export(self, value, fields=('value',), context=None):
        """Create a record holding ``value`` and export the given ``fields``.

        NOTE(review): ``context`` is accepted but never forwarded to the
        export call — several tests pass it and appear to rely on it being
        ignored; confirm before wiring it through.
        """
        record = self.make(value)
        return record._BaseModel__export_rows([f.split('/') for f in fields])
class test_boolean_field(CreatorCase):
    """Export behaviour of boolean fields."""
    model_name = 'export.boolean'

    def test_true(self):
        self.assertEqual(
            self.export(True),
            [[u'True']])

    def test_false(self):
        """ ``False`` value to boolean fields is unique in being exported as a
        (unicode) string, not a boolean
        """
        self.assertEqual(
            self.export(False),
            [[u'False']])
class test_integer_field(CreatorCase):
    """Export behaviour of integer fields."""
    model_name = 'export.integer'

    def test_empty(self):
        self.assertEqual(self.model.search(self.cr, openerp.SUPERUSER_ID, []), [],
                         "Test model should have no records")

    def test_0(self):
        # 0 is falsy and exports as False rather than u'0'
        self.assertEqual(
            self.export(0),
            [[False]])

    def test_basic_value(self):
        self.assertEqual(
            self.export(42),
            [[u'42']])

    def test_negative(self):
        self.assertEqual(
            self.export(-32),
            [[u'-32']])

    def test_huge(self):
        # largest 32-bit signed integer round-trips as its decimal string
        self.assertEqual(
            self.export(2**31-1),
            [[unicode(2**31-1)]])
class test_float_field(CreatorCase):
    """Export behaviour of (unconstrained) float fields."""
    model_name = 'export.float'

    def test_0(self):
        # 0.0 is falsy and exports as False rather than u'0.0'
        self.assertEqual(
            self.export(0.0),
            [[False]])

    def test_epsilon(self):
        # unconstrained floats keep full precision (scientific notation)
        self.assertEqual(
            self.export(0.000000000027),
            [[u'2.7e-11']])

    def test_negative(self):
        self.assertEqual(
            self.export(-2.42),
            [[u'-2.42']])

    def test_positive(self):
        self.assertEqual(
            self.export(47.36),
            [[u'47.36']])

    def test_big(self):
        self.assertEqual(
            self.export(87654321.4678),
            [[u'87654321.4678']])
class test_decimal_field(CreatorCase):
    """Export behaviour of precision-constrained float (decimal) fields."""
    model_name = 'export.decimal'

    def test_0(self):
        self.assertEqual(
            self.export(0.0),
            [[False]])

    def test_epsilon(self):
        """ epsilon gets sliced to 0 due to precision
        """
        # ...and 0 is falsy, hence exported as False
        self.assertEqual(
            self.export(0.000000000027),
            [[False]])

    def test_negative(self):
        self.assertEqual(
            self.export(-2.42),
            [[u'-2.42']])

    def test_positive(self):
        self.assertEqual(
            self.export(47.36),
            [[u'47.36']])

    def test_big(self):
        # rounded to the field's decimal precision (3 digits here)
        self.assertEqual(
            self.export(87654321.4678), [[u'87654321.468']])
class test_string_field(CreatorCase):
    """Export behaviour of size-bounded char fields."""
    model_name = 'export.string.bounded'

    def test_empty(self):
        # the empty string is falsy and exports as False
        self.assertEqual(
            self.export(""),
            [[False]])

    def test_within_bounds(self):
        self.assertEqual(
            self.export("foobar"),
            [[u"foobar"]])

    def test_out_of_bounds(self):
        # value is truncated to the field's size on write
        # ("C for Sinking, J" is 16 characters)
        self.assertEqual(
            self.export("C for Sinking, "
                        "Java for Drinking, "
                        "Smalltalk for Thinking. "
                        "...and Power to the Penguin!"),
            [[u"C for Sinking, J"]])
class test_unbound_string_field(CreatorCase):
    """Export behaviour of unbounded char fields (no truncation)."""
    model_name = 'export.string'

    def test_empty(self):
        self.assertEqual(
            self.export(""),
            [[False]])

    def test_small(self):
        self.assertEqual(
            self.export("foobar"),
            [[u"foobar"]])

    def test_big(self):
        # long values round-trip unmodified
        self.assertEqual(
            self.export("We flew down weekly to meet with IBM, but they "
                        "thought the way to measure software was the amount "
                        "of code we wrote, when really the better the "
                        "software, the fewer lines of code."),
            [[u"We flew down weekly to meet with IBM, but they thought the "
              u"way to measure software was the amount of code we wrote, "
              u"when really the better the software, the fewer lines of "
              u"code."]])
class test_text(CreatorCase):
    """Export behaviour of text fields."""
    model_name = 'export.text'

    def test_empty(self):
        self.assertEqual(
            self.export(""),
            [[False]])

    def test_small(self):
        self.assertEqual(
            self.export("foobar"),
            [[u"foobar"]])

    def test_big(self):
        # text fields round-trip long values unmodified
        self.assertEqual(
            self.export("So, `bind' is `let' and monadic programming is"
                        " equivalent to programming in the A-normal form. That"
                        " is indeed all there is to monads"),
            [[u"So, `bind' is `let' and monadic programming is equivalent to"
              u" programming in the A-normal form. That is indeed all there"
              u" is to monads"]])
class test_date(CreatorCase):
    """Export behaviour of date fields."""
    model_name = 'export.date'

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_basic(self):
        # dates export verbatim in ISO (server) format
        self.assertEqual(
            self.export('2011-11-07'),
            [[u'2011-11-07']])
class test_datetime(CreatorCase):
    """Export behaviour of datetime fields."""
    model_name = 'export.datetime'

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_basic(self):
        # datetimes export verbatim in ISO (server) format
        self.assertEqual(
            self.export('2011-11-07 21:05:48'),
            [[u'2011-11-07 21:05:48']])

    def test_tz(self):
        """ Export ignores the timezone and always exports to UTC
        .. note:: on the other hand, export uses user lang for name_get
        """
        # NOTE: ignores user timezone, always exports to UTC
        self.assertEqual(
            self.export('2011-11-07 21:05:48', context={'tz': 'Pacific/Norfolk'}),
            [[u'2011-11-07 21:05:48']])
class test_selection(CreatorCase):
    """Export behaviour of static-list selection fields."""
    model_name = 'export.selection'
    # (source label, French translation) pairs installed by
    # test_localized_export
    translations_fr = [
        ("Qux", "toto"),
        ("Bar", "titi"),
        ("Foo", "tete"),
    ]

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_value(self):
        """ selections export the *label* for their value
        """
        self.assertEqual(
            self.export(2),
            [[u"Bar"]])

    def test_localized_export(self):
        self.registry('res.lang').create(self.cr, openerp.SUPERUSER_ID, {
            'name': u'Français',
            'code': 'fr_FR',
            'translatable': True,
            'date_format': '%d.%m.%Y',
            'decimal_point': ',',
            'thousands_sep': ' ',
        })
        Translations = self.registry('ir.translation')
        for source, value in self.translations_fr:
            Translations.create(self.cr, openerp.SUPERUSER_ID, {
                'name': 'export.selection,value',
                'lang': 'fr_FR',
                'type': 'selection',
                'src': source,
                'value': value
            })
        # NOTE(review): the expected label is the *untranslated* "Bar" even
        # with lang=fr_FR -- presumably because CreatorCase.export() accepts
        # but never forwards its context argument; confirm intent.
        self.assertEqual(
            self.export(2, context={'lang': 'fr_FR'}),
            [[u'Bar']])
class test_selection_function(CreatorCase):
    """Export behaviour of function-backed selection fields."""
    model_name = 'export.selection.function'

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_value(self):
        # FIXME: selection functions export the *value* itself
        self.assertEqual(
            self.export(1),
            [[1]])
        self.assertEqual(
            self.export(3),
            [[3]])
        # ...except 0, which is falsy and therefore exports as False
        self.assertEqual(
            self.export(0),
            [[False]])
class test_m2o(CreatorCase):
    """Export behaviour of many2one fields."""
    model_name = 'export.many2one'

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_basic(self):
        """ Exported value is the name_get of the related object
        """
        integer_id = self.registry('export.integer').create(
            self.cr, openerp.SUPERUSER_ID, {'value': 42})
        name = dict(self.registry('export.integer').name_get(
            self.cr, openerp.SUPERUSER_ID,[integer_id]))[integer_id]
        self.assertEqual(
            self.export(integer_id),
            [[name]])

    def test_path(self):
        """ Can recursively export fields of m2o via path
        """
        integer_id = self.registry('export.integer').create(
            self.cr, openerp.SUPERUSER_ID, {'value': 42})
        # 'value/.id' is the target's database id, 'value/value' one of its
        # regular fields
        self.assertEqual(
            self.export(integer_id, fields=['value/.id', 'value/value']),
            [[unicode(integer_id), u'42']])

    def test_external_id(self):
        integer_id = self.registry('export.integer').create(
            self.cr, openerp.SUPERUSER_ID, {'value': 42})
        # Expecting the m2o target model name in the external id,
        # not this model's name
        external_id = u'__export__.export_integer_%d' % integer_id
        self.assertEqual(
            self.export(integer_id, fields=['value/id']),
            [[external_id]])
class test_o2m(CreatorCase):
    """Export behaviour of one2many fields."""
    model_name = 'export.one2many'
    # create-commands for five child records
    commands = [
        (0, False, {'value': 4, 'str': 'record1'}),
        (0, False, {'value': 42, 'str': 'record2'}),
        (0, False, {'value': 36, 'str': 'record3'}),
        (0, False, {'value': 4, 'str': 'record4'}),
        (0, False, {'value': 13, 'str': 'record5'}),
    ]
    # name_get results expected for those children
    names = [
        u'export.one2many.child:%d' % d['value']
        for c, _, d in commands
    ]

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_single(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})]),
            # name_get result
            [[u'export.one2many.child:42']])

    def test_single_subfield(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})],
                        fields=['value', 'value/value']),
            [[u'export.one2many.child:42', u'42']])

    def test_integrate_one_in_parent(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})],
                        fields=['const', 'value/value']),
            [[u'4', u'42']])

    def test_multiple_records(self):
        # parent fields only appear on the first exported sub-record row
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value/value']),
            [
                [u'4', u'4'],
                [u'', u'42'],
                [u'', u'36'],
                [u'', u'4'],
                [u'', u'13'],
            ])

    def test_multiple_records_name(self):
        # without a sub-field, all children collapse to one comma-joined cell
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value']),
            [[
                u'4', u','.join(self.names)
            ]])

    def test_multiple_records_id(self):
        export = self.export(self.commands, fields=['const', 'value/.id'])
        O2M_c = self.registry('export.one2many.child')
        ids = O2M_c.browse(self.cr, openerp.SUPERUSER_ID,
                           O2M_c.search(self.cr, openerp.SUPERUSER_ID, []))
        self.assertEqual(
            export,
            [
                ['4', str(ids[0].id)],
                ['', str(ids[1].id)],
                ['', str(ids[2].id)],
                ['', str(ids[3].id)],
                ['', str(ids[4].id)],
            ])

    def test_multiple_records_with_name_before(self):
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value', 'value/value']),
            [[ # exports sub-fields of very first o2m
                u'4', u','.join(self.names), u'4'
            ]])

    def test_multiple_records_with_name_after(self):
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value/value', 'value']),
            [ # completely ignores name_get request
                [u'4', u'4', ''],
                ['', u'42', ''],
                ['', u'36', ''],
                ['', u'4', ''],
                ['', u'13', ''],
            ])

    def test_multiple_subfields_neighbour(self):
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value/str','value/value']),
            [
                [u'4', u'record1', u'4'],
                ['', u'record2', u'42'],
                ['', u'record3', u'36'],
                ['', u'record4', u'4'],
                ['', u'record5', u'13'],
            ])

    def test_multiple_subfields_separated(self):
        # sub-field columns need not be adjacent in the requested field list
        self.assertEqual(
            self.export(self.commands, fields=['value/str', 'const', 'value/value']),
            [
                [u'record1', u'4', u'4'],
                [u'record2', '', u'42'],
                [u'record3', '', u'36'],
                [u'record4', '', u'4'],
                [u'record5', '', u'13'],
            ])
class test_o2m_multiple(CreatorCase):
    """Export behaviour with two one2many fields on the same record."""
    model_name = 'export.one2many.multiple'

    def make(self, value=None, **values):
        """Create a record from ``values`` (``value`` merged in if given)."""
        if value is not None:
            values['value'] = value
        # renamed from ``id`` to avoid shadowing the builtin
        record_id = self.model.create(self.cr, openerp.SUPERUSER_ID, values)
        return self.model.browse(self.cr, openerp.SUPERUSER_ID, [record_id])[0]

    def export(self, value=None, fields=('child1', 'child2',), context=None, **values):
        """Create a record and export the requested ``fields`` for it."""
        record = self.make(value, **values)
        return record._BaseModel__export_rows([f.split('/') for f in fields])

    def test_empty(self):
        self.assertEqual(
            self.export(child1=False, child2=False),
            [[False, False]])

    def test_single_per_side(self):
        self.assertEqual(
            self.export(child1=False, child2=[(0, False, {'value': 42})]),
            [[False, u'export.one2many.child.2:42']])
        self.assertEqual(
            self.export(child1=[(0, False, {'value': 43})], child2=False),
            [[u'export.one2many.child.1:43', False]])
        self.assertEqual(
            self.export(child1=[(0, False, {'value': 43})],
                        child2=[(0, False, {'value': 42})]),
            [[u'export.one2many.child.1:43', u'export.one2many.child.2:42']])

    def test_single_integrate_subfield(self):
        fields = ['const', 'child1/value', 'child2/value']
        self.assertEqual(
            self.export(child1=False, child2=[(0, False, {'value': 42})],
                        fields=fields),
            [[u'36', False, u'42']])
        self.assertEqual(
            self.export(child1=[(0, False, {'value': 43})], child2=False,
                        fields=fields),
            [[u'36', u'43', False]])
        self.assertEqual(
            self.export(child1=[(0, False, {'value': 43})],
                        child2=[(0, False, {'value': 42})],
                        fields=fields),
            [[u'36', u'43', u'42']])

    def test_multiple(self):
        """ With two "concurrent" o2ms, exports the first line combined, then
        exports the rows for the first o2m, then the rows for the second o2m.
        """
        fields = ['const', 'child1/value', 'child2/value']
        child1 = [(0, False, {'value': v, 'str': 'record%.02d' % index})
                  for index, v in zip(itertools.count(), [4, 42, 36, 4, 13])]
        child2 = [(0, False, {'value': v, 'str': 'record%.02d' % index})
                  for index, v in zip(itertools.count(10), [8, 12, 8, 55, 33, 13])]
        self.assertEqual(
            self.export(child1=child1, child2=False, fields=fields),
            [
                [u'36', u'4', False],
                ['', u'42', ''],
                ['', u'36', ''],
                ['', u'4', ''],
                ['', u'13', ''],
            ])
        self.assertEqual(
            self.export(child1=False, child2=child2, fields=fields),
            [
                [u'36', False, u'8'],
                ['', '', u'12'],
                ['', '', u'8'],
                ['', '', u'55'],
                ['', '', u'33'],
                ['', '', u'13'],
            ])
        self.assertEqual(
            self.export(child1=child1, child2=child2, fields=fields),
            [
                [u'36', u'4', u'8'],
                ['', u'42', ''],
                ['', u'36', ''],
                ['', u'4', ''],
                ['', u'13', ''],
                ['', '', u'12'],
                ['', '', u'8'],
                ['', '', u'55'],
                ['', '', u'33'],
                ['', '', u'13'],
            ])
class test_m2m(CreatorCase):
    """Export behaviour of many2many fields."""
    model_name = 'export.many2many'
    # create-commands for five linked records
    commands = [
        (0, False, {'value': 4, 'str': 'record000'}),
        (0, False, {'value': 42, 'str': 'record001'}),
        (0, False, {'value': 36, 'str': 'record010'}),
        (0, False, {'value': 4, 'str': 'record011'}),
        (0, False, {'value': 13, 'str': 'record100'}),
    ]
    # name_get results expected for those records
    names = [
        u'export.many2many.other:%d' % d['value']
        for c, _, d in commands
    ]

    def test_empty(self):
        self.assertEqual(
            self.export(False),
            [[False]])

    def test_single(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})]),
            # name_get result
            [[u'export.many2many.other:42']])

    def test_single_subfield(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})],
                        fields=['value', 'value/value']),
            [[u'export.many2many.other:42', u'42']])

    def test_integrate_one_in_parent(self):
        self.assertEqual(
            self.export([(0, False, {'value': 42})],
                        fields=['const', 'value/value']),
            [[u'4', u'42']])

    def test_multiple_records(self):
        # parent fields only appear on the first exported sub-record row
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value/value']),
            [
                [u'4', u'4'],
                [u'', u'42'],
                [u'', u'36'],
                [u'', u'4'],
                [u'', u'13'],
            ])

    def test_multiple_records_name(self):
        self.assertEqual(
            self.export(self.commands, fields=['const', 'value']),
            [[ # FIXME: hardcoded comma, import uses config.csv_internal_sep
               # resolution: remove configurable csv_internal_sep
                u'4', u','.join(self.names)
            ]])

# the remaining many2many cases behave like their one2many (test_o2m)
# equivalents, so they are not duplicated here
class test_function(CreatorCase):
    """Export behaviour of function fields."""
    model_name = 'export.function'

    def test_value(self):
        """ Exports value normally returned by accessing the function field
        """
        # the function field's computed result (u'3'), not the stored 42
        self.assertEqual(
            self.export(42),
            [[u'3']])
|
suncycheng/intellij-community | refs/heads/master | python/testData/codeInsight/controlflow/tryexceptelsefinally.py | 83 | d = dict()
# Control-flow test fixture (Python 2): exercises all four clauses of a
# try statement -- except, else, finally.  The exact statements (and even
# the 'excuting' typo in the last message) are part of the fixture and
# must not be "fixed".
try:
    v = d['key']
except KeyError:
    # runs only when the lookup raised
    print 'element not found'
else:
    # runs only when no exception was raised
    print 'element value {0}'.format(v)
finally:
    # always runs
    print 'excuting finally clause'
mozilla/stoneridge | refs/heads/master | python/src/Lib/encodings/raw_unicode_escape.py | 852 | """ Python 'raw-unicode-escape' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless 'raw-unicode-escape' codec, backed directly by the C
    implementations exposed by the ``codecs`` module.
    """
    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = codecs.raw_unicode_escape_encode
    decode = codecs.raw_unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder for 'raw-unicode-escape'.

    Each chunk is encoded independently by the C-level helper; no state is
    carried between calls.
    """
    def encode(self, input, final=False):
        data, _consumed = codecs.raw_unicode_escape_encode(input, self.errors)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder for 'raw-unicode-escape'.

    Each chunk is decoded independently by the C-level helper; no state is
    carried between calls.
    """
    def decode(self, input, final=False):
        text, _consumed = codecs.raw_unicode_escape_decode(input, self.errors)
        return text
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer: inherits encode() from Codec, buffering from StreamWriter."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader: inherits decode() from Codec, buffering from StreamReader."""
    pass
### encodings module API

def getregentry():
    """Return the CodecInfo the ``encodings`` package uses to register this
    codec under the name 'raw-unicode-escape'.
    """
    return codecs.CodecInfo(
        name='raw-unicode-escape',
        encode=Codec.encode,
        decode=Codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
|
lantip/aws-filemanager | refs/heads/master | cloud_browser/tests/__init__.py | 1 | # The code of class AWSMockServiceTestCase is from the boto projet:
# https://github.com/boto/boto/blob/develop/tests/unit/__init__.py
# The code of loding unittest from multiple files refers the code in:
# http://stackoverflow.com/questions/6248510
#
# Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import unittest
from boto.compat import http_client
import mock
class AWSMockServiceTestCase(unittest.TestCase):
    """Base class for mocking aws services.

    Subclasses set ``connection_class`` to the boto connection under test;
    the HTTPS connection is replaced with a mock whose responses are queued
    via ``set_http_response``, and the outgoing request is captured in
    ``self.actual_request``.
    """

    # This param is used by the unittest module to display a full
    # diff when assert*Equal methods produce an error message.
    maxDiff = None

    # Subclasses must override with the boto connection class under test.
    connection_class = None

    def setUp(self):  # pylint: disable=invalid-name
        self.https_connection = mock.Mock(spec=http_client.HTTPSConnection)
        self.https_connection.debuglevel = 0
        self.https_connection_factory = (
            mock.Mock(return_value=self.https_connection), ())
        self.service_connection = self.create_service_connection(
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key')
        self.initialize_service_connection()

    def initialize_service_connection(self):
        """Install a spy around the connection's request executor (_mexe)."""
        self.actual_request = None
        # pylint: disable=protected-access, attribute-defined-outside-init
        self.original_mexe = self.service_connection._mexe
        # pylint: disable=protected-access
        self.service_connection._mexe = self._mexe_spy

    def create_service_connection(self, **kwargs):
        if self.connection_class is None:
            raise ValueError("The connection_class class attribute must be "
                             "set to a non-None value.")
        # pylint: disable=not-callable
        return self.connection_class(**kwargs)

    def _mexe_spy(self, request, *args, **kwargs):
        """Record the outgoing request, then defer to the real executor."""
        # pylint: disable=attribute-defined-outside-init
        self.actual_request = request
        return self.original_mexe(request, *args, **kwargs)

    def create_response(self, status_code, reason='', header=None, body=None):
        """Build a mock HTTPResponse.

        ``header`` is a list of (name, value) pairs; ``body`` defaults to
        ``default_body()``.  The default was changed from a mutable ``[]``
        literal to ``None`` so the default list cannot be shared or mutated
        across calls.
        """
        if header is None:
            header = []
        if body is None:
            body = self.default_body()
        response = mock.Mock(spec=http_client.HTTPResponse)
        response.status = status_code
        response.read.return_value = body
        response.reason = reason
        response.getheaders.return_value = header
        response.msg = dict(header)

        def overwrite_header(arg, default=None):
            # Mimic HTTPResponse.getheader: named header value or default.
            header_dict = dict(header)
            if arg in header_dict:
                return header_dict[arg]
            else:
                return default
        response.getheader.side_effect = overwrite_header

        return response

    def set_http_response(self, status_code, reason='', header=None, body=None):
        """Queue a mock response on the mocked HTTPS connection."""
        if header is None:
            header = []
        http_response = self.create_response(status_code, reason, header, body)
        self.https_connection.getresponse.return_value = http_response

    def default_body(self):  # pylint: disable=no-self-use
        """Body used when ``create_response`` is not given one."""
        return ''
import pkgutil
def suite():
    """Discover and return every test module under cloud_browser.tests."""
    loader = unittest.TestLoader()
    return loader.discover("cloud_browser.tests", pattern="*.py")
# Hoist every TestCase subclass found in this package's submodules into this
# module's namespace -- presumably so a runner that only inspects this
# package module still collects them (confirm against the project's runner).
# Guarded on __path__ so the snippet is a no-op outside a package context.
if '__path__' in locals():
    for loader, module_name, is_pkg in pkgutil.walk_packages(__path__):
        # NOTE(review): find_module/load_module are deprecated importlib
        # APIs; kept as-is for the Python 2 compatibility this file targets.
        module = loader.find_module(module_name).load_module(module_name)
        for name in dir(module):
            obj = getattr(module, name)
            if (isinstance(obj, type) and
                issubclass(obj, unittest.case.TestCase)):
                # pylint: disable=exec-statement
                # bind the class under its own name at module level
                exec ('%s = obj' % obj.__name__)
|
endlessm/chromium-browser | refs/heads/master | third_party/webgl/src/sdk/tests/deqp/functional/gles3/transformfeedback/transformfeedback_test_generator.py | 6 | #!/usr/bin/env python
# Copyright (c) 2019 The Khronos Group Inc.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE.txt file.
"""
Generator for textureformat* tests.
This file needs to be run in its folder.
"""
import sys
_DO_NOT_EDIT_WARNING = """<!--
This file is auto-generated from textureshadow_test_generator.py
DO NOT EDIT!
-->
"""
_HTML_TEMPLATE = """<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>WebGL Transform Feedback Tests</title>
<link rel="stylesheet" href="../../../../resources/js-test-style.css"/>
<script src="../../../../js/js-test-pre.js"></script>
<script src="../../../../js/webgl-test-utils.js"></script>
<script src="../../../../closure-library/closure/goog/base.js"></script>
<script src="../../../deqp-deps.js"></script>
<script>goog.require('functional.gles3.es3fTransformFeedbackTests');</script>
<script>goog.require('framework.opengl.gluVarTypeUtil');</script>
</head>
<body>
<div id="description"></div>
<div id="console"></div>
<canvas id="canvas" width="320" height="240"></canvas>
<script>
var wtu = WebGLTestUtils;
var gl = wtu.create3DContext('canvas', {preserveDrawingBuffer: true}, 2);
functional.gles3.es3fTransformFeedbackTests.run(gl, [%(start)s, %(end)s]);
</script>
</body>
</html>
"""
_GROUPS = [
'position',
'point_size',
'basic_types_separate_points',
'basic_types_separate_lines',
'basic_types_separate_triangles',
'basic_types_interleaved_points',
'basic_types_interleaved_lines',
'basic_types_interleaved_triangles',
'array_separate_points',
'array_separate_lines',
'array_separate_triangles',
'array_interleaved_points',
'array_interleaved_lines',
'array_interleaved_triangles',
'array_element_separate_points',
'array_element_separate_lines',
'array_element_separate_triangles',
'array_element_interleaved_points',
'array_element_interleaved_lines',
'array_element_interleaved_triangles',
'interpolation_smooth',
'interpolation_flat',
'interpolation_centroid',
'random_separate_points',
'random_separate_lines',
'random_separate_triangles',
'random_interleaved_points',
'random_interleaved_lines',
'random_interleaved_triangles'
]
def GenerateFilename(group):
    """Return the HTML filename for a test group."""
    return group + ".html"
def WriteTest(filename, start, end):
    """Write one generated test page covering groups [start, end).

    Fixes two defects in the original: ``file.close`` was referenced without
    parentheses, so the handle was never actually closed (and ``file``
    shadowed the builtin); a ``with`` block now guarantees closure.  The
    file is opened in text mode since the content is HTML text.
    """
    with open(filename, "w") as out:
        out.write(_DO_NOT_EDIT_WARNING)
        out.write(_HTML_TEMPLATE % {
            'start': start,
            'end': end
        })
def GenerateTests():
    """Generate all test pages; return the list of filenames written.

    Replaces the ``xrange(len(_GROUPS))`` index loop with ``enumerate``,
    which is the idiomatic form and also works unchanged on Python 3
    (where ``xrange`` does not exist).
    """
    filelist = []
    for i, groupname in enumerate(_GROUPS):
        filename = GenerateFilename(groupname)
        filelist.append(filename)
        # each page runs exactly one group: the half-open range [i, i+1)
        WriteTest(filename, i, i + 1)
    return filelist
def GenerateTestList(filelist):
    """Write the test manifest (00_test_list.txt), one filename per line.

    Fixes the original's ``file.close`` (missing parentheses -- the handle
    was never closed) by using a ``with`` block, and opens in text mode
    since the manifest is plain text.
    """
    with open("00_test_list.txt", "w") as out:
        out.write('\n'.join(filelist))
def main(argv):
    """Entry point: emit every generated test page plus the manifest."""
    GenerateTestList(GenerateTests())

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
hiway/micropython | refs/heads/master | tests/basics/string1.py | 92 | # basic strings
# Expected-output test script for basic str behaviour (tests/basics).
# NOTE(review): each print's output is compared against a reference run,
# so the exact expressions -- including deliberate oddities like the
# unrecognised '\z' escape -- must not be "fixed".
# literals
print('abc')
print(r'abc')
print(u'abc')
print(repr('\a\b\t\n\v\f\r'))
print('\z') # unrecognised escape char
# construction
print(str())
print(str('abc'))
# inplace addition
x = 'abc'
print(x)
x += 'def'
print(x)
# binary ops
print('123' + "456")
print('123' * 5)
try:
    '123' * '1'
except TypeError:
    print('TypeError')
try:
    '123' + 1
except TypeError:
    print('TypeError')
# subscription
print('abc'[1])
print('abc'[-1])
try:
    'abc'[100]
except IndexError:
    print('IndexError')
try:
    'abc'[-4]
except IndexError:
    print('IndexError')
# iter
print(list('str'))
# comparison
print('123' + '789' == '123789')
print('a' + 'b' != 'a' + 'b ')
print('1' + '2' > '2')
print('1' + '2' < '2')
# printing quote char in string
print(repr('\'\"'))
|
feilongfl/micropython | refs/heads/master | tests/basics/list_copy.py | 119 | # list copy tests
# Expected-output test for list.copy(); the copy is shallow, so the inner
# list is shared between a and b.
a = [1, 2, []]
b = a.copy()
a[-1].append(1)  # mutation of the shared inner list: visible through both
a.append(4)      # top-level append: visible only through a
print(a)
print(b)
|
hanv89/api-client-python | refs/heads/master | lib/oauth2client/service_account.py | 9 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""oauth2client Service account credentials class."""
import base64
import copy
import datetime
import httplib2
import json
import time
from oauth2client import GOOGLE_REVOKE_URI
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client._helpers import _json_encode
from oauth2client._helpers import _from_bytes
from oauth2client._helpers import _urlsafe_b64encode
from oauth2client import util
from oauth2client.client import _apply_user_agent
from oauth2client.client import _initialize_headers
from oauth2client.client import AccessTokenInfo
from oauth2client.client import AssertionCredentials
from oauth2client.client import clean_headers
from oauth2client.client import EXPIRY_FORMAT
from oauth2client.client import GoogleCredentials
from oauth2client.client import SERVICE_ACCOUNT
from oauth2client.client import TokenRevokeError
from oauth2client.client import _UTCNOW
from oauth2client import crypt
_PASSWORD_DEFAULT = 'notasecret'
_PKCS12_KEY = '_private_key_pkcs12'
_PKCS12_ERROR = r"""
This library only implements PKCS#12 support via the pyOpenSSL library.
Either install pyOpenSSL, or please convert the .p12 file
to .pem format:
$ cat key.p12 | \
> openssl pkcs12 -nodes -nocerts -passin pass:notasecret | \
> openssl rsa > key.pem
"""
class ServiceAccountCredentials(AssertionCredentials):
"""Service Account credential for OAuth 2.0 signed JWT grants.
Supports
* JSON keyfile (typically contains a PKCS8 key stored as
PEM text)
* ``.p12`` key (stores PKCS12 key and certificate)
Makes an assertion to server using a signed JWT assertion in exchange
for an access token.
This credential does not require a flow to instantiate because it
represents a two legged flow, and therefore has all of the required
information to generate and refresh its own access tokens.
Args:
service_account_email: string, The email associated with the
service account.
signer: ``crypt.Signer``, A signer which can be used to sign content.
scopes: List or string, (Optional) Scopes to use when acquiring
an access token.
private_key_id: string, (Optional) Private key identifier. Typically
only used with a JSON keyfile. Can be sent in the
header of a JWT token assertion.
client_id: string, (Optional) Client ID for the project that owns the
service account.
user_agent: string, (Optional) User agent to use when sending
request.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
kwargs: dict, Extra key-value pairs (both strings) to send in the
payload body when making an assertion.
"""
MAX_TOKEN_LIFETIME_SECS = 3600
"""Max lifetime of the token (one hour, in seconds)."""
NON_SERIALIZED_MEMBERS = (
frozenset(['_signer']) |
AssertionCredentials.NON_SERIALIZED_MEMBERS)
"""Members that aren't serialized when object is converted to JSON."""
# Can be over-ridden by factory constructors. Used for
# serialization/deserialization purposes.
_private_key_pkcs8_pem = None
_private_key_pkcs12 = None
_private_key_password = None
    def __init__(self,
                 service_account_email,
                 signer,
                 scopes='',
                 private_key_id=None,
                 client_id=None,
                 user_agent=None,
                 token_uri=GOOGLE_TOKEN_URI,
                 revoke_uri=GOOGLE_REVOKE_URI,
                 **kwargs):
        """Constructor; arguments are documented on the class docstring."""
        # access_token starts out None; it is acquired via the assertion flow.
        super(ServiceAccountCredentials, self).__init__(
            None, user_agent=user_agent, token_uri=token_uri,
            revoke_uri=revoke_uri)
        self._service_account_email = service_account_email
        self._signer = signer
        # normalize list-or-string scopes into a single string
        self._scopes = util.scopes_to_string(scopes)
        self._private_key_id = private_key_id
        self.client_id = client_id
        self._user_agent = user_agent
        # extra key/value pairs sent in the assertion payload body
        self._kwargs = kwargs
    def _to_json(self, strip, to_serialize=None):
        """Utility function that creates JSON repr. of a credentials object.

        Over-ride is needed since PKCS#12 keys will not in general be JSON
        serializable.

        Args:
            strip: array, An array of names of members to exclude from the
                   JSON.
            to_serialize: dict, (Optional) The properties for this object
                          that will be serialized. This allows callers to modify
                          before serializing.

        Returns:
            string, a JSON representation of this instance, suitable to pass to
            from_json().
        """
        if to_serialize is None:
            # copy so the base-class serialization cannot mutate __dict__
            to_serialize = copy.copy(self.__dict__)
        pkcs12_val = to_serialize.get(_PKCS12_KEY)
        if pkcs12_val is not None:
            # raw PKCS#12 key bytes are not JSON serializable; store them
            # base64-encoded instead
            to_serialize[_PKCS12_KEY] = base64.b64encode(pkcs12_val)
        return super(ServiceAccountCredentials, self)._to_json(
            strip, to_serialize=to_serialize)
    @classmethod
    def _from_parsed_json_keyfile(cls, keyfile_dict, scopes,
                                  token_uri=None, revoke_uri=None):
        """Helper for factory constructors from JSON keyfile.

        Args:
            keyfile_dict: dict-like object, The parsed dictionary-like object
                          containing the contents of the JSON keyfile.
            scopes: List or string, Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for OAuth 2.0 provider token endpoint.
                       If unset and not present in keyfile_dict, defaults
                       to Google's endpoints.
            revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
                        If unset and not present in keyfile_dict, defaults
                        to Google's endpoints.

        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile contents.

        Raises:
            ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
            KeyError, if one of the expected keys is not present in
            the keyfile.
        """
        creds_type = keyfile_dict.get('type')
        if creds_type != SERVICE_ACCOUNT:
            raise ValueError('Unexpected credentials type', creds_type,
                             'Expected', SERVICE_ACCOUNT)
        # direct [] lookups raise the documented KeyError on missing
        # required keys
        service_account_email = keyfile_dict['client_email']
        private_key_pkcs8_pem = keyfile_dict['private_key']
        private_key_id = keyfile_dict['private_key_id']
        client_id = keyfile_dict['client_id']
        # explicit arguments win; otherwise fall back to the keyfile, then
        # to Google's endpoints
        if not token_uri:
            token_uri = keyfile_dict.get('token_uri', GOOGLE_TOKEN_URI)
        if not revoke_uri:
            revoke_uri = keyfile_dict.get('revoke_uri', GOOGLE_REVOKE_URI)
        signer = crypt.Signer.from_string(private_key_pkcs8_pem)
        credentials = cls(service_account_email, signer, scopes=scopes,
                          private_key_id=private_key_id,
                          client_id=client_id, token_uri=token_uri,
                          revoke_uri=revoke_uri)
        # retain the PEM so the credentials can be re-serialized later
        credentials._private_key_pkcs8_pem = private_key_pkcs8_pem
        return credentials
    @classmethod
    def from_json_keyfile_name(cls, filename, scopes='',
                               token_uri=None, revoke_uri=None):
        """Factory constructor from JSON keyfile by name.

        Args:
            filename: string, The location of the keyfile.
            scopes: List or string, (Optional) Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for OAuth 2.0 provider token endpoint.
                       If unset and not present in the key file, defaults
                       to Google's endpoints.
            revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
                        If unset and not present in the key file, defaults
                        to Google's endpoints.

        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile.

        Raises:
            ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
            KeyError, if one of the expected keys is not present in
            the keyfile.
        """
        # load eagerly; parsing/validation is delegated to the shared helper
        with open(filename, 'r') as file_obj:
            client_credentials = json.load(file_obj)
        return cls._from_parsed_json_keyfile(client_credentials, scopes,
                                             token_uri=token_uri,
                                             revoke_uri=revoke_uri)
    @classmethod
    def from_json_keyfile_dict(cls, keyfile_dict, scopes='',
                               token_uri=None, revoke_uri=None):
        """Factory constructor from parsed JSON keyfile.

        Args:
            keyfile_dict: dict-like object, The parsed dictionary-like object
                          containing the contents of the JSON keyfile.
            scopes: List or string, (Optional) Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for OAuth 2.0 provider token endpoint.
                       If unset and not present in keyfile_dict, defaults
                       to Google's endpoints.
            revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
                        If unset and not present in keyfile_dict, defaults
                        to Google's endpoints.

        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile.

        Raises:
            ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
            KeyError, if one of the expected keys is not present in
            the keyfile.
        """
        # thin wrapper: all work happens in _from_parsed_json_keyfile
        return cls._from_parsed_json_keyfile(keyfile_dict, scopes,
                                             token_uri=token_uri,
                                             revoke_uri=revoke_uri)
    @classmethod
    def _from_p12_keyfile_contents(cls, service_account_email,
                                   private_key_pkcs12,
                                   private_key_password=None, scopes='',
                                   token_uri=GOOGLE_TOKEN_URI,
                                   revoke_uri=GOOGLE_REVOKE_URI):
        """Factory constructor from PKCS#12 keyfile contents.
        Args:
            service_account_email: string, The email associated with the
                                   service account.
            private_key_pkcs12: string, The contents of a PKCS#12 keyfile.
            private_key_password: string, (Optional) Password for PKCS#12
                                  private key. Defaults to ``notasecret``.
            scopes: List or string, (Optional) Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for token endpoint. For convenience defaults
                       to Google's endpoints but any OAuth 2.0 provider can be
                       used.
            revoke_uri: string, URI for revoke endpoint. For convenience
                        defaults to Google's endpoints but any OAuth 2.0
                        provider can be used.
        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile.
        Raises:
            NotImplementedError if pyOpenSSL is not installed / not the
            active crypto library.
        """
        if private_key_password is None:
            # Google-issued .p12 files all use this well-known password.
            private_key_password = _PASSWORD_DEFAULT
        # Only the pyOpenSSL signer supports PKCS#12 key material.
        if crypt.Signer is not crypt.OpenSSLSigner:
            raise NotImplementedError(_PKCS12_ERROR)
        signer = crypt.Signer.from_string(private_key_pkcs12,
                                          private_key_password)
        credentials = cls(service_account_email, signer, scopes=scopes,
                          token_uri=token_uri, revoke_uri=revoke_uri)
        # Keep the raw key material so the credentials can be re-serialized
        # (see serialization_data / from_json).
        credentials._private_key_pkcs12 = private_key_pkcs12
        credentials._private_key_password = private_key_password
        return credentials
    @classmethod
    def from_p12_keyfile(cls, service_account_email, filename,
                         private_key_password=None, scopes='',
                         token_uri=GOOGLE_TOKEN_URI,
                         revoke_uri=GOOGLE_REVOKE_URI):
        """Factory constructor from a PKCS#12 keyfile by name.
        Args:
            service_account_email: string, The email associated with the
                                   service account.
            filename: string, The location of the PKCS#12 keyfile.
            private_key_password: string, (Optional) Password for PKCS#12
                                  private key. Defaults to ``notasecret``.
            scopes: List or string, (Optional) Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for token endpoint. For convenience defaults
                       to Google's endpoints but any OAuth 2.0 provider can be
                       used.
            revoke_uri: string, URI for revoke endpoint. For convenience
                        defaults to Google's endpoints but any OAuth 2.0
                        provider can be used.
        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile.
        Raises:
            NotImplementedError if pyOpenSSL is not installed / not the
            active crypto library.
        """
        # PKCS#12 is a binary format, hence the 'rb' mode.
        with open(filename, 'rb') as file_obj:
            private_key_pkcs12 = file_obj.read()
        return cls._from_p12_keyfile_contents(
            service_account_email, private_key_pkcs12,
            private_key_password=private_key_password, scopes=scopes,
            token_uri=token_uri, revoke_uri=revoke_uri)
    @classmethod
    def from_p12_keyfile_buffer(cls, service_account_email, file_buffer,
                                private_key_password=None, scopes='',
                                token_uri=GOOGLE_TOKEN_URI,
                                revoke_uri=GOOGLE_REVOKE_URI):
        """Factory constructor from a PKCS#12 keyfile in a buffer.
        Args:
            service_account_email: string, The email associated with the
                                   service account.
            file_buffer: stream, A buffer that implements ``read()``
                         and contains the PKCS#12 key contents.
            private_key_password: string, (Optional) Password for PKCS#12
                                  private key. Defaults to ``notasecret``.
            scopes: List or string, (Optional) Scopes to use when acquiring an
                    access token.
            token_uri: string, URI for token endpoint. For convenience defaults
                       to Google's endpoints but any OAuth 2.0 provider can be
                       used.
            revoke_uri: string, URI for revoke endpoint. For convenience
                        defaults to Google's endpoints but any OAuth 2.0
                        provider can be used.
        Returns:
            ServiceAccountCredentials, a credentials object created from
            the keyfile.
        Raises:
            NotImplementedError if pyOpenSSL is not installed / not the
            active crypto library.
        """
        private_key_pkcs12 = file_buffer.read()
        return cls._from_p12_keyfile_contents(
            service_account_email, private_key_pkcs12,
            private_key_password=private_key_password, scopes=scopes,
            token_uri=token_uri, revoke_uri=revoke_uri)
    def _generate_assertion(self):
        """Generate the signed JWT assertion used to request an access token."""
        now = int(time.time())
        payload = {
            'aud': self.token_uri,
            'scope': self._scopes,
            'iat': now,
            'exp': now + self.MAX_TOKEN_LIFETIME_SECS,
            'iss': self._service_account_email,
        }
        # Extra claims (e.g. ``sub`` from create_delegated) extend or
        # override the defaults above.
        payload.update(self._kwargs)
        return crypt.make_signed_jwt(self._signer, payload,
                                     key_id=self._private_key_id)
    def sign_blob(self, blob):
        """Cryptographically sign a blob (of bytes).
        Implements abstract method
        :meth:`oauth2client.client.AssertionCredentials.sign_blob`.
        Args:
            blob: bytes, Message to be signed.
        Returns:
            tuple, A pair of the private key ID used to sign the blob and
            the signed contents.
        """
        # The key ID is returned so callers can advertise which public key
        # verifies the signature.
        return self._private_key_id, self._signer.sign(blob)
    @property
    def service_account_email(self):
        """Get the email for the current service account.
        Returns:
            string, The email associated with the service account.
        """
        # Read-only view of the value captured at construction time.
        return self._service_account_email
    @property
    def serialization_data(self):
        """Dict of key material shaped like a JSON service-account keyfile."""
        # NOTE: This is only useful for JSON keyfile (PKCS#12 credentials
        # have no PEM private key to expose here).
        return {
            'type': 'service_account',
            'client_email': self._service_account_email,
            'private_key_id': self._private_key_id,
            'private_key': self._private_key_pkcs8_pem,
            'client_id': self.client_id,
        }
    @classmethod
    def from_json(cls, json_data):
        """Deserialize a JSON-serialized instance.
        Inverse to :meth:`to_json`.
        Args:
            json_data: dict or string, Serialized JSON (as a string or an
                       already parsed dictionary) representing a credential.
        Returns:
            ServiceAccountCredentials from the serialized data.
        """
        if not isinstance(json_data, dict):
            json_data = json.loads(_from_bytes(json_data))
        private_key_pkcs8_pem = None
        pkcs12_val = json_data.get(_PKCS12_KEY)
        password = None
        if pkcs12_val is None:
            # JSON-keyfile credential: the PEM private key is stored directly.
            private_key_pkcs8_pem = json_data['_private_key_pkcs8_pem']
            signer = crypt.Signer.from_string(private_key_pkcs8_pem)
        else:
            # NOTE: This assumes that private_key_pkcs8_pem is not also
            #       in the serialized data. This would be very incorrect
            #       state.
            # PKCS#12 credential: key bytes were base64-encoded on the way out.
            pkcs12_val = base64.b64decode(pkcs12_val)
            password = json_data['_private_key_password']
            signer = crypt.Signer.from_string(pkcs12_val, password)
        credentials = cls(
            json_data['_service_account_email'],
            signer,
            scopes=json_data['_scopes'],
            private_key_id=json_data['_private_key_id'],
            client_id=json_data['client_id'],
            user_agent=json_data['_user_agent'],
            **json_data['_kwargs']
        )
        # Restore raw key material so the instance can be serialized again.
        if private_key_pkcs8_pem is not None:
            credentials._private_key_pkcs8_pem = private_key_pkcs8_pem
        if pkcs12_val is not None:
            credentials._private_key_pkcs12 = pkcs12_val
        if password is not None:
            credentials._private_key_password = password
        # Restore the mutable token state captured at serialization time.
        credentials.invalid = json_data['invalid']
        credentials.access_token = json_data['access_token']
        credentials.token_uri = json_data['token_uri']
        credentials.revoke_uri = json_data['revoke_uri']
        token_expiry = json_data.get('token_expiry', None)
        if token_expiry is not None:
            credentials.token_expiry = datetime.datetime.strptime(
                token_expiry, EXPIRY_FORMAT)
        return credentials
def create_scoped_required(self):
return not self._scopes
    def create_scoped(self, scopes):
        """Return a copy of these credentials with ``scopes`` applied."""
        result = self.__class__(self._service_account_email,
                                self._signer,
                                scopes=scopes,
                                private_key_id=self._private_key_id,
                                client_id=self.client_id,
                                user_agent=self._user_agent,
                                **self._kwargs)
        result.token_uri = self.token_uri
        result.revoke_uri = self.revoke_uri
        # Carry over raw key material so the copy can be re-serialized.
        result._private_key_pkcs8_pem = self._private_key_pkcs8_pem
        result._private_key_pkcs12 = self._private_key_pkcs12
        result._private_key_password = self._private_key_password
        return result
    def create_with_claims(self, claims):
        """Create credentials that specify additional claims.
        Args:
            claims: dict, key-value pairs for claims.
        Returns:
            ServiceAccountCredentials, a copy of the current service account
            credentials with updated claims to use when obtaining access tokens.
        """
        # New claims are merged over the existing ones; duplicates win.
        new_kwargs = dict(self._kwargs)
        new_kwargs.update(claims)
        result = self.__class__(self._service_account_email,
                                self._signer,
                                scopes=self._scopes,
                                private_key_id=self._private_key_id,
                                client_id=self.client_id,
                                user_agent=self._user_agent,
                                **new_kwargs)
        result.token_uri = self.token_uri
        result.revoke_uri = self.revoke_uri
        # Carry over raw key material so the copy can be re-serialized.
        result._private_key_pkcs8_pem = self._private_key_pkcs8_pem
        result._private_key_pkcs12 = self._private_key_pkcs12
        result._private_key_password = self._private_key_password
        return result
    def create_delegated(self, sub):
        """Create credentials that act as domain-wide delegation of authority.
        Use the ``sub`` parameter as the subject to delegate on behalf of
        that user.
        For example::
            >>> account_sub = 'foo@email.com'
            >>> delegate_creds = creds.create_delegated(account_sub)
        Args:
            sub: string, An email address that this service account will
                 act on behalf of (via domain-wide delegation).
        Returns:
            ServiceAccountCredentials, a copy of the current service account
            updated to act on behalf of ``sub``.
        """
        # Implemented as a plain claim update: the JWT's 'sub' claim carries
        # the delegated subject.
        return self.create_with_claims({'sub': sub})
def _datetime_to_secs(utc_time):
    """Convert a naive UTC datetime to whole seconds since the Unix epoch.
    Microseconds are discarded, matching the historical behaviour of
    ``days * 86400 + seconds``.
    """
    # TODO(issue 298): use time_delta.total_seconds()
    # time_delta.total_seconds() not supported in Python 2.6
    unix_epoch = datetime.datetime(1970, 1, 1)
    delta = utc_time - unix_epoch
    return delta.days * 86400 + delta.seconds
class _JWTAccessCredentials(ServiceAccountCredentials):
    """Self signed JWT credentials.
    Makes an assertion to server using a self signed JWT from service account
    credentials. These credentials do NOT use OAuth 2.0 and instead
    authenticate directly.
    """
    _MAX_TOKEN_LIFETIME_SECS = 3600
    """Max lifetime of the token (one hour, in seconds)."""
    def __init__(self,
                 service_account_email,
                 signer,
                 scopes=None,
                 private_key_id=None,
                 client_id=None,
                 user_agent=None,
                 token_uri=GOOGLE_TOKEN_URI,
                 revoke_uri=GOOGLE_REVOKE_URI,
                 additional_claims=None):
        # ``scopes`` is accepted but not forwarded: JWT access credentials
        # are unscoped by definition (see create_scoped_required below).
        if additional_claims is None:
            additional_claims = {}
        super(_JWTAccessCredentials, self).__init__(
            service_account_email,
            signer,
            private_key_id=private_key_id,
            client_id=client_id,
            user_agent=user_agent,
            token_uri=token_uri,
            revoke_uri=revoke_uri,
            **additional_claims)
    def authorize(self, http):
        """Authorize an httplib2.Http instance with a JWT assertion.
        Unless specified, the 'aud' of the assertion will be the base
        uri of the request.
        Args:
            http: An instance of ``httplib2.Http`` or something that acts
                like it.
        Returns:
            A modified instance of http that was passed in.
        Example::
            h = httplib2.Http()
            h = credentials.authorize(h)
        """
        request_orig = http.request
        request_auth = super(_JWTAccessCredentials, self).authorize(http).request
        # The closure that will replace 'httplib2.Http.request'.
        def new_request(uri, method='GET', body=None, headers=None,
                        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                        connection_type=None):
            if 'aud' in self._kwargs:
                # A fixed audience was configured: reuse the cached token.
                # Preemptively refresh token, this is not done for OAuth2
                if self.access_token is None or self.access_token_expired:
                    self.refresh(None)
                return request_auth(uri, method, body,
                                    headers, redirections,
                                    connection_type)
            else:
                # If we don't have an 'aud' (audience) claim,
                # create a 1-time token with the uri root as the audience
                headers = _initialize_headers(headers)
                _apply_user_agent(headers, self.user_agent)
                # Strip the query string: the audience is the bare endpoint.
                uri_root = uri.split('?', 1)[0]
                token, unused_expiry = self._create_token({'aud': uri_root})
                headers['Authorization'] = 'Bearer ' + token
                return request_orig(uri, method, body,
                                    clean_headers(headers),
                                    redirections, connection_type)
        # Replace the request method with our own closure.
        http.request = new_request
        return http
    def get_access_token(self, http=None, additional_claims=None):
        """Create a signed jwt.
        Args:
            http: unused
            additional_claims: dict, additional claims to add to
                the payload of the JWT.
        Returns:
            An AccessTokenInfo with the signed jwt
        """
        if additional_claims is None:
            # Reuse (and lazily refresh) the cached default token.
            if self.access_token is None or self.access_token_expired:
                self.refresh(None)
            return AccessTokenInfo(access_token=self.access_token,
                                   expires_in=self._expires_in())
        else:
            # Create a 1 time token
            token, unused_expiry = self._create_token(additional_claims)
            return AccessTokenInfo(access_token=token,
                                   expires_in=self._MAX_TOKEN_LIFETIME_SECS)
    def revoke(self, http):
        """Cannot revoke JWTAccessCredentials tokens."""
        pass
    def create_scoped_required(self):
        # JWTAccessCredentials are unscoped by definition
        return True
    def create_scoped(self, scopes, token_uri=GOOGLE_TOKEN_URI,
                      revoke_uri=GOOGLE_REVOKE_URI):
        # Returns an OAuth2 credentials with the given scope
        result = ServiceAccountCredentials(self._service_account_email,
                                           self._signer,
                                           scopes=scopes,
                                           private_key_id=self._private_key_id,
                                           client_id=self.client_id,
                                           user_agent=self._user_agent,
                                           token_uri=token_uri,
                                           revoke_uri=revoke_uri,
                                           **self._kwargs)
        # Carry over raw key material so the copy can be re-serialized.
        if self._private_key_pkcs8_pem is not None:
            result._private_key_pkcs8_pem = self._private_key_pkcs8_pem
        if self._private_key_pkcs12 is not None:
            result._private_key_pkcs12 = self._private_key_pkcs12
        if self._private_key_password is not None:
            result._private_key_password = self._private_key_password
        return result
    def refresh(self, http):
        # http argument is unused; signing happens locally.
        self._refresh(None)
    def _refresh(self, http_request):
        self.access_token, self.token_expiry = self._create_token()
    def _create_token(self, additional_claims=None):
        """Sign a fresh JWT; returns (token_string, expiry_datetime)."""
        now = _UTCNOW()
        expiry = now + datetime.timedelta(seconds=self._MAX_TOKEN_LIFETIME_SECS)
        payload = {
            'iat': _datetime_to_secs(now),
            'exp': _datetime_to_secs(expiry),
            'iss': self._service_account_email,
            'sub': self._service_account_email
        }
        payload.update(self._kwargs)
        if additional_claims is not None:
            payload.update(additional_claims)
        jwt = crypt.make_signed_jwt(self._signer, payload,
                                    key_id=self._private_key_id)
        return jwt.decode('ascii'), expiry
|
dchaplinsky/declarations.com.ua | refs/heads/master | declarations_site/cms_pages/migrations/0024_auto_20191105_0145.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-11-04 23:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an English caption field to both home-page menu link models."""
    dependencies = [
        ('cms_pages', '0023_auto_20180419_2357'),
    ]
    operations = [
        migrations.AddField(
            model_name='homepagebottommenulink',
            name='caption_en',
            field=models.CharField(blank=True, max_length=255),
        ),
        migrations.AddField(
            model_name='homepagetopmenulink',
            name='caption_en',
            field=models.CharField(blank=True, max_length=255),
        ),
    ]
|
kntem/webdeposit | refs/heads/webdeposit-final | modules/miscutil/lib/inveniocfg_upgrader_model.py | 3 | # -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02D111-1307, USA.
"""
inveniocfg_upgrade database model.
"""
# General imports.
from invenio.sqlalchemyutils import db
# Create your models here.
class Upgrade(db.Model):
    """Represents an Upgrade record (one row per applied upgrade recipe)."""
    __tablename__ = 'upgrade'
    # Upgrade identifier; doubles as the primary key.
    upgrade = db.Column(db.String(255), primary_key=True, nullable=False)
    # When the upgrade was applied.
    applied = db.Column(db.DateTime, nullable=False)
|
2014cdbg4/2015cd_midterm | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/gc.py | 743 | """This module provides access to the garbage collector for reference cycles.
enable() -- Enable automatic garbage collection.
disable() -- Disable automatic garbage collection.
isenabled() -- Returns true if automatic collection is enabled.
collect() -- Do a full collection right now.
get_count() -- Return the current collection counts.
set_debug() -- Set debugging flags.
get_debug() -- Get debugging flags.
set_threshold() -- Set the collection thresholds.
get_threshold() -- Return the current the collection thresholds.
get_objects() -- Return a list of all objects tracked by the collector.
is_tracked() -- Returns true if a given object is tracked.
get_referrers() -- Return the list of objects that refer to an object.
get_referents() -- Return the list of objects that an object refers to.
"""
# NOTE: Every function below is a stub -- each body is a bare ``pass`` -- so
# calls are accepted for API compatibility but have no effect (returning None
# instead of the documented values).
DEBUG_COLLECTABLE = 2
# DEBUG_LEAK == DEBUG_COLLECTABLE | DEBUG_UNCOLLECTABLE | DEBUG_SAVEALL
# (2 | 4 | 32 == 38)
DEBUG_LEAK = 38
DEBUG_SAVEALL = 32
DEBUG_STATS = 1
DEBUG_UNCOLLECTABLE = 4
class __loader__:
    pass
callbacks = []
def collect(*args,**kw):
    """collect([generation]) -> n
    With no arguments, run a full collection. The optional argument
    may be an integer specifying which generation to collect. A ValueError
    is raised if the generation number is invalid.
    The number of unreachable objects is returned.
    """
    pass
def disable(*args,**kw):
    """disable() -> None
    Disable automatic garbage collection.
    """
    pass
def enable(*args,**kw):
    """enable() -> None
    Enable automatic garbage collection.
    """
    pass
garbage = []
def get_count(*args,**kw):
    """get_count() -> (count0, count1, count2)
    Return the current collection counts
    """
    pass
def get_debug(*args,**kw):
    """get_debug() -> flags
    Get the garbage collection debugging flags.
    """
    pass
def get_objects(*args,**kw):
    """get_objects() -> [...]
    Return a list of objects tracked by the collector (excluding the list
    returned).
    """
    pass
def get_referents(*args,**kw):
    """get_referents(*objs) -> list Return the list of objects that are directly referred to by objs."""
    pass
def get_referrers(*args,**kw):
    """get_referrers(*objs) -> list Return the list of objects that directly refer to any of objs."""
    pass
def get_threshold(*args,**kw):
    """get_threshold() -> (threshold0, threshold1, threshold2)
    Return the current collection thresholds
    """
    pass
def is_tracked(*args,**kw):
    """is_tracked(obj) -> bool
    Returns true if the object is tracked by the garbage collector.
    Simple atomic objects will return false.
    """
    pass
def isenabled(*args,**kw):
    """isenabled() -> status
    Returns true if automatic garbage collection is enabled.
    """
    pass
def set_debug(*args,**kw):
    """set_debug(flags) -> None
    Set the garbage collection debugging flags. Debugging information is
    written to sys.stderr.
    flags is an integer and can have the following bits turned on:
    DEBUG_STATS - Print statistics during collection.
    DEBUG_COLLECTABLE - Print collectable objects found.
    DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects found.
    DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them.
    DEBUG_LEAK - Debug leaking programs (everything but STATS).
    """
    pass
def set_threshold(*args,**kw):
    """set_threshold(threshold0, [threshold1, threshold2]) -> None
    Sets the collection thresholds. Setting threshold0 to zero disables
    collection.
    """
    pass
|
ShashaQin/erpnext | refs/heads/develop | erpnext/setup/doctype/target_detail/target_detail.py | 121 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class TargetDetail(Document):
	"""Child-table DocType; all behavior comes from frappe's Document."""
	pass
clchiou/garage | refs/heads/master | py/garage/garage/formatters/json.py | 1 | """Helpers for standard library's json package."""
__all__ = [
'encode_datetime',
'encode_mapping',
'join_encoders',
]
import datetime
from collections import Mapping
from collections import OrderedDict
from garage import datetimes
def _type_error(obj):
    """Build the TypeError raised for objects with no JSON encoding."""
    message = repr(obj) + ' is not JSON serializable'
    return TypeError(message)
def encode_datetime(obj, datetime_format=None):
    """JSON encoder hook serializing a datetime to a string.
    Uses ``datetime_format`` via strftime when given, otherwise falls back
    to the project's ISO-8601 formatter.
    """
    if not isinstance(obj, datetime.datetime):
        raise _type_error(obj)
    if datetime_format is not None:
        return obj.strftime(datetime_format)
    return datetimes.format_iso8601(obj)
def encode_mapping(obj):
    """JSON encoder hook that turns any Mapping into an OrderedDict.
    An OrderedDict is returned unchanged; other mappings are copied into a
    new OrderedDict, preserving their own iteration order.
    """
    if isinstance(obj, OrderedDict):
        return obj
    if isinstance(obj, Mapping):
        return OrderedDict(obj.items())
    raise _type_error(obj)
def join_encoders(*encoders):
    """Combine encoders into one that tries each in turn.
    The first encoder that does not raise TypeError wins; if all of them
    raise TypeError, the standard "not JSON serializable" TypeError is
    raised instead.
    """
    def encoder(obj):
        for candidate in encoders:
            try:
                return candidate(obj)
            except TypeError:
                continue
        raise _type_error(obj)
    return encoder
|
Snifer/BurpSuite-Plugins | refs/heads/master | faraday/model/controller.py | 2 | '''
Faraday Penetration Test IDE - Community Version
Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
See the file 'doc/LICENSE' for the license information
'''
import time
import threading
import Queue
import traceback
import datetime
import model.common # this is to make sure the factory is created
import model.hosts
from config.configuration import getInstanceConfiguration
from model.common import TreeWordsTries
from model.container import ModelObjectContainer
from utils.logs import getLogger
import model.api as api
#import model.guiapi as guiapi
from model.guiapi import notification_center as notifier
from gui.customevents import *
from model.workspace import WorkspaceSyncronizer
from utils.decorators import lockModel
from utils.common import get_hash
from model.conflict import Conflict, ConflictUpdate
#XXX: consider re-writing this module! There's alot of repeated code
# and things are really messy
CONF = getInstanceConfiguration()
class modelactions:
    """Action codes queued for the ModelController, plus their debug names.
    Fixes two copy/paste defects in the description table: EDITSERVICE was
    described as "EDITAPPLICATION" and ADDVULNWEBSRV as the misspelled
    "ADDVULNSWEBRV".
    """
    ADDHOST = 2000
    DELHOST = 2001
    ADDINTERFACE = 2002
    DELINTERFACE = 2003
    ADDSERVICEINT = 2004
    ADDSERVICEAPP = 2005
    DELSERVICEINT = 2006
    DELSERVICEAPP = 2007
    DELSERVICEHOST = 2008
    ADDAPPLICATION = 2009
    DELAPPLICATION = 2010
    ADDCATEGORY = 2011
    DELCATEGORY = 2012
    ADDVULNINT = 2013
    DELVULNINT = 2014
    ADDVULNAPP = 2015
    DELVULNAPP = 2016
    ADDVULNHOST = 2017
    DELVULNHOST = 2018
    ADDVULNSRV = 2019
    DELVULNSRV = 2020
    ADDNOTEINT = 2021
    DELNOTEINT = 2022
    ADDNOTEAPP = 2023
    DELNOTEAPP = 2024
    ADDNOTEHOST = 2025
    DELNOTEHOST = 2026
    ADDNOTESRV = 2027
    DELNOTESRV = 2028
    RENAMEROOT = 2029
    ADDNOTEVULN = 2030
    DELNOTEVULN = 2031
    EDITHOST = 2032
    EDITINTERFACE = 2033
    EDITAPPLICATION = 2034
    EDITSERVICE = 2035
    ADDCREDSRV = 2036
    DELCREDSRV = 2037
    ADDVULNWEBSRV = 2038
    DELVULNWEBSRV = 2039
    ADDNOTENOTE = 2040
    DELNOTENOTE = 2041
    EDITNOTE = 2042
    EDITVULN = 2043
    ADDNOTE = 2044
    DELNOTE = 2045
    ADDVULN = 2046
    DELVULN = 2047
    EDITCRED = 2048
    ADDCRED = 2049
    DELCRED = 2050
    # NOTE(review): some codes (ADDINTERFACE/DELINTERFACE, ADDSERVICE*,
    # ADDNOTEINT..DELNOTESRV, RENAMEROOT) have no entry here, so
    # getDescription returns "" for them -- confirm that is intentional.
    __descriptions = {
        ADDHOST: "ADDHOST",
        DELHOST: "DELHOST",
        ADDINTERFACE: "ADDINTERFACE",
        DELINTERFACE: "DELINTERFACE",
        ADDSERVICEINT: "ADDSERVICEINT",
        ADDSERVICEAPP: "ADDSERVICEAPP",
        DELSERVICEINT: "DELSERVICEINT",
        DELSERVICEAPP: "DELSERVICEAPP",
        DELSERVICEHOST: "DELSERVICEHOST",
        ADDAPPLICATION: "ADDAPPLICATION",
        DELAPPLICATION: "DELAPPLICATION",
        ADDCATEGORY: "ADDCATEGORY",
        DELCATEGORY: "DELCATEGORY",
        ADDVULNINT: "ADDVULNINT",
        DELVULNINT: "DELVULNINT",
        ADDVULNAPP: "ADDVULNAPP",
        DELVULNAPP: "DELVULNAPP",
        ADDVULNHOST: "ADDVULNHOST",
        DELVULNHOST: "DELVULNHOST",
        ADDVULNSRV: "ADDVULNSRV",
        DELVULNSRV: "DELVULNSRV",
        ADDNOTEVULN: "ADDNOTEVULN",
        DELNOTEVULN: "DELNOTEVULN",
        ADDNOTENOTE: "ADDNOTENOTE",
        DELNOTENOTE: "DELNOTENOTE",
        EDITHOST: "EDITHOST",
        EDITINTERFACE: "EDITINTERFACE",
        EDITAPPLICATION: "EDITAPPLICATION",
        # was "EDITAPPLICATION" (copy/paste bug)
        EDITSERVICE: "EDITSERVICE",
        ADDCREDSRV: "ADDCREDSRV",
        DELCREDSRV: "DELCREDSRV",
        # was misspelled "ADDVULNSWEBRV"
        ADDVULNWEBSRV: "ADDVULNWEBSRV",
        DELVULNWEBSRV: "DELVULNWEBSRV",
        EDITNOTE: "EDITNOTE",
        EDITVULN: "EDITVULN",
        EDITCRED: "EDITCRED",
        ADDNOTE: "ADDNOTE",
        DELNOTE: "DELNOTE",
        ADDVULN: "ADDVULN",
        DELVULN: "DELVULN",
        ADDCRED: "ADDCRED",
        DELCRED: "DELCRED"
    }
    @staticmethod
    def getDescription(action):
        """Return the debug name for an action code ('' if unknown)."""
        return modelactions.__descriptions.get(action, "")
class ModelController(threading.Thread):
    def __init__(self, security_manager):
        """Set up an empty model plus the locks/queues that make it thread-safe.
        Args:
            security_manager: object enforcing access control over the model.
        """
        threading.Thread.__init__(self)
        self.__sec = security_manager
        # set as daemon
        self.setDaemon(True)
        #TODO: think of a good way to handle cross reference between hosts and
        #categories
        self._categories = {}
        self._categories[CONF.getDefaultCategory()] = []
        # dictionary with host ids as key
        self._hosts = None
        # flag to stop daemon thread
        self._stop = False
        # locks needed to make model thread-safe
        self._hosts_lock = threading.RLock()
        #TODO: check if it is better using collections.deque
        # a performance analysis should be done
        # http://docs.python.org/library/collections.html#collections.deque
        # the actions queue
        self._pending_actions = Queue.Queue()
        # a reference to the ModelObjectFactory
        self._object_factory = model.common.factory
        self._registerObjectTypes()
        # sync api request flag. This flag is used to let the model know
        # there's some other object trying to use a sync api, and it should
        # give priority to that and stop processing the queue
        self._sync_api_request = False
        # This flag & lock are used when the complete model is being persisted
        self._saving_model_flag = False
        self._saving_model_lock = threading.RLock()
        self._actionDispatcher = None
        self._setupActionDispatcher()
        self._workspace = None
        # objects holding conflict updates that await user resolution
        self.objects_with_updates = []
        # word trie used for highlighting addresses/hostnames in the GUI
        self.treeWordsTries = TreeWordsTries()
    def __getattr__(self, name):
        # Called only when normal attribute lookup fails.  It logs the
        # missing name and implicitly returns None instead of raising
        # AttributeError.  NOTE(review): this masks typos and breaks
        # hasattr()-style checks on the controller -- confirm callers
        # actually rely on the None fallback before changing it.
        getLogger(self).debug("ModelObject attribute to refactor: %s" % name)
    def _getValueByID(self, attrName, ID):
        """
        Look up an element by ``ID`` inside the attribute named ``attrName``.
        The attribute passed as a parameter MUST BE a dictionary indexed with
        string IDs.  Matching is tried first against the keys (both the hash
        of the ID and the raw ID), then against each element's ``name``.
        Returns the matching object, or None when nothing matches or ID is
        falsy.
        """
        if ID:
            hash_id = get_hash([ID])
            ref = self.__getattribute__(attrName)
            # we are assuming the value is unique inside the object ID's
            #for key in ref:
            for key in ref.keys():
                #XXX: this way of checking the ids doesn't allow us to use a real hash as key
                # because we are checking if "id" is part of the key... not a good way of
                # dealing with this...
                if hash_id == key or ID == key:
                    return ref[key]
            # if id (hash) was not found then we try with element names
            for element in ref.itervalues():
                #if id in element.name:
                if ID == element.name:
                    return element
        return None
    def _addValue(self, attrName, newValue, setparent=False, update=False):
        """Store ``newValue`` in the dict attribute named ``attrName``.
        Returns True when the value was stored (new ID, or ``update`` forced
        an overwrite); False when the ID already existed and update is False.
        """
        # attribute passed as a parameter MUST BE the name
        # of an internal attribute which is a dictionary indexed
        # with a string ID
        valID = newValue.getID()
        ref = self.__getattribute__(attrName)
        #if valID not in ref or update:
        if valID not in ref or update:
            #TODO: Is this necesary?
            if setparent:
                newValue.setParent(self)
            ref[valID] = newValue
            return True
        #return not update
        return False
    def __acquire_host_lock(self):
        # Touch the saving-model lock first: while a whole-model save holds
        # it (see setSavingModel), this blocks until the save completes.
        self._saving_model_lock.acquire()
        self._saving_model_lock.release()
        self._hosts_lock.acquire()
    def __release_host_lock(self):
        # Releasing an RLock not held by this thread raises RuntimeError;
        # callers may release defensively, so that error is swallowed.
        try:
            self._hosts_lock.release()
        except RuntimeError:
            pass
    def _registerObjectTypes(self):
        """
        Registers in the factory all object types that can be created
        """
        # This could be done in hosts module, but it seems easier to maintain
        # if we have all in one place inside the controller
        self._object_factory.register(model.hosts.Host)
        self._object_factory.register(model.hosts.Interface)
        self._object_factory.register(model.hosts.Service)
        self._object_factory.register(model.hosts.HostApplication)
        self._object_factory.register(model.common.ModelObjectVuln)
        self._object_factory.register(model.common.ModelObjectVulnWeb)
        self._object_factory.register(model.common.ModelObjectNote)
        self._object_factory.register(model.common.ModelObjectCred)
    def _setupActionDispatcher(self):
        """Build the action-code -> handler-method dispatch table.
        Every code in ``modelactions`` that the controller can process maps
        to one of the private __add/__del/__edit handlers below.
        """
        self._actionDispatcher = {
            modelactions.ADDHOST: self.__addHost,
            modelactions.DELHOST: self.__delHost,
            modelactions.EDITHOST: self.__editHost,
            modelactions.ADDINTERFACE: self.__addInterfaceToHost,
            modelactions.DELINTERFACE: self.__delInterfaceFromHost,
            modelactions.EDITINTERFACE: self.__editInterface,
            modelactions.ADDSERVICEINT: self.__addServiceToInterface,
            modelactions.ADDSERVICEAPP: self.__addServiceToApplication,
            modelactions.DELSERVICEINT: self.__delServiceFromInterface,
            modelactions.DELSERVICEAPP: self.__delServiceFromApplication,
            modelactions.DELSERVICEHOST: self.__delService,
            modelactions.EDITSERVICE: self.__editService,
            modelactions.ADDAPPLICATION: self.__addApplication,
            modelactions.DELAPPLICATION: self.__delApplication,
            modelactions.EDITAPPLICATION: self.__editApplication,
            modelactions.ADDCATEGORY: self.__addCategory,
            modelactions.DELCATEGORY: self.__delCategory,
            #Vulnerability
            modelactions.ADDVULNINT: self.__addVulnerabilityToInterface,
            modelactions.DELVULNINT: self.__delVulnerabilityFromInterface,
            modelactions.ADDVULNAPP: self.__addVulnerabilityToApplication,
            modelactions.DELVULNAPP: self.__delVulnerabilityFromApplication,
            modelactions.ADDVULNHOST: self.__addVulnerabilityToHost,
            modelactions.DELVULNHOST: self.__delVulnerabilityFromHost,
            modelactions.ADDVULNSRV: self.__addVulnerabilityToService,
            modelactions.DELVULNSRV: self.__delVulnerabilityFromService,
            modelactions.ADDVULN: self.__addVulnToModelObject,
            modelactions.DELVULN: self.__delVulnFromModelObject,
            modelactions.ADDVULNWEBSRV: self.__addVulnerabilityToService,
            modelactions.DELVULNWEBSRV: self.__delVulnerabilityFromService,
            modelactions.EDITVULN: self.__editVulnerability,
            #Note
            modelactions.ADDNOTEINT: self.__addNoteToInterface,
            modelactions.DELNOTEINT: self.__delNoteFromInterface,
            modelactions.ADDNOTEAPP: self.__addNoteToApplication,
            modelactions.DELNOTEAPP: self.__delNoteFromApplication,
            modelactions.ADDNOTEHOST: self.__addNoteToHost,
            modelactions.DELNOTEHOST: self.__delNoteFromHost,
            modelactions.ADDNOTESRV: self.__addNoteToService,
            modelactions.DELNOTESRV: self.__delNoteFromService,
            modelactions.ADDNOTEVULN: self.__addNote,
            modelactions.DELNOTEVULN: self.__delNote,
            modelactions.ADDNOTE: self.__addNoteToModelObject,
            modelactions.DELNOTE: self.__delNoteFromModelObject,
            modelactions.ADDCREDSRV: self.__addCredToService,
            modelactions.DELCREDSRV: self.__delCredFromService,
            modelactions.ADDNOTENOTE: self.__addNoteToServiceNote,
            modelactions.DELNOTENOTE: self.__delNoteFromServiceNote,
            modelactions.EDITNOTE: self.__editNote,
            modelactions.EDITCRED: self.__editCred,
            modelactions.ADDCRED: self.__addCredToModelObject,
            modelactions.DELCRED: self.__delCredFromModelObject
        }
    def run(self):
        """Thread entry point; delegates to the internal main loop."""
        return self._main()
    def stop(self):
        """
        Sets the flag to stop daemon.
        Shutdown is cooperative: _main() checks the flag on every loop
        iteration, so the thread may take one more iteration to exit.
        """
        self._stop = True
    def _dispatchActionWithLock(self, action_callback, *args):
        """Run ``action_callback(*args)`` while holding the hosts lock.
        Returns the callback's result, or False when it raised (the
        exception is logged, never propagated to the caller).
        """
        res = False
        self.__acquire_host_lock()
        try:
            res = action_callback(*args)
        except Exception:
            api.log("An exception occurred while dispatching an action (%r(%r)\n%s" %
                    (action_callback, args, traceback.format_exc()), "ERROR")
        finally:
            # Always release, even when the callback blew up.
            self.__release_host_lock()
        return res
    def _processAction(self, action, parameters, sync=False):
        """
        Decode and perform the given action; works like a dispatcher.
        When ``sync`` is True the sync-api flag is raised for the duration
        of the call so the queue-processing loop yields to this request.
        """
        if sync:
            self._sync_api_request = True
        api.devlog("_processAction - %s - parameters = %s" %
                   (action, str(parameters)))
        action_callback = self._actionDispatcher[action]
        res = self._dispatchActionWithLock(action_callback, *parameters)
        # finally we notify the widgets about this change
        #if res: # notify only if action was done successfuly
        #self._notifyModelUpdated(*parameters)
        #else:
        if not res:
            api.devlog("Action code %d failed. Parameters = %s" %
                       (action, str(parameters)))
        if sync:
            self._sync_api_request = False
def getConflicts(self):
conflicts = []
for obj in self.objects_with_updates:
conflicts += obj.getUpdates()
return conflicts
    def resolveConflicts(self):
        """Push every pending conflict to the UI through the notifier."""
        notifier.conflictResolution(self.getConflicts())
    def resolveConflict(self, conflict, kwargs):
        """Apply the user's chosen values (kwargs) to a conflict.

        For Interface conflicts, the word trie used for searching is
        re-synchronized with the resolved IPv4/IPv6 addresses and hostnames.
        Notifies the UI that one conflict was consumed.
        """
        if conflict.resolve(kwargs):
            if conflict.getModelObjectType() == "Interface":
                ipv4 = kwargs['ipv4']
                ipv6 = kwargs['ipv6']
                hostnames = kwargs['hostnames']
                # remove-then-add keeps the trie free of duplicates
                if not ipv4['address'] in ["0.0.0.0", None]:
                    self.treeWordsTries.removeWord(ipv4['address'])
                    self.treeWordsTries.addWord(ipv4['address'])
                if not ipv6['address'] in ["0000:0000:0000:0000:0000:0000:0000:0000", None]:
                    self.treeWordsTries.removeWord(ipv6['address'])
                    self.treeWordsTries.addWord(ipv6['address'])
                for h in hostnames:
                    if h is not None:
                        self.treeWordsTries.removeWord(h)
                        self.treeWordsTries.addWord(h)
            # -1: one fewer conflict outstanding
            notifier.conflictUpdate(-1)
            notifier.editHost(conflict.getFirstObject().getHost())
            #self._notifyModelUpdated()
    def removeConflictsByObject(self, obj):
        """Drop all conflicts attached to obj and update the UI counter."""
        if obj in self.objects_with_updates:
            self.objects_with_updates.remove(obj)
            notifier.conflictUpdate(-len(obj.getUpdates()))
    def setSavingModel(self, value):
        """Flag the model as being saved/synced and hold the save lock.

        While the flag is set, _main() stops consuming queued actions.
        Releasing an unheld lock raises RuntimeError, which is ignored so
        redundant setSavingModel(False) calls are harmless.
        """
        api.devlog("setSavingModel: %s" % value)
        self._saving_model_flag = value
        if value:
            self._saving_model_lock.acquire()
        else:
            try:
                self._saving_model_lock.release()
            except RuntimeError:
                pass
    def _main(self):
        """
        The main method for the thread.
        The controller will be constantly checking a queue
        to see if new actions were added.
        This will make host addition and removal "thread-safe" and will
        avoid locking components that need to interact with the model
        """
        while True:
            # check if thread must finish
            if self._stop:
                return
            # first we check if there is a sync api request
            # or if the model is being saved/sync'ed
            # or if we have pending duplicated hosts that need to be
            # merged by the user
            if not self._sync_api_request and not self._saving_model_flag:
                # processAction() blocks up to its queue timeout, so this
                # loop does not busy-wait while idle
                self.processAction()
            else:
                # there is some object requesting for a sync api so we
                # sleep the thread execution for a moment to let others work
                # XXX: check if this time is not too much...
                time.sleep(0.01)
def processAllPendingActions(self):
[self.processAction() for i in range(self._pending_actions.qsize())]
    def processAction(self):
        """Dequeue and dispatch one pending action, or time out quietly."""
        # check the queue for new actions
        # if there is no new action it will block until timeout is reached
        try:
            # get new action or timeout (in secs)
            #TODO: timeout should be set through config
            current_action = self._pending_actions.get(timeout=2)
            # queued tuples are (action_code, arg1, arg2, ...)
            action = current_action[0]
            parameters = current_action[1:]
            # dispatch the action
            self._processAction(action, parameters)
        except Queue.Empty:
            # if timeout was reached, just let the daemon run again
            # this is done just to be able to test the stop flag
            # because if we don't do it, the daemon will be blocked forever
            pass
        except Exception:
            getLogger(self).devlog("something strange happened... unhandled exception?")
            getLogger(self).devlog(traceback.format_exc())
    def sync_lock(self):
        """Pause async action processing and take the host lock."""
        self._sync_api_request = True
        self.__acquire_host_lock()
    def sync_unlock(self):
        """Release the host lock and resume async action processing."""
        self._sync_api_request = False
        self.__release_host_lock()
    def __addCategory(self, category):
        # Create the category bucket if missing; idempotent by design.
        if category not in self._categories:
            self._categories[category] = []
    def getAllCategories(self):
        """Return the internal {category: [host_id, ...]} mapping."""
        return self._categories
    def __delCategory(self, category, recursive=False):
        """
        Removes a given category from the model.
        If recursive is True it will also delete all hosts inside the category.
        If recursive is False it will first move the hosts in the category, to the
        default category "ALL" and then remove the desired category
        """
        #XXX: test this implementation...
        # NOTE(review): despite the docstring, the category entry itself is
        # never deleted from self._categories here -- confirm intended.
        if category in self._categories:
            if recursive:
                # "cat" is actually a host id stored in the category bucket
                for cat in self._categories[category]:
                    self.__delHost(cat)
            else:
                # we just move all hosts to category ALL and delete the category
                for host_id in self._categories[category]:
                    host = self._getValueByID("_hosts", host_id)
                    if host is not None:
                        host.categories.remove(category)
                        host.categories.append(CONF.getDefaultCategory())
    # adding, deleting and moving hosts in categories seem to be thread
    # safe operations, so we don't need to acquire any lock
    def __addHostToCategory(self, host, category):
        # we always try to create the category to make it safe.
        # If category exists it just won't do anything
        self.__addCategory(category)
        self._categories[category].append(host.getID())
        if category not in host.categories:
            host.registerCategory(category)
    def __delHostFromCategory(self, host, category):
        # Raises KeyError/ValueError if the category or host id is absent;
        # callers are expected to pass a category the host belongs to.
        self._categories[category].remove(host.getID())
        host.removeCategory(category)
    def moveHostToCategory(self, hostname, category):
        """Move a host out of its current category into `category`.

        Returns True if the host was found and moved, False otherwise.
        """
        host = self._getValueByID("_hosts", hostname)
        if host is not None:
            self.__delHostFromCategory(host, host.getCurrentCategory())
            self.__addHostToCategory(host, category)
            return True
        return False
    # TODO: CATEGORIES APIS are still missing...
    # also we need some "move" api to be used when drag & drop
    # exists in the host browser
    # TODO: >>> APIs <<< we have to know which plugin called the apis to store
    # in the history
    def __addPendingAction(self, *args):
        """
        Adds a new pending action to the queue
        Action is build with generic args tuple.
        The caller of this function has to build the action in the right
        way since no checks are preformed over args

        args[0] must be a modelactions code; the remaining items are the
        callback's positional arguments, later consumed by processAction().
        """
        new_action = args
        self._pending_actions.put(new_action)
def addCategoryASYNC(self, category):
self.__addPendingAction(modelactions.ADDCATEGORY, category)
def addCategorySYNC(self, category):
#self.sync_lock()
#self.__addCategory(category)
#self.sync_unlock()
self._processAction(modelactions.ADDCATEGORY, category, sync=True)
def delCategoryASYNC(self, category, recursive=False):
self.__addPendingAction(modelactions.DELCATEGORY, category, recursive)
def delCategorySYNC(self, category, recursive=False):
self._processAction(modelactions.DELCATEGORY, [category, recursive], sync=True)
def addUpdate(self, old_object, new_object):
# Returns True if the update was resolved without user interaction
res = True
try:
mergeAction = old_object.addUpdate(new_object)
if mergeAction:
if old_object not in self.objects_with_updates:
self.objects_with_updates.append(old_object)
notifier.conflictUpdate(1)
res = False
except:
res = False
api.devlog("(%s).addUpdate(%s, %s) - failed" %
(self, old_object, new_object))
return res
    def addHostASYNC(self, host, category=None, update=False, old_hostname=None):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        new host must be added to the model

        old_hostname is accepted for API compatibility; __addHost does not
        use it.
        """
        self.__addPendingAction(modelactions.ADDHOST, host, category, update, old_hostname)
    def addHostSYNC(self, host, category=None, update=False, old_hostname=None):
        """
        SYNC API
        Adds a host directly to the model
        """
        self._processAction(modelactions.ADDHOST, [host, category, update, old_hostname], sync=True)
    def __addHost(self, host, category, update=False, old_hostname=None):
        """Add a host to the model, or merge it into an existing one.

        If a host with the same ID exists, an update/conflict is recorded
        instead of inserting.  On success the host is indexed in the word
        trie, filed under `category` (default category if None) and the UI
        is notified.  Returns True on success.
        """
        res = False
        #self.__acquire_host_lock()
        old_host = self._getValueByID("_hosts", host.getID())
        if old_host:
            res = self.addUpdate(old_host, host)
        else:
            res = self._addValue("_hosts", host, update=update)
            if res:
                host.setParent(None)
                if category is None:
                    category = CONF.getDefaultCategory()
                elif category not in self._categories:
                    self.__addCategory(category)
                self.treeWordsTries.addWord(host.getName())
                self.treeWordsTries.addWord(category)
                self.__addHostToCategory(host, category)
                notifier.addHost(host)
        #self.__release_host_lock()
        return res
    def delHostASYNC(self, host):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        particular host must be removed from the model
        """
        self.__addPendingAction(modelactions.DELHOST, host)
    def delHostSYNC(self, host):
        """
        SYNC API
        Deletes a host from model
        """
        self._processAction(modelactions.DELHOST, [host], sync=True)
    # __clear* helpers: recursively detach an object's children (notes,
    # vulns, creds, and nested interfaces/services) before deletion, so no
    # dangling conflicts or sub-objects survive the parent.
    def __clearHost(self, host):
        self.__clearModelObject(host)
        self.__delInterfaces(host)
    def __clearInterface(self, interface):
        self.__clearModelObject(interface)
        self.__delServices(interface)
    def __clearApplication(self, application):
        self.__clearModelObject(application)
        self.__delServices(application)
    def __clearService(self, service):
        self.__clearModelObject(service)
    def __clearNote(self, note):
        self.__clearModelObject(note)
    def __clearVuln(self, vuln):
        self.__clearModelObject(vuln)
    def __clearCred(self, cred):
        self.__clearModelObject(cred)
    def __clearModelObject(self, modelObj):
        # also drops any pending conflicts referencing the object
        self.removeConflictsByObject(modelObj)
        self.__delNotes(modelObj)
        self.__delVulns(modelObj)
        self.__delCreds(modelObj)
    # __del{Notes,Vulns,Creds,Interfaces,Services}: clear then delete every
    # child of the given kind.  Iterating over list(...) copies the
    # collection so deletion while iterating is safe.
    def __delNotes(self, modelObj):
        for note in list(modelObj.getNotes()):
            self.__clearNote(note)
            modelObj.delNote(note.getID())
    def __delVulns(self, modelObj):
        for vuln in list(modelObj.getVulns()):
            self.__clearVuln(vuln)
            modelObj.delVuln(vuln.getID())
    def __delCreds(self, modelObj):
        for cred in list(modelObj.getCreds()):
            self.__clearCred(cred)
            modelObj.delCred(cred.getID())
    def __delInterfaces(self, modelObj):
        for interface in list(modelObj.getAllInterfaces()):
            self.__clearInterface(interface)
            modelObj.delInterface(interface.getID())
    def __delServices(self, modelObj):
        for service in list(modelObj.getAllServices()):
            self.__clearService(service)
            modelObj.delService(service.getID())
    def __delHost(self, host_id):
        """Remove a host (and all its children) from the model.

        Clears child objects, removes the host through the workspace, and
        un-indexes its name and hostnames from the word trie.  Returns True
        if the host existed.
        """
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            #res = self._delValue("_hosts", host.getID())
            #if res:
            self.__clearHost(host)
            #this next method removes the host
            self._workspace.remove(host)
            self.treeWordsTries.removeWord(host.getName())
            for i in host.getAllInterfaces():
                for h in i.getHostnames():
                    self.treeWordsTries.removeWord(h)
            notifier.delHost(host.getID())
            res = True
        return res
    def _delValue(self, attrName, valID):
        """Delete an entry from the dict attribute named attrName.

        Tries three key forms in order: the raw id, its hash, and finally a
        linear scan matching the element's name.  Returns True if something
        was deleted.
        """
        # attribute passed as a parameter MUST BE the name
        # of an internal attribute which is a dictionary indexed
        # with a string ID
        api.devlog("(%s)._delValue(%s, %s)" % (self, attrName, valID))
        ref = self.__getattribute__(attrName)
        api.devlog("ref.keys() = %s" % ref.keys())
        if valID in ref:
            del ref[valID]
            return True
        hash_id = get_hash([valID])
        if hash_id in ref:
            del ref[hash_id]
            return True
        # itervalues: this module targets Python 2
        for element in ref.itervalues():
            if valID == element.name:
                del ref[element.getID()]
                return True
        # none of the ids were found
        return False
    def editHostSYNC(self, host, name, description, os, owned):
        """
        SYNC API
        Modifies a host from model
        """
        self._processAction(modelactions.EDITHOST, [host, name, description, os, owned], sync=True)
    def __editHost(self, host, name=None, description=None, os=None, owned=None):
        # Updates the host in place and notifies the UI.  Returns True if
        # the host object was provided (no lookup is performed here).
        res = False
        #host = self._getValueByID("_hosts", host)
        if host is not None:
            host.updateAttributes(name, description, os, owned)
            res = True
            notifier.editHost(host)
        return res
    def addInterfaceASYNC(self, host, interface, update=False):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        new interface must be added to a specific host

        NOTE(review): the `update` parameter is accepted but not forwarded
        to the queued action -- confirm whether that is intended.
        """
        self.__addPendingAction(modelactions.ADDINTERFACE, host, interface)
    def addInterfaceSYNC(self, host, interface, update=False):
        """
        SYNC API
        Adds interface directly to the model

        NOTE(review): `update` is not forwarded here either.
        """
        self._processAction(modelactions.ADDINTERFACE, [host, interface], sync=True)
def __addInterfaceToHost(self, host_id, interface):
res = False
#self.__acquire_host_lock()
# if host is not found nothing is done with the new interface
try:
host = self._getValueByID("_hosts", host_id)
if host is not None:
old_interface = host.getInterface(interface.getID())
if old_interface:
res = self.addUpdate(old_interface, interface)
else:
res = host.addInterface(interface)
if res:
self.treeWordsTries.addWord(interface.name)
for h in interface.getHostnames():
self.treeWordsTries.addWord(h)
notifier.editHost(host)
except Exception as e:
raise e
#self.__release_host_lock()
return res
    def delInterfaceASYNC(self, host, interface_name):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        particular host must be removed from the model
        """
        self.__addPendingAction(modelactions.DELINTERFACE, host, interface_name)
    def delInterfaceSYNC(self, host, interface_name):
        """
        SYNC API
        Deletes an interface from model
        """
        self._processAction(modelactions.DELINTERFACE, [host, interface_name], sync=True)
    def __delInterfaceFromHost(self, host_id, interface_id):
        """Delete an interface from a host, un-indexing its names."""
        res = False
        #self.__acquire_host_lock()
        # DO NOT USE self.getHost because it will cause a deadlock
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            interface = host.getInterface(interface_id)
            if interface is not None:
                res = host.delInterface(interface.getID())
                self.__clearInterface(interface)
                self.treeWordsTries.removeWord(interface.name)
                for h in interface.getHostnames():
                    self.treeWordsTries.removeWord(h)
                notifier.editHost(host)
        return res
    def editInterfaceSYNC(self, interface, name, description, hostnames,
                          mac, ipv4, ipv6, network_segment,
                          amount_ports_opened, amount_ports_closed,
                          amount_ports_filtered, owned):
        """
        SYNC API
        Modifies an interface from model

        ipv4 and ipv6 are dict-like values carrying at least an 'address'
        key (see __editInterface).
        """
        self._processAction(modelactions.EDITINTERFACE,
                            [interface, name, description, hostnames,
                             mac, ipv4, ipv6, network_segment,
                             amount_ports_opened, amount_ports_closed,
                             amount_ports_filtered, owned], sync=True)
    def __editInterface(self, interface, name, description, hostnames,
                        mac, ipv4, ipv6, network_segment,
                        amount_ports_opened, amount_ports_closed,
                        amount_ports_filtered, owned):
        """Update an interface's attributes and re-index its addresses.

        The trie maintenance below mirrors resolveConflict(); the
        remove-then-add sequence avoids duplicate trie entries.
        """
        res = False
        if interface is not None:
            interface.updateAttributes(name, description, hostnames, mac,
                                       ipv4, ipv6, network_segment,
                                       amount_ports_opened,
                                       amount_ports_closed,
                                       amount_ports_filtered, owned)
            if not ipv4['address'] in ["0.0.0.0", None]:
                self.treeWordsTries.removeWord(ipv4['address'])
                self.treeWordsTries.addWord(ipv4['address'])
            if not ipv6['address'] in ["0000:0000:0000:0000:0000:0000:0000:0000", None]:
                self.treeWordsTries.removeWord(ipv6['address'])
                self.treeWordsTries.addWord(ipv6['address'])
            for h in hostnames:
                if h is not None:
                    self.treeWordsTries.removeWord(h)
                    self.treeWordsTries.addWord(h)
            notifier.editHost(interface.getHost())
            res = True
        return res
    def addApplicationASYNC(self, host, application):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        new application must be added to a specific host
        """
        self.__addPendingAction(modelactions.ADDAPPLICATION, host, application)
    def addApplicationSYNC(self, host, application):
        """
        SYNC API
        Adds an application to a specific host
        directly to the model
        """
        self._processAction(modelactions.ADDAPPLICATION, [host, application], sync=True)
    def __addApplication(self, host_id, application):
        """Add or merge an application on the given host; notify the UI."""
        res = False
        #self.__acquire_host_lock()
        # if host is not found nothing is done with the new interface
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            old_application = host.getApplication(application.getID())
            if old_application:
                res = self.addUpdate(old_application, application)
            else:
                res = host.addApplication(application)
            notifier.editHost(host)
        #self.__release_host_lock()
        return res
    def delApplicationASYNC(self, host, app_name):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        particular host must be removed from the model
        """
        self.__addPendingAction(modelactions.DELAPPLICATION, host, app_name)
    def delApplicationSYNC(self, host, app_name):
        """
        SYNC API
        Deletes an application from the model
        """
        self._processAction(modelactions.DELAPPLICATION, [host, app_name], sync=True)
    def __delApplication(self, host_id, app_id):
        """Delete an application (and its children) from a host."""
        res = False
        #self.__acquire_host_lock()
        # DO NOT USE self.getHost because it will cause a deadlock
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            application = host.getApplication(app_id)
            if application is not None:
                self.__clearApplication(application)
                res = host.delApplication(application.getID())
                notifier.editHost(host)
        #self.__release_host_lock()
        return res
    def editApplicationSYNC(self, application, name, description, status, version, owned):
        """
        SYNC API
        Modifies an application in the model
        """
        self._processAction(modelactions.EDITAPPLICATION, [application, name, description, status, version, owned], sync=True)
    def __editApplication(self, application, name=None, description=None, status=None, version=None, owned=None):
        # Updates the application in place and notifies the UI via its host.
        res = False
        #host = self._getValueByID("_hosts", host)
        if application is not None:
            application.updateAttributes(name, description, status, version, owned)
            notifier.editHost(application.getHost())
            res = True
        return res
    def addServiceToInterfaceASYNC(self, host, interface_name, newService):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        new services must be added to a specific host in a specific interface
        """
        self.__addPendingAction(modelactions.ADDSERVICEINT, host, interface_name, newService)
    def addServiceToInterfaceSYNC(self, host_id, interface_id, newService):
        """
        SYNC API
        Adds a service to a specific host in a specific interface
        directly to the model
        """
        self._processAction(modelactions.ADDSERVICEINT, [host_id, interface_id, newService], sync=True)
    def addServiceToApplicationASYNC(self, host, appname, newService):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        new services must be added to a specific host in a specific interface
        """
        self.__addPendingAction(modelactions.ADDSERVICEAPP, host, appname, newService)
    def addServiceToApplicationSYNC(self, host, appname, newService):
        """
        SYNC API
        Adds a service to a specific host in a specific application
        directly to the model
        """
        self._processAction(modelactions.ADDSERVICEAPP, [host, appname, newService], sync=True)
    def __addServiceToInterface(self, host_id, interface_id, service):
        """Add or merge a service on an interface of the given host."""
        res = False
        #self.__acquire_host_lock()
        # if host is not found nothing is done with the new interface
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            interface = host.getInterface(interface_id)
            if interface is not None:
                old_service = interface.getService(service.getID())
                if old_service:
                    res = self.addUpdate(old_service, service)
                else:
                    res = interface.addService(service)
                if res:
                    notifier.editHost(host)
        else:
            api.devlog("__addService failed. Host ID: %s not found" % host_id)
        return res
    def __addServiceToApplication(self, host_id, application_id, service):
        """Add or merge a service on an application of the given host."""
        res = False
        #self.__acquire_host_lock()
        # if host is not found nothing is done with the new interface
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            application = host.getApplication(application_id)
            if application is not None:
                old_service = application.getService(service.getID())
                if old_service:
                    res = self.addUpdate(old_service, service)
                else:
                    res = application.addService(service)
                if res:
                    notifier.editHost(host)
        else:
            api.devlog("__addService failed. Host ID: %s not found" % host_id)
        return res
    def delServiceFromInterfaceASYNC(self, host, interface, service):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        particular service in a host and interface must be removed from the
        model Interface parameter can be "ALL"
        """
        self.__addPendingAction(modelactions.DELSERVICEINT, host, interface, service)
    def delServiceFromInterfaceSYNC(self, host, interface, service):
        """
        SYNC API
        Delete a service in a host and interface from the model
        """
        self._processAction(modelactions.DELSERVICEINT, [host, interface, service], sync=True)
    def delServiceFromApplicationASYNC(self, host, appname, service):
        """
        ASYNC API
        Adds an action to the ModelController actions queue indicating a
        particular service in a host and interface must be removed from the model
        appname parameter can be "ALL"
        """
        self.__addPendingAction(modelactions.DELSERVICEAPP, host, appname, service)
    def delServiceFromApplicationSYNC(self, host, appname, service):
        """
        SYNC API
        Delete a service in a host and application from the model
        """
        self._processAction(modelactions.DELSERVICEAPP, [host, appname, service], sync=True)
    def delServiceFromHostASYNC(self, host, service):
        """ASYNC API: queue the removal of a service attached to a host."""
        self.__addPendingAction(modelactions.DELSERVICEHOST, host, service)
    def delServiceFromHostSYNC(self, host, service):
        """
        SYNC API
        Delete a service from the model
        """
        self._processAction(modelactions.DELSERVICEHOST, [host, service], sync=True)
    def __delServiceFromInterface(self, host_id, interface_id=None, service_id=None):
        """Delete a service from an interface of the given host.

        NOTE(review): getService() may return None here, in which case
        __clearService(None) would fail -- confirm callers always pass a
        valid service_id.
        """
        res = False
        api.devlog("ModelController.__delServiceFromInterface(%s, %s, %s)" %
                   (host_id, interface_id, service_id))
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            if service_id is not None:
                interface = host.getInterface(interface_id)
                if interface is not None:
                    service = interface.getService(service_id)
                    self.__clearService(service)
                    res = interface.delService(service_id)
                    if res:
                        notifier.editHost(host)
        return res
def __delServiceFromApplication(self, host_id, application_id=None, service_id=None):
res = False
api.devlog("ModelController.__delService(%s, %s, %s)" %
(host_id, application_id, service_id))
host = self._getValueByID("_hosts", host_id)
if host is not None:
if service_id is not None and item_id is not None:
application = host.getInterface(application_id)
if application is not None:
service = interface.getService(service_id)
self.__clearService(service)
res = application.delService(service_id)
if res:
notifier.editHost(host)
return res
    def __delService(self, host_id, service_id=None):
        """Delete a service attached directly to a host."""
        res = False
        api.devlog("ModelController.__delService(%s, %s)" %
                   (host_id, service_id))
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            service = host.getService(service_id)
            self.__clearService(service)
            res = host.delService(service_id)
            if res:
                notifier.editHost(host)
        return res
    def editServiceSYNC(self, service, name, description, protocol, ports, status, version, owned):
        """
        SYNC API
        Modifies a service in the model
        """
        self._processAction(modelactions.EDITSERVICE, [service, name, description, protocol, ports, status, version, owned], sync=True)
def editServiceASYNC(self, service, name, description, protocol, ports, status, version, owned):
"""
ASYNC API
Modifies a service from model
"""
self.__addPendingAction(modelactions.EDITSERVICE, [service, name, description, protocol, ports, status, version, owned])
    def __editService(self, service, name=None, description=None,
                      protocol=None, ports=None, status=None,
                      version=None, owned=None):
        # Updates the service in place and notifies the UI via its host.
        res = False
        if service is not None:
            service.updateAttributes(name, description, protocol, ports, status, version, owned)
            notifier.editHost(service.getHost())
            res = True
        return res
    # Vulnerability creation wrappers.  ASYNC variants queue the action for
    # the daemon loop; SYNC variants execute immediately (parameters
    # wrapped in a list so _processAction can unpack them).
    def addVulnToInterfaceASYNC(self, host, intname, newVuln):
        self.__addPendingAction(modelactions.ADDVULNINT, host, intname, newVuln)
    def addVulnToInterfaceSYNC(self, host, intname, newVuln):
        self._processAction(modelactions.ADDVULNINT, [host, intname, newVuln], sync=True)
    def addVulnToApplicationASYNC(self, host, appname, newVuln):
        self.__addPendingAction(modelactions.ADDVULNAPP, host, appname, newVuln)
    def addVulnToApplicationSYNC(self, host, appname, newVuln):
        self._processAction(modelactions.ADDVULNAPP, [host, appname, newVuln], sync=True)
    def addVulnToHostASYNC(self, host, newVuln):
        self.__addPendingAction(modelactions.ADDVULNHOST, host, newVuln)
    def addVulnToHostSYNC(self, host, newVuln):
        self._processAction(modelactions.ADDVULNHOST, [host, newVuln], sync=True)
    def addVulnToServiceASYNC(self, host, srvname, newVuln):
        self.__addPendingAction(modelactions.ADDVULNSRV, host, srvname, newVuln)
    def addVulnToServiceSYNC(self, host, srvname, newVuln):
        self._processAction(modelactions.ADDVULNSRV, [host, srvname, newVuln], sync=True)
    def addVulnSYNC(self, model_object, newVuln):
        self._processAction(modelactions.ADDVULN, [model_object, newVuln], sync=True)
    def addVulnWebToServiceASYNC(self, host, srvname, newVuln):
        self.__addPendingAction(modelactions.ADDVULNWEBSRV, host, srvname, newVuln)
    def addVulnWebToServiceSYNC(self, host, srvname, newVuln):
        self._processAction(modelactions.ADDVULNWEBSRV, [host, srvname, newVuln], sync=True)
    def __addVulnToModelObject(self, model_object, vuln=None):
        """Add or merge a vulnerability directly on any model object."""
        res = False
        if model_object is not None:
            old_vuln = model_object.getVuln(vuln.getID())
            if old_vuln:
                res = self.addUpdate(old_vuln, vuln)
            else:
                res = model_object.addVuln(vuln)
            if res:
                notifier.editHost(model_object.getHost())
        return res
    def __addVulnerabilityToHost(self, host_id, vuln=None):
        """Add or merge a vulnerability on the host identified by host_id."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None and vuln is not None:
            old_vuln = host.getVuln(vuln.getID())
            if old_vuln:
                res = self.addUpdate(old_vuln, vuln)
            else:
                res = host.addVuln(vuln)
            if res:
                notifier.editHost(host)
        api.devlog("__addVulnerabilityToHost result = %s" % res)
        return res
    def __addVulnerabilityToApplication(self, host_id, application_id, vuln=None):
        """Add or merge a vulnerability on an application of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None and application_id is not None and vuln is not None:
            application = host.getApplication(application_id)
            if application is not None:
                old_vuln = application.getVuln(vuln.getID())
                if old_vuln:
                    res = self.addUpdate(old_vuln, vuln)
                else:
                    res = application.addVuln(vuln)
                if res:
                    notifier.editHost(application.getHost())
        api.devlog("__addVulnerabilityToApplication result = %s" % res)
        return res
    def __addVulnerabilityToInterface(self, host_id, interface_id, vuln=None):
        """Add or merge a vulnerability on an interface of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None and interface_id is not None and vuln is not None:
            interface = host.getInterface(interface_id)
            if interface is not None:
                old_vuln = interface.getVuln(vuln.getID())
                if old_vuln:
                    res = self.addUpdate(old_vuln, vuln)
                else:
                    res = interface.addVuln(vuln)
                if res:
                    notifier.editHost(interface.getHost())
        api.devlog("__addVulnerabilityToInterface result = %s" % res)
        return res
    def __addVulnerabilityToService(self, host_id, service_id, vuln=None):
        """Add or merge a vulnerability on a service of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None and service_id is not None and vuln is not None:
            service = host.getService(service_id)
            if service is not None:
                old_vuln = service.getVuln(vuln.getID())
                if old_vuln:
                    res = self.addUpdate(old_vuln, vuln)
                else:
                    res = service.addVuln(vuln)
                if res:
                    notifier.editHost(service.getHost())
        api.devlog("__addVulnerabilityToService result = %s" % res)
        return res
    # Vulnerability deletion wrappers; same ASYNC/SYNC convention as the
    # addVuln* wrappers above.
    def delVulnFromApplicationASYNC(self, hostname, appname, vuln):
        self.__addPendingAction(modelactions.DELVULNAPP, hostname, appname, vuln)
    def delVulnFromApplicationSYNC(self, hostname, appname, vuln):
        self._processAction(modelactions.DELVULNAPP, [hostname, appname, vuln], sync=True)
    def delVulnFromInterfaceASYNC(self, hostname, intname, vuln):
        self.__addPendingAction(modelactions.DELVULNINT, hostname, intname, vuln)
    def delVulnFromInterfaceSYNC(self, hostname, intname, vuln):
        self._processAction(modelactions.DELVULNINT, [hostname,intname, vuln], sync=True)
    def delVulnFromHostASYNC(self, hostname, vuln):
        self.__addPendingAction(modelactions.DELVULNHOST, hostname, vuln)
    def delVulnFromHostSYNC(self, hostname, vuln):
        self._processAction(modelactions.DELVULNHOST, [hostname, vuln], sync=True)
    def delVulnFromServiceASYNC(self, hostname, srvname, vuln):
        self.__addPendingAction(modelactions.DELVULNSRV, hostname, srvname, vuln)
    def delVulnFromServiceSYNC(self, hostname, srvname, vuln):
        self._processAction(modelactions.DELVULNSRV, [hostname, srvname, vuln], sync=True)
    def delVulnSYNC(self, model_object, vuln_id):
        self._processAction(modelactions.DELVULN, [model_object, vuln_id], sync=True)
    def __delVulnFromModelObject(self, model_object, vuln_id):
        """Delete a vulnerability directly from any model object."""
        res = False
        if model_object is not None:
            vuln = model_object.getVuln(vuln_id)
            self.__clearVuln(vuln)
            res = model_object.delVuln(vuln_id)
            if res:
                notifier.editHost(model_object.getHost())
        return res
    def __delVulnerabilityFromHost(self, host_id, vuln_id):
        """Delete a vulnerability attached directly to a host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            vuln = host.getVuln(vuln_id)
            self.__clearVuln(vuln)
            res = host.delVuln(vuln_id)
            if res:
                notifier.editHost(host)
        return res
    def __delVulnerabilityFromInterface(self, host_id, interface_id, vuln_id):
        """Delete a vulnerability from an interface of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            interface = host.getInterface(interface_id)
            if interface is not None:
                vuln = interface.getVuln(vuln_id)
                self.__clearVuln(vuln)
                res = interface.delVuln(vuln_id)
                if res:
                    notifier.editHost(host)
        return res
    def __delVulnerabilityFromApplication(self, host_id, application_id, vuln_id):
        """Delete a vulnerability from an application of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            application = host.getApplication(application_id)
            if application is not None:
                vuln = application.getVuln(vuln_id)
                self.__clearVuln(vuln)
                res = application.delVuln(vuln_id)
                if res:
                    notifier.editHost(host)
        return res
    def __delVulnerabilityFromService(self, host_id, service_id, vuln_id):
        """Delete a vulnerability from a service of the host."""
        res = False
        host = self._getValueByID("_hosts", host_id)
        if host is not None:
            service = host.getService(service_id)
            if service is not None:
                vuln = service.getVuln(vuln_id)
                self.__clearVuln(vuln)
                res = service.delVuln(vuln_id)
                if res:
                    notifier.editHost(host)
        return res
    def editVulnSYNC(self, vuln, name, desc, severity, refs):
        """SYNC API: modify a vulnerability's attributes immediately."""
        self._processAction(modelactions.EDITVULN, [vuln, name, desc, severity, refs], sync=True)
def editVulnASYNC(self, vuln, name, desc, severity, refs):
self.__addPendingAction(modelactions.EDITVULN, [vuln, name, desc, severity, refs])
    def editVulnWebSYNC(self, vuln, name, desc, website, path, refs, severity,
                        request, response, method, pname, params, query,
                        category):
        """SYNC API: modify a web vulnerability's attributes immediately.

        NOTE(review): dispatches modelactions.EDITVULN (same code as plain
        vuln edits), relying on updateAttributes(*args) -- confirm there is
        no separate EDITVULNWEB action intended.
        """
        self._processAction(modelactions.EDITVULN,
                            [vuln, name, desc, website, path, refs, severity,
                             request, response, method, pname, params, query,
                             category], sync=True)
def editVulnWebASYNC(self, vuln, name, desc, website, path, refs,
severity, request, response, method, pname,
params, query, category):
self.__addPendingAction(modelactions.EDITVULN,
[vuln, name, desc, website, path, refs,
severity, request, response, method,
pname, params, query, category])
    def __editVulnerability(self, vuln, *args):
        # Generic edit: *args is forwarded verbatim to updateAttributes, so
        # both plain and web vulnerability edits share this callback.
        res = False
        if vuln is not None:
            vuln.updateAttributes(*args)
            res = True
        if res:
            notifier.editHost(vuln.getHost())
        return res
    # Note creation wrappers: same ASYNC/SYNC convention as the vuln
    # wrappers.  addNoteToNoteASYNC attaches a note to another note that
    # hangs off a service.
    def addNoteToInterfaceASYNC(self, host, intname, newNote):
        self.__addPendingAction(modelactions.ADDNOTEINT, host, intname, newNote)
    def addNoteToInterfaceSYNC(self, host, intname, newNote):
        self._processAction(modelactions.ADDNOTEINT, [host, intname, newNote], sync=True)
    def addNoteToApplicationASYNC(self, host, appname, newNote):
        self.__addPendingAction(modelactions.ADDNOTEAPP, host, appname, newNote)
    def addNoteToApplicationSYNC(self, host, appname, newNote):
        self._processAction(modelactions.ADDNOTEAPP, [host, appname, newNote], sync=True)
    def addNoteToHostASYNC(self, host, newNote):
        self.__addPendingAction(modelactions.ADDNOTEHOST, host, newNote)
    def addNoteToHostSYNC(self, host, newNote):
        self._processAction(modelactions.ADDNOTEHOST, [host, newNote], sync=True)
    def addNoteToServiceASYNC(self, host, srvname, newNote):
        self.__addPendingAction(modelactions.ADDNOTESRV, host, srvname, newNote)
    def addNoteToNoteASYNC(self, host, srvname, note_id, newNote):
        self.__addPendingAction(modelactions.ADDNOTENOTE, host, srvname, note_id, newNote)
    def addNoteToServiceSYNC(self, host, srvname, newNote):
        self._processAction(modelactions.ADDNOTESRV, [host, srvname, newNote], sync=True)
    def addNoteSYNC(self, model_object, newNote):
        self._processAction(modelactions.ADDNOTE, [model_object, newNote], sync=True)
    # Note deletion wrappers; same ASYNC/SYNC convention as above.
    def delNoteFromApplicationASYNC(self, hostname, appname, note):
        self.__addPendingAction(modelactions.DELNOTEAPP, hostname, appname, note)
    def delNoteFromApplicationSYNC(self, hostname, appname, note):
        self._processAction(modelactions.DELNOTEAPP, [hostname, appname, note], sync=True)
    def delNoteFromInterfaceASYNC(self, hostname, intname, note):
        self.__addPendingAction(modelactions.DELNOTEINT, hostname, intname, note)
    def delNoteFromInterfaceSYNC(self, hostname, intname, note):
        self._processAction(modelactions.DELNOTEINT, [hostname, intname, note], sync=True)
    def delNoteFromHostASYNC(self, hostname, note):
        self.__addPendingAction(modelactions.DELNOTEHOST, hostname, note)
    def delNoteFromHostSYNC(self, hostname, note):
        self._processAction(modelactions.DELNOTEHOST, [hostname, note], sync=True)
    def delNoteFromServiceASYNC(self, hostname, srvname, note):
        self.__addPendingAction(modelactions.DELNOTESRV, hostname, srvname, note)
    def delNoteFromServiceSYNC(self, hostname, srvname, note):
        self._processAction(modelactions.DELNOTESRV, [hostname, srvname, note], sync=True)
    def delNoteSYNC(self, model_object, note_id):
        self._processAction(modelactions.DELNOTE, [model_object, note_id], sync=True)
    # Credential wrappers; same ASYNC/SYNC convention as above.
    def addCredToServiceASYNC(self, host, srvname, newCred):
        self.__addPendingAction(modelactions.ADDCREDSRV, host, srvname, newCred)
    def addCredToServiceSYNC(self, host, srvname, newCred):
        self._processAction(modelactions.ADDCREDSRV, [host, srvname, newCred], sync=True)
    def delCredFromServiceASYNC(self, hostname, srvname, cred):
        self.__addPendingAction(modelactions.DELCREDSRV, hostname, srvname, cred)
def delCredFromServiceSYNC(self, hostname, srvname, cred):
    """Synchronously remove credential `cred` from service `srvname` on
    host `hostname`.

    Bug fix: the parameter was named `note` while the body referenced the
    undefined name `cred`, so every call raised NameError. Renaming the
    parameter is safe -- no caller could have used this method before.
    """
    self._processAction(modelactions.DELCREDSRV, [hostname, srvname, cred], sync=True)
def __addNote(self, action, host_name, item_name=None, note=None, note_id=None):
    """Add `note` to the object selected by `action` under host `host_name`.

    action    -- one of modelactions.ADDNOTE{HOST,APP,INT,SRV,NOTE}
    item_name -- name/id of the application/interface/service (unused
                 for ADDNOTEHOST)
    note_id   -- id of the parent note when action is ADDNOTENOTE
    Returns True on success, False otherwise.
    """
    res = False
    #self.__acquire_host_lock()
    # if host is not found nothing is done with the new interface
    host = self._getValueByID("_hosts", host_name)
    if host is not None:
        if action == modelactions.ADDNOTEHOST:
            res = host.addNote(note)
        else:
            # select the accessor that resolves item_name for this action
            if action == modelactions.ADDNOTEAPP:
                _getOne = host.getApplication
            elif action == modelactions.ADDNOTEINT:
                _getOne = host.getInterface
            elif action == modelactions.ADDNOTESRV:
                _getOne = host.getService
            elif action == modelactions.ADDNOTENOTE:
                # NOTE(review): if getService returns None the next line
                # raises AttributeError -- confirm callers always pass a
                # valid service name for ADDNOTENOTE.
                service = host.getService(item_name)
                _getOne = service.getNote
                # for nested notes the lookup key is the parent note id
                item_name = note_id
            item = _getOne(item_name)
            if item is not None:
                res = item.addNote(note)
            else:
                api.devlog("__addNote: GetNote ID error" + str(item))
        notifier.editHost(host)
    else:
        api.devlog("__addNote failed. Hostname: %s not found" % host_name)
    return res
def __addNoteToModelObject(self, model_object, note=None):
    """Attach `note` to an arbitrary model object, merging with an
    existing note of the same id. Returns True on success."""
    if model_object is None:
        return False
    existing = model_object.getNote(note.getID())
    if existing:
        ok = self.addUpdate(existing, note)
    else:
        ok = model_object.addNote(note)
    if ok:
        notifier.editHost(model_object.getHost())
    return ok
def __addNoteToHost(self, host_id, note=None):
    """Attach `note` to the host identified by `host_id`.

    Merges with an existing note of the same id; returns True on success,
    False (with a devlog entry) when the host cannot be found.
    """
    host = self._getValueByID("_hosts", host_id)
    if host is None:
        api.devlog("__addNoteToHost failed. Hostname: %s not found" %
                   host_id)
        return False
    existing = host.getNote(note.getID())
    ok = self.addUpdate(existing, note) if existing else host.addNote(note)
    if ok:
        notifier.editHost(host)
    return ok
def __addNoteToInterface(self, host_id, interface_id, note=None):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
interface = host.getInterface(interface_id)
if interface is not None:
old_note = interface.getNote(note.getID())
if old_note:
res = self.addUpdate(old_note, note)
else:
res = interface.addNote(note)
if res:
notifier.editHost(host)
else:
api.devlog("__addNote failed. Host ID: %s not found" % host_id)
return res
def __addNoteToApplication(self, host_id, application_id, note=None):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
application = host.getApplication(application_id)
if application is not None:
old_note = application.getNote(note.getID())
if old_note:
res = self.addUpdate(old_note, note)
else:
res = application.addNote(note)
if res:
notifier.editHost(host)
else:
api.devlog("__addNote failed. Host ID: %s not found" % host_id)
return res
def __addNoteToService(self, host_id, service_id, note=None):
    """Attach `note` to service `service_id` on host `host_id`.

    An existing note with the same id is merged via addUpdate; listeners
    are notified only when something changed. Returns True on success.
    """
    res = False
    host = self._getValueByID("_hosts", host_id)
    if host is not None:
        service = host.getService(service_id)
        if service is not None:
            old_note = service.getNote(note.getID())
            if old_note:
                res = self.addUpdate(old_note, note)
            else:
                res = service.addNote(note)
            if res:
                notifier.editHost(host)
    else:
        api.devlog("__addNote failed. Host ID: %s not found" % host_id)
    return res
def __addNoteToServiceNote(self, host_id, service_id, note_id, note=None):
    """Attach `note` as a child of note `note_id` on service `service_id`
    of host `host_id` (i.e. a nested note).

    Merges with an existing child note of the same id. Returns True on
    success, False when any lookup fails or `note` is None.
    """
    res = False
    host = self._getValueByID("_hosts", host_id)
    if host is not None:
        service = host.getService(service_id)
        if service is not None:
            service_note = service.getNote(note_id)
            # Bug fix: the original dereferenced service_note without a
            # None check, raising AttributeError when note_id was stale.
            if service_note is not None and note is not None:
                old_note = service_note.getNote(note.getID())
                if old_note:
                    res = self.addUpdate(old_note, note)
                else:
                    res = service_note.addNote(note)
                if res:
                    notifier.editHost(host)
    else:
        api.devlog("__addNote failed. Host ID: %s not found" % host_id)
    return res
#DEPRECATED METHOD
def __delNote(self, action, host_name, item_name, note_id):
    """Delete note `note_id` from a host or one of its sub-items.

    action -- one of modelactions.DELNOTE{HOST,APP,INT,SRV}
    item_name == "ALL" removes the note from every matching sub-item.
    Returns True if the last attempted deletion succeeded.
    """
    res = False
    # DO NOT USE self.getHost because it will cause a deadlock
    host = self._getValueByID("_hosts", host_name)
    if host is not None:
        if action == modelactions.DELNOTEHOST:
            res = host.delNote(note_id)
        else:
            # select accessors for the targeted sub-item kind
            # (the unused _delItem bindings of the original were dropped)
            if action == modelactions.DELNOTEAPP:
                _getOne = host.getApplication
                _getAll = host.getAllApplications
            elif action == modelactions.DELNOTEINT:
                _getOne = host.getInterface
                _getAll = host.getAllInterfaces
            elif action == modelactions.DELNOTESRV:
                _getOne = host.getService
                _getAll = host.getAllServices
            if item_name != "ALL":
                item = _getOne(item_name)
                if item is not None:
                    res = item.delNote(note_id)
            else:
                # remove the note from all sub-items.
                # Bug fix: the original called item.delNote(service.getID())
                # where `service` was an undefined name (NameError).
                for item in _getAll():
                    res = item.delNote(note_id)
        notifier.editHost(host)
    # NOTE(review): release without a visible matching acquire -- the
    # acquire is commented out in sibling methods; confirm the lock
    # protocol before relying on this deprecated method.
    self.__release_host_lock()
    return res
def __delNoteFromModelObject(self, model_object, note_id):
res = False
if model_object is not None:
note = model_object.getNote(note_id)
self.__clearNote(note)
res = model_object.delNote(note_id)
if res:
notifier.editHost(model_object.getHost())
return res
def __delNoteFromHost(self, host_id, note_id):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
note = host.getNote(note_id)
self.__clearNote(note)
res = host.delNote(note_id)
if res:
notifier.editHost(host)
return res
def __delNoteFromInterface(self, host_id, interface_id, note_id):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
interface = host.getInterface(interface_id)
if interface is not None:
note = interface.getNote(note_id)
self.__clearNote(note)
res = interface.delNote(note_id)
if res:
notifier.editHost(host)
return res
def __delNoteFromApplication(self, host_id, application_id, note_id):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
application = host.getApplication(application_id)
if application is not None:
note = application.getNote(note_id)
self.__clearNote(note)
res = application.delNote(note_id)
if res:
notifier.editHost(host)
return res
def __delNoteFromService(self, host_id, service_id, note_id):
res = False
host = self._getValueByID("_hosts", host_id)
if host is not None:
service = host.getService(service_id)
if service is not None:
note = service.getNote(note_id)
self.__clearNote(note)
res = service.delNote(note_id)
if res:
notifier.editHost(host)
return res
def __delNoteFromServiceNote(self, host_id, service_id, note_id, deep_note_id):
    """Delete the nested note `deep_note_id` from note `note_id` of
    service `service_id` on host `host_id`. Returns True on success."""
    res = False
    host = self._getValueByID("_hosts", host_id)
    if host is not None:
        service = host.getService(service_id)
        if service is not None:
            note = service.getNote(note_id)
            if note is not None:
                # Bug fix: the original looked the child up with note_id
                # (the parent's id) instead of deep_note_id, so the wrong
                # note (or None) was cleared before deletion.
                deep_note = note.getNote(deep_note_id)
                self.__clearNote(deep_note)
                res = note.delNote(deep_note_id)
                if res:
                    notifier.editHost(host)
    return res
def editNoteSYNC(self, note, name, text):
self._processAction(modelactions.EDITNOTE, [note, name, text], sync=True)
def editNoteASYNC(self, note, name, text):
self.__addPendingAction(modelactions.EDITNOTE, [note, name, text])
def __editNote(self, note, name=None, text=None):
    """Update the name/text of `note` and notify listeners.

    Returns True when a note object was given, False for None.
    """
    if note is None:
        return False
    note.updateAttributes(name, text)
    notifier.editHost(note.getHost())
    return True
def editCredSYNC(self, cred, username, password):
self._processAction(modelactions.EDITCRED, [cred, username, password], sync=True)
def editCredASYNC(self, cred, username, password):
self.__addPendingAction(modelactions.EDITCRED, [cred, username, password])
def __editCred(self, cred, username=None, password=None):
res = False
if cred is not None:
cred.updateAttributes(username, password)
res = True
if res:
notifier.editHost(cred.getHost())
return res
def addCredSYNC(self, model_object, newCred):
self._processAction(modelactions.ADDCRED, [model_object, newCred], sync=True)
def __addCredToModelObject(self, model_object, cred=None):
res = False
if model_object is not None:
old_cred = model_object.getCred(cred.getID())
if old_cred:
res = self.addUpdate(old_cred, cred)
else:
res = model_object.addCred(cred)
if res:
notifier.editHost(model_object.getHost())
return res
def delCredSYNC(self, model_object, cred_id):
self._processAction(modelactions.DELCRED, [model_object, cred_id], sync=True)
def __delCredFromModelObject(self, model_object, cred_id):
res = False
if model_object is not None:
cred = model_object.getCred(cred_id)
self.__clearCred(cred)
res = model_object.delCred(cred_id)
if res:
notifier.editHost(model_object.getHost())
return res
def __addCredToService(self, host_id, service_id, cred=None):
    """Attach credential `cred` to service `service_id` on host `host_id`.

    Merges with an existing credential of the same id. Returns True on
    success, False when the host or service cannot be found.
    """
    res = False
    host = self._getValueByID("_hosts", host_id)
    if host is None:
        api.devlog("__addCred failed. Host ID: %s not found" % host_id)
        return res
    service = host.getService(service_id)
    if service is not None:
        previous = service.getCred(cred.getID())
        if previous:
            res = self.addUpdate(previous, cred)
        else:
            res = service.addCred(cred)
        if res:
            notifier.editHost(host)
    return res
def __delCredFromService(self, host_id, service_id, cred_id):
    """Remove credential `cred_id` from service `service_id` on host
    `host_id`, clearing it first. Returns True on success."""
    res = False
    host = self._getValueByID("_hosts", host_id)
    if host is None:
        return res
    service = host.getService(service_id)
    if service is None:
        return res
    self.__clearCred(service.getCred(cred_id))
    res = service.delCred(cred_id)
    if res:
        notifier.editHost(host)
    return res
def getHost(self, name):
    """Return the host with id `name`, or None if absent.

    Access is serialized with the host lock; do not call this from code
    that already holds the lock (see the deadlock warning in __delNote).
    """
    self.__acquire_host_lock()
    h = self._getValueByID("_hosts", name)
    self.__release_host_lock()
    return h
def getHostsCount(self):
    """Return the number of hosts currently held by the model."""
    return len(self._hosts)
def getAllHosts(self, mode=0):
    """
    Return all hosts in the model.
    mode = 0 returns a list of host objects
    mode = 1 returns a dictionary of host objects with their id as key

    NOTE(review): `mode` is currently ignored -- the values() view is
    always returned; confirm intended behavior before relying on mode=1.
    """
    #TODO: this can be a problem because if a host is deleted
    # while another is using this host list, then the information
    # provided here would be wrong
    self.__acquire_host_lock()
    #hosts = self.__getattribute__("_hosts").getContainer()
    hosts = self.__getattribute__("_hosts").values()
    self.__release_host_lock()
    return hosts
def setWorkspace(self, workspace):
    """Switch the model to `workspace`: load its hosts, rebuild the
    name/IP search index and broadcast the workspace change."""
    self._workspace = workspace
    self._hosts = self._workspace.getContainee()
    self._workspace.load()
    self.createIndex(self._hosts)
    notifier.workspaceChanged(self._workspace)
def createIndex(self, hosts):
    """Rebuild the word-trie search index from `hosts`, indexing each
    host name, every non-placeholder interface IPv4/IPv6 address and
    all interface hostnames."""
    self.treeWordsTries = TreeWordsTries()
    self.treeWordsTries.clear()
    for k in hosts.keys():
        h = hosts[k]
        self.treeWordsTries.addWord(h.getName())
        for intr in h.getAllInterfaces():
            ipv4 = intr.ipv4
            ipv6 = intr.ipv6
            # skip the all-zero placeholder addresses and missing ones
            if not ipv4['address'] in ["0.0.0.0", None]:
                self.treeWordsTries.addWord(ipv4['address'])
            if not ipv6['address'] in ["0000:0000:0000:0000:0000:0000:0000:0000", None]:
                self.treeWordsTries.addWord(ipv6['address'])
            for hostname in intr.getHostnames():
                self.treeWordsTries.addWord(hostname)
def getWorkspace(self):
return self._workspace
def checkPermissions(self, op):
## In order to use the decorator passPermissionsOrRaise
## The client should implement checkPermissions method.
self.__sec.checkPermissions(op)
def getWorkspaceSyncronizer(self):
return WorkspaceSyncronizer(self.getWorkspace())
#@passPermissionsOrRaise
@lockModel
def syncActiveWorkspace(self):
    """Synchronize the active workspace with its remote counterpart.

    Refuses to sync while unresolved conflicts exist; shows a popup and
    returns False on any failure. On success, notifies listeners with
    the freshly loaded host list and returns True.
    """
    if len(self.getWorkspace().getConflicts()):
        #There are some conflicts
        notifier.showPopup("Sync Failed! \nYou should check if there are some conflicts to resolve")
        return False
    ws = self.getWorkspaceSyncronizer()
    if not ws.sync():
        notifier.showPopup("Sync Failed! \nYou should check if there are some conflicts to resolve")
        return False
    notifier.workspaceLoad(self.getAllHosts())
    return True
|
dan-mi-sun/bitcoin | refs/heads/master | qa/rpc-tests/rawtransactions.py | 87 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test re-org scenarios with a mempool that contains transactions
# that spend (directly or indirectly) coinbase transactions.
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from pprint import pprint
from time import sleep
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
    # start from a clean 3-node chain instead of the cached one
    print("Initializing test directory "+self.options.tmpdir)
    initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self, split=False):
    """Start three nodes and connect them in a full mesh.

    `split` is accepted for framework compatibility; the network is
    never actually split in this test.
    """
    self.nodes = start_nodes(3, self.options.tmpdir)
    #connect to a local machine for debugging
    #url = "http://bitcoinrpc:DP6DvqZtqXarpeNWyN3LZTFchCCyCUuHwNF7E8pX99x1@%s:%d" % ('127.0.0.1', 18332)
    #proxy = AuthServiceProxy(url)
    #proxy.url = url # store URL on proxy for info
    #self.nodes.append(proxy)
    connect_nodes_bi(self.nodes,0,1)
    connect_nodes_bi(self.nodes,1,2)
    connect_nodes_bi(self.nodes,0,2)
    self.is_network_split=False
    self.sync_all()
def run_test(self):
    """End-to-end checks of the *rawtransaction RPCs:
    1) sendrawtransaction rejects a tx that spends a nonexistent input,
    2) a payment to a 2-of-2 multisig address is counted in node2's
       balance (node2 holds both keys),
    3) a 2-of-3 multisig output can only be completely signed by the
       node holding two of the keys, then spent back to node0.
    """
    #prepare some coins for multiple *rawtransaction commands
    self.nodes[2].generate(1)
    self.sync_all()
    # mine 101 blocks so node0's first coinbase matures and is spendable
    self.nodes[0].generate(101)
    self.sync_all()
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5);
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0);
    self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0);
    self.sync_all()
    self.nodes[0].generate(5)
    self.sync_all()

    #########################################
    # sendrawtransaction with missing input #
    #########################################
    inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists
    outputs = { self.nodes[0].getnewaddress() : 4.998 }
    rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
    rawtx = self.nodes[2].signrawtransaction(rawtx)
    errorString = ""
    try:
        rawtx = self.nodes[2].sendrawtransaction(rawtx['hex'])
    except JSONRPCException,e:
        errorString = e.error['message']
    assert_equal("Missing inputs" in errorString, True);

    #########################
    # RAW TX MULTISIG TESTS #
    #########################
    # 2of2 test
    addr1 = self.nodes[2].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()
    addr1Obj = self.nodes[2].validateaddress(addr1)
    addr2Obj = self.nodes[2].validateaddress(addr2)
    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
    mSigObjValid = self.nodes[2].validateaddress(mSigObj)

    #use balance deltas instead of absolute values
    bal = self.nodes[2].getbalance()

    # send 1.2 BTC to msig adr
    txId = self.nodes[0].sendtoaddress(mSigObj, 1.2);
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance

    # 2of3 test from different nodes
    bal = self.nodes[2].getbalance()
    addr1 = self.nodes[1].getnewaddress()
    addr2 = self.nodes[2].getnewaddress()
    addr3 = self.nodes[2].getnewaddress()
    addr1Obj = self.nodes[1].validateaddress(addr1)
    addr2Obj = self.nodes[2].validateaddress(addr2)
    addr3Obj = self.nodes[2].validateaddress(addr3)
    mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
    mSigObjValid = self.nodes[2].validateaddress(mSigObj)

    txId = self.nodes[0].sendtoaddress(mSigObj, 2.2);
    decTx = self.nodes[0].gettransaction(txId)
    rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
    sPK = rawTx['vout'][0]['scriptPubKey']['hex']
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()

    #THIS IS A INCOMPLETE FEATURE
    #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
    assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable

    # find the 2.2 BTC multisig output among the transaction's vouts
    txDetails = self.nodes[0].gettransaction(txId, True)
    rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
    vout = False
    for outpoint in rawTx['vout']:
        if outpoint['value'] == Decimal('2.20000000'):
            vout = outpoint
            break;

    bal = self.nodes[0].getbalance()
    inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex']}]
    outputs = { self.nodes[0].getnewaddress() : 2.19 }
    rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
    rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
    assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
    rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
    assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
    self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
    rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
    self.sync_all()
    self.nodes[0].generate(1)
    self.sync_all()
    assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
if __name__ == '__main__':
RawTransactionsTest().main()
|
pyrotron/heekscnc | refs/heads/master | pycnc/wxProgramWindow.py | 25 | import wx
class ProgramWindow(wx.ScrolledWindow):
    """Scrolled window hosting a single multiline, non-wrapping text
    control that is kept resized to fill the client area (used to show
    generated CNC program text)."""
    def __init__(self, parent):
        wx.ScrolledWindow.__init__(self, parent, name = 'Program', style = wx.HSCROLL + wx.VSCROLL + wx.NO_FULL_REPAINT_ON_RESIZE)
        self.textCtrl = wx.TextCtrl(self, 100, "", style = wx.TE_MULTILINE + wx.TE_DONTWRAP)
        self.textCtrl.SetMaxLength(0) # Ensure the length is as long as this operating system supports. (It may be only 32kb or 64kb)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Resize()
    def OnSize(self, event):
        # keep the text control stretched to the new size, then let wx
        # continue default EVT_SIZE processing
        self.Resize()
        event.Skip()
    def Resize(self):
        """Stretch the text control to fill the current client area."""
        self.textCtrl.SetSize(self.GetClientSize())
    def Clear(self):
        """Remove all program text."""
        self.textCtrl.Clear()
    def AppendText(self, value):
        """Append str(value) to the displayed program text."""
        self.textCtrl.AppendText(str(value))
pombredanne/pylearn2 | refs/heads/master | pylearn2/model_extensions/tests/test_norm_constraint.py | 21 | """
Tests of functionality in norm_constraint.py
"""
import numpy as np
from theano.compat import OrderedDict
from theano import function
from pylearn2.models.model import Model
from pylearn2.model_extensions.norm_constraint import ConstrainFilterMaxNorm
from pylearn2.model_extensions.norm_constraint import MaxL2FilterNorm
from pylearn2.utils import sharedX
class ModelWithW(Model):
    """
    A dummy model that has some weights.

    Parameters
    ----------
    W : 2-D theano shared
        The model's weights.
    """
    def __init__(self, W):
        # store the shared weights, then run base-class initialization
        self.W = W
        super(ModelWithW, self).__init__()
def test_max_l2_filter_norm():
    """
    Test that MaxL2FilterNorm matches a manual implementation.

    The update doubles W; the extension must then rescale each column
    whose L2 norm exceeds `limit` back down to exactly `limit`.
    """
    limit = 1.
    ext = MaxL2FilterNorm(limit)
    W = np.zeros((2, 4))
    # Column 0 tests the case where a column has zero norm
    # Column 1 tests the case where a column is smaller than the limit
    W[0, 1] = .5
    # Column 2 tests the case where a column is on the limit
    W[0, 2] = 1.
    # Column 3 tests the case where a column is too big
    W[0, 3] = 2.
    # halve now so that doubling in the update restores the values above
    W = sharedX(W / 2.)
    model = ModelWithW(W)
    model.extensions.append(ext)
    updates = OrderedDict()
    updates[W] = W * 2.
    model.modify_updates(updates)
    f = function([], updates=updates)
    f()
    W = W.get_value()
    assert W.shape == (2, 4)
    assert np.abs(W[1, :]).max() == 0
    assert W[0, 0] == 0.
    assert W[0, 1] == 0.5
    assert W[0, 2] == 1.
    assert W[0, 3] == 1., W[0, 3]
def test_constrain_filter_max_norm():
    """
    Test that ConstrainFilterNorm matches a manual implementation.

    Same setup as test_max_l2_filter_norm, but exercising the
    ConstrainFilterMaxNorm extension (per-element clipping).
    """
    limit = 1.
    ext = ConstrainFilterMaxNorm(limit)
    W = np.zeros((2, 4))
    # Column 0 tests the case where an element has zero norm
    # Column 1 tests the case where an element is smaller than the limit
    W[0, 1] = .5
    # Column 2 tests the case where an element is on the limit
    W[0, 2] = 1.
    # Column 3 tests the case where an element is too big
    W[0, 3] = 2.
    # halve now so that doubling in the update restores the values above
    W = sharedX(W / 2.)
    model = ModelWithW(W)
    model.extensions.append(ext)
    updates = OrderedDict()
    updates[W] = W * 2.
    model.modify_updates(updates)
    f = function([], updates=updates)
    f()
    W = W.get_value()
    assert W.shape == (2, 4)
    assert np.abs(W[1, :]).max() == 0
    assert W[0, 0] == 0.
    assert W[0, 1] == 0.5
    assert W[0, 2] == 1.
    assert W[0, 3] == 1., W[0, 3]
|
xiaokeng/robotframework | refs/heads/master | src/robot/running/arguments/__init__.py | 23 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from .argumentmapper import ArgumentMapper
from .argumentparser import (PythonArgumentParser, UserKeywordArgumentParser,
DynamicArgumentParser, JavaArgumentParser)
from .argumentresolver import ArgumentResolver
from .argumentspec import ArgumentSpec
from .argumentvalidator import ArgumentValidator
from .embedded import EmbeddedArguments
# On Jython, Java-based keywords need argument coercion; on other
# interpreters the coercer is replaced by a no-op factory so importers
# can use the name unconditionally.
if sys.platform.startswith('java'):
    from .javaargumentcoercer import JavaArgumentCoercer
else:
    JavaArgumentCoercer = lambda *args: None
|
GuillaumeGomez/servo | refs/heads/master | tests/wpt/css-tests/tools/pytest/testing/test_terminal.py | 169 | """
terminal reporting of the full testing process.
"""
import collections
import sys
import _pytest._pluggy as pluggy
import _pytest._code
import py
import pytest
from _pytest import runner
from _pytest.main import EXIT_NOTESTSCOLLECTED
from _pytest.terminal import TerminalReporter, repr_pythonversion, getreportopt
from _pytest.terminal import build_summary_stats_line, _plugin_nameversions
def basic_run_report(item):
    """Run the setup phase then the call phase for `item` and return the
    call-phase report; nothing is logged to the terminal."""
    runner.call_and_report(item, "setup", log=False)
    return runner.call_and_report(item, "call", log=False)
DistInfo = collections.namedtuple('DistInfo', ['project_name', 'version'])
class Option:
    """Bundle of CLI option flags used to parameterize terminal tests."""

    def __init__(self, verbose=False, fulltrace=False):
        self.verbose = verbose
        self.fulltrace = fulltrace

    @property
    def args(self):
        """Return the pytest command-line flags for this option set."""
        flags = []
        if self.verbose:
            flags.append('-v')
        if self.fulltrace:
            flags.append('--fulltrace')
        return flags
def pytest_generate_tests(metafunc):
    # Parametrize any test taking an `option` funcarg with the four
    # verbosity/traceback configurations exercised by this module.
    if "option" in metafunc.fixturenames:
        metafunc.addcall(id="default",
                         funcargs={'option': Option(verbose=False)})
        metafunc.addcall(id="verbose",
                         funcargs={'option': Option(verbose=True)})
        metafunc.addcall(id="quiet",
                         funcargs={'option': Option(verbose= -1)})
        metafunc.addcall(id="fulltrace",
                         funcargs={'option': Option(fulltrace=True)})
@pytest.mark.parametrize('input,expected', [
([DistInfo(project_name='test', version=1)], ['test-1']),
([DistInfo(project_name='pytest-test', version=1)], ['test-1']),
([
DistInfo(project_name='test', version=1),
DistInfo(project_name='test', version=1)
], ['test-1']),
], ids=['normal', 'prefix-strip', 'deduplicate'])
def test_plugin_nameversion(input, expected):
pluginlist = [(None, x) for x in input]
result = _plugin_nameversions(pluginlist)
assert result == expected
class TestTerminal:
def test_pass_skip_fail(self, testdir, option):
testdir.makepyfile("""
import pytest
def test_ok():
pass
def test_skip():
pytest.skip("xx")
def test_func():
assert 0
""")
result = testdir.runpytest(*option.args)
if option.verbose:
result.stdout.fnmatch_lines([
"*test_pass_skip_fail.py::test_ok PASS*",
"*test_pass_skip_fail.py::test_skip SKIP*",
"*test_pass_skip_fail.py::test_func FAIL*",
])
else:
result.stdout.fnmatch_lines([
"*test_pass_skip_fail.py .sF"
])
result.stdout.fnmatch_lines([
" def test_func():",
"> assert 0",
"E assert 0",
])
def test_internalerror(self, testdir, linecomp):
modcol = testdir.getmodulecol("def test_one(): pass")
rep = TerminalReporter(modcol.config, file=linecomp.stringio)
excinfo = pytest.raises(ValueError, "raise ValueError('hello')")
rep.pytest_internalerror(excinfo.getrepr())
linecomp.assert_contains_lines([
"INTERNALERROR> *ValueError*hello*"
])
def test_writeline(self, testdir, linecomp):
modcol = testdir.getmodulecol("def test_one(): pass")
rep = TerminalReporter(modcol.config, file=linecomp.stringio)
rep.write_fspath_result(modcol.nodeid, ".")
rep.write_line("hello world")
lines = linecomp.stringio.getvalue().split('\n')
assert not lines[0]
assert lines[1].endswith(modcol.name + " .")
assert lines[2] == "hello world"
def test_show_runtest_logstart(self, testdir, linecomp):
item = testdir.getitem("def test_func(): pass")
tr = TerminalReporter(item.config, file=linecomp.stringio)
item.config.pluginmanager.register(tr)
location = item.reportinfo()
tr.config.hook.pytest_runtest_logstart(nodeid=item.nodeid,
location=location, fspath=str(item.fspath))
linecomp.assert_contains_lines([
"*test_show_runtest_logstart.py*"
])
def test_runtest_location_shown_before_test_starts(self, testdir):
testdir.makepyfile("""
def test_1():
import time
time.sleep(20)
""")
child = testdir.spawn_pytest("")
child.expect(".*test_runtest_location.*py")
child.sendeof()
child.kill(15)
def test_itemreport_subclasses_show_subclassed_file(self, testdir):
testdir.makepyfile(test_p1="""
class BaseTests:
def test_p1(self):
pass
class TestClass(BaseTests):
pass
""")
p2 = testdir.makepyfile(test_p2="""
from test_p1 import BaseTests
class TestMore(BaseTests):
pass
""")
result = testdir.runpytest(p2)
result.stdout.fnmatch_lines([
"*test_p2.py .",
"*1 passed*",
])
result = testdir.runpytest("-v", p2)
result.stdout.fnmatch_lines([
"*test_p2.py::TestMore::test_p1* <- *test_p1.py*PASSED",
])
def test_itemreport_directclasses_not_shown_as_subclasses(self, testdir):
a = testdir.mkpydir("a123")
a.join("test_hello123.py").write(_pytest._code.Source("""
class TestClass:
def test_method(self):
pass
"""))
result = testdir.runpytest("-v")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*a123/test_hello123.py*PASS*",
])
assert " <- " not in result.stdout.str()
def test_keyboard_interrupt(self, testdir, option):
testdir.makepyfile("""
def test_foobar():
assert 0
def test_spamegg():
import py; pytest.skip('skip me please!')
def test_interrupt_me():
raise KeyboardInterrupt # simulating the user
""")
result = testdir.runpytest(*option.args, no_reraise_ctrlc=True)
result.stdout.fnmatch_lines([
" def test_foobar():",
"> assert 0",
"E assert 0",
"*_keyboard_interrupt.py:6: KeyboardInterrupt*",
])
if option.fulltrace:
result.stdout.fnmatch_lines([
"*raise KeyboardInterrupt # simulating the user*",
])
else:
result.stdout.fnmatch_lines([
"to show a full traceback on KeyboardInterrupt use --fulltrace"
])
result.stdout.fnmatch_lines(['*KeyboardInterrupt*'])
def test_keyboard_in_sessionstart(self, testdir):
testdir.makeconftest("""
def pytest_sessionstart():
raise KeyboardInterrupt
""")
testdir.makepyfile("""
def test_foobar():
pass
""")
result = testdir.runpytest(no_reraise_ctrlc=True)
assert result.ret == 2
result.stdout.fnmatch_lines(['*KeyboardInterrupt*'])
class TestCollectonly:
def test_collectonly_basic(self, testdir):
testdir.makepyfile("""
def test_func():
pass
""")
result = testdir.runpytest("--collect-only",)
result.stdout.fnmatch_lines([
"<Module 'test_collectonly_basic.py'>",
" <Function 'test_func'>",
])
def test_collectonly_skipped_module(self, testdir):
testdir.makepyfile("""
import pytest
pytest.skip("hello")
""")
result = testdir.runpytest("--collect-only", "-rs")
result.stdout.fnmatch_lines([
"SKIP*hello*",
"*1 skip*",
])
def test_collectonly_failed_module(self, testdir):
testdir.makepyfile("""raise ValueError(0)""")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*raise ValueError*",
"*1 error*",
])
def test_collectonly_fatal(self, testdir):
testdir.makeconftest("""
def pytest_collectstart(collector):
assert 0, "urgs"
""")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*INTERNAL*args*"
])
assert result.ret == 3
def test_collectonly_simple(self, testdir):
p = testdir.makepyfile("""
def test_func1():
pass
class TestClass:
def test_method(self):
pass
""")
result = testdir.runpytest("--collect-only", p)
#assert stderr.startswith("inserting into sys.path")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*<Module '*.py'>",
"* <Function 'test_func1'*>",
"* <Class 'TestClass'>",
#"* <Instance '()'>",
"* <Function 'test_method'*>",
])
def test_collectonly_error(self, testdir):
p = testdir.makepyfile("import Errlkjqweqwe")
result = testdir.runpytest("--collect-only", p)
assert result.ret == 1
result.stdout.fnmatch_lines(_pytest._code.Source("""
*ERROR*
*import Errlk*
*ImportError*
*1 error*
""").strip())
def test_collectonly_missing_path(self, testdir):
"""this checks issue 115,
failure in parseargs will cause session
not to have the items attribute
"""
result = testdir.runpytest("--collect-only", "uhm_missing_path")
assert result.ret == 4
result.stderr.fnmatch_lines([
'*ERROR: file not found*',
])
def test_collectonly_quiet(self, testdir):
testdir.makepyfile("def test_foo(): pass")
result = testdir.runpytest("--collect-only", "-q")
result.stdout.fnmatch_lines([
'*test_foo*',
])
def test_collectonly_more_quiet(self, testdir):
testdir.makepyfile(test_fun="def test_foo(): pass")
result = testdir.runpytest("--collect-only", "-qq")
result.stdout.fnmatch_lines([
'*test_fun.py: 1*',
])
def test_repr_python_version(monkeypatch):
    """repr_pythonversion formats a full 5-tuple version_info and falls
    back to plain str() for tuples of other lengths."""
    try:
        monkeypatch.setattr(sys, 'version_info', (2, 5, 1, 'final', 0))
        assert repr_pythonversion() == "2.5.1-final-0"
        # the direct assignment below is still restored by undo(),
        # because monkeypatch recorded the original value in setattr()
        py.std.sys.version_info = x = (2, 3)
        assert repr_pythonversion() == str(x)
    finally:
        monkeypatch.undo() # do this early as pytest can get confused
class TestFixtureReporting:
def test_setup_fixture_error(self, testdir):
testdir.makepyfile("""
def setup_function(function):
print ("setup func")
assert 0
def test_nada():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERROR at setup of test_nada*",
"*setup_function(function):*",
"*setup func*",
"*assert 0*",
"*1 error*",
])
assert result.ret != 0
def test_teardown_fixture_error(self, testdir):
testdir.makepyfile("""
def test_nada():
pass
def teardown_function(function):
print ("teardown func")
assert 0
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERROR at teardown*",
"*teardown_function(function):*",
"*assert 0*",
"*Captured stdout*",
"*teardown func*",
"*1 passed*1 error*",
])
def test_teardown_fixture_error_and_test_failure(self, testdir):
testdir.makepyfile("""
def test_fail():
assert 0, "failingfunc"
def teardown_function(function):
print ("teardown func")
assert False
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERROR at teardown of test_fail*",
"*teardown_function(function):*",
"*assert False*",
"*Captured stdout*",
"*teardown func*",
"*test_fail*",
"*def test_fail():",
"*failingfunc*",
"*1 failed*1 error*",
])
class TestTerminalFunctional:
    """Functional checks of the terminal reporter: session header/trailer,
    deselection reporting, verbosity levels and quiet modes."""

    def test_deselected(self, testdir):
        """-k deselection is counted in the summary line."""
        testpath = testdir.makepyfile("""
                def test_one():
                    pass
                def test_two():
                    pass
                def test_three():
                    pass
           """
        )
        result = testdir.runpytest("-k", "test_two:", testpath)
        result.stdout.fnmatch_lines([
            "*test_deselected.py ..",
            "=* 1 test*deselected by*test_two:*=",
        ])
        assert result.ret == 0

    def test_no_skip_summary_if_failure(self, testdir):
        """Without -rs the skip summary section is not shown, even when a
        skip occurred alongside a failure."""
        testdir.makepyfile("""
            import pytest
            def test_ok():
                pass
            def test_fail():
                assert 0
            def test_skip():
                pytest.skip("dontshow")
        """)
        result = testdir.runpytest()
        assert result.stdout.str().find("skip test summary") == -1
        assert result.ret == 1

    def test_passes(self, testdir):
        """Plain run from inside the test directory reports '2 passed'."""
        p1 = testdir.makepyfile("""
            def test_passes():
                pass
            class TestClass:
                def test_method(self):
                    pass
        """)
        # Run from the directory containing the test file, restoring the
        # previous cwd even if runpytest raises.
        old = p1.dirpath().chdir()
        try:
            result = testdir.runpytest()
        finally:
            old.chdir()
        result.stdout.fnmatch_lines([
            "test_passes.py ..",
            "* 2 pass*",
        ])
        assert result.ret == 0

    def test_header_trailer_info(self, testdir):
        """The session header shows platform and the Python/pytest/py/pluggy
        versions; the trailer shows the pass count and elapsed time."""
        testdir.makepyfile("""
            def test_passes():
                pass
        """)
        result = testdir.runpytest()
        verinfo = ".".join(map(str, py.std.sys.version_info[:3]))
        result.stdout.fnmatch_lines([
            "*===== test session starts ====*",
            "platform %s -- Python %s*pytest-%s*py-%s*pluggy-%s" % (
                py.std.sys.platform, verinfo,
                pytest.__version__, py.__version__, pluggy.__version__),
            "*test_header_trailer_info.py .",
            "=* 1 passed*in *.[0-9][0-9] seconds *=",
        ])
        # The 'plugins:' line only appears when plugin distributions exist.
        if pytest.config.pluginmanager.list_plugin_distinfo():
            result.stdout.fnmatch_lines([
                "plugins: *",
            ])

    def test_showlocals(self, testdir):
        """-l (--showlocals) prints local variables in the traceback."""
        p1 = testdir.makepyfile("""
            def test_showlocals():
                x = 3
                y = "x" * 5000
                assert 0
        """)
        result = testdir.runpytest(p1, '-l')
        result.stdout.fnmatch_lines([
            #"_ _ * Locals *",
            "x* = 3",
            "y* = 'xxxxxx*"
        ])

    def test_verbose_reporting(self, testdir, pytestconfig):
        """-v prints one line per test with nodeid and outcome; under xdist
        (-n) the outcome comes before the nodeid."""
        p1 = testdir.makepyfile("""
            import pytest
            def test_fail():
                raise ValueError()
            def test_pass():
                pass
            class TestClass:
                def test_skip(self):
                    pytest.skip("hello")
            def test_gen():
                def check(x):
                    assert x == 1
                yield check, 0
        """)
        result = testdir.runpytest(p1, '-v')
        result.stdout.fnmatch_lines([
            "*test_verbose_reporting.py::test_fail *FAIL*",
            "*test_verbose_reporting.py::test_pass *PASS*",
            "*test_verbose_reporting.py::TestClass::test_skip *SKIP*",
            "*test_verbose_reporting.py::test_gen*0* *FAIL*",
        ])
        assert result.ret == 1
        # The distributed-run half requires the optional xdist plugin.
        if not pytestconfig.pluginmanager.get_plugin("xdist"):
            pytest.skip("xdist plugin not installed")
        result = testdir.runpytest(p1, '-v', '-n 1')
        result.stdout.fnmatch_lines([
            "*FAIL*test_verbose_reporting.py::test_fail*",
        ])
        assert result.ret == 1

    def test_quiet_reporting(self, testdir):
        """-q drops the header and per-file lines but keeps 'passed'."""
        p1 = testdir.makepyfile("def test_pass(): pass")
        result = testdir.runpytest(p1, '-q')
        s = result.stdout.str()
        assert 'test session starts' not in s
        assert p1.basename not in s
        assert "===" not in s
        assert "passed" in s

    def test_more_quiet_reporting(self, testdir):
        """-qq additionally drops the 'passed' summary word."""
        p1 = testdir.makepyfile("def test_pass(): pass")
        result = testdir.runpytest(p1, '-qq')
        s = result.stdout.str()
        assert 'test session starts' not in s
        assert p1.basename not in s
        assert "===" not in s
        assert "passed" not in s
def test_fail_extra_reporting(testdir):
    """-r f adds a short summary section listing failed tests; it must be
    absent from a default run."""
    testdir.makepyfile("def test_this(): assert 0")
    plain_run = testdir.runpytest()
    assert 'short test summary' not in plain_run.stdout.str()
    summary_run = testdir.runpytest('-rf')
    expected_lines = [
        "*test summary*",
        "FAIL*test_fail_extra_reporting*",
    ]
    summary_run.stdout.fnmatch_lines(expected_lines)
def test_fail_reporting_on_pass(testdir):
    """With only passing tests, -rf must not emit the failure summary."""
    testdir.makepyfile("def test_this(): assert 1")
    run_result = testdir.runpytest('-rf')
    captured = run_result.stdout.str()
    assert 'short test summary' not in captured
def test_pass_extra_reporting(testdir):
    """-r p adds a short summary section listing passed tests; it must be
    absent from a default run."""
    testdir.makepyfile("def test_this(): assert 1")
    plain_run = testdir.runpytest()
    assert 'short test summary' not in plain_run.stdout.str()
    summary_run = testdir.runpytest('-rp')
    expected_lines = [
        "*test summary*",
        "PASS*test_pass_extra_reporting*",
    ]
    summary_run.stdout.fnmatch_lines(expected_lines)
def test_pass_reporting_on_fail(testdir):
    """With only failing tests, -rp must not emit the pass summary."""
    testdir.makepyfile("def test_this(): assert 0")
    run_result = testdir.runpytest('-rp')
    captured = run_result.stdout.str()
    assert 'short test summary' not in captured
def test_pass_output_reporting(testdir):
    """Captured stdout of a passing test is shown only with -rP."""
    testdir.makepyfile("""
        def test_pass_output():
            print("Four score and seven years ago...")
    """)
    default_run = testdir.runpytest()
    assert 'Four score and seven years ago...' not in default_run.stdout.str()
    reporting_run = testdir.runpytest('-rP')
    reporting_run.stdout.fnmatch_lines(["Four score and seven years ago..."])
def test_color_yes(testdir):
    """--color=yes forces ANSI escape sequences into the report."""
    testdir.makepyfile("def test_this(): assert 1")
    output = testdir.runpytest('--color=yes').stdout.str()
    assert 'test session starts' in output
    assert '\x1b[1m' in output
def test_color_no(testdir):
    """--color=no suppresses all ANSI escape sequences."""
    testdir.makepyfile("def test_this(): assert 1")
    output = testdir.runpytest('--color=no').stdout.str()
    assert 'test session starts' in output
    assert '\x1b[1m' not in output
@pytest.mark.parametrize('verbose', [True, False])
def test_color_yes_collection_on_non_atty(testdir, verbose):
    """skip collect progress report when working on non-terminals.
    #1397
    """
    testdir.makepyfile("""
        import pytest
        @pytest.mark.parametrize('i', range(10))
        def test_this(i):
            assert 1
    """)
    cmdline = ['--color=yes'] + (['-vv'] if verbose else [])
    output = testdir.runpytest(*cmdline).stdout.str()
    assert 'test session starts' in output
    assert '\x1b[1m' in output
    # The live "collecting N items" progress must never reach a non-tty.
    assert 'collecting 10 items' not in output
    if verbose:
        assert 'collecting ...' in output
        assert 'collected 10 items' in output
def test_getreportopt():
    """getreportopt maps the legacy 'report' option to report characters,
    and an explicit 'reportchars' value takes precedence over it."""
    class config:
        class option:
            reportchars = ""
    legacy_cases = [
        ("xfailed", "x"),
        ("xfailed,skipped", "xs"),
        ("skipped,xfailed", "sx"),
    ]
    for legacy_value, expected_chars in legacy_cases:
        config.option.report = legacy_value
        assert getreportopt(config) == expected_chars
    config.option.report = "skipped"
    for explicit_chars in ("sf", "sfx"):
        config.option.reportchars = explicit_chars
        assert getreportopt(config) == explicit_chars
def test_terminalreporter_reportopt_addopts(testdir):
    """Report chars supplied through ini 'addopts' (-rs here) must be
    visible to the terminal reporter via hasopt()."""
    testdir.makeini("[pytest]\naddopts=-rs")
    testdir.makepyfile("""
        def pytest_funcarg__tr(request):
            tr = request.config.pluginmanager.getplugin("terminalreporter")
            return tr
        def test_opt(tr):
            assert tr.hasopt('skipped')
            assert not tr.hasopt('qwe')
    """)
    run_result = testdir.runpytest()
    run_result.stdout.fnmatch_lines(["*1 passed*"])
def test_tbstyle_short(testdir):
    """--tb=short omits local variables from tracebacks; the default long
    style shows them."""
    p = testdir.makepyfile("""
        def pytest_funcarg__arg(request):
            return 42
        def test_opt(arg):
            x = 0
            assert x
    """)
    short_run = testdir.runpytest("--tb=short")
    short_output = short_run.stdout.str()
    assert 'arg = 42' not in short_output
    assert 'x = 0' not in short_output
    short_run.stdout.fnmatch_lines([
        "*%s:5*" % p.basename,
        " assert x",
        "E assert*",
    ])
    long_output = testdir.runpytest().stdout.str()
    assert 'x = 0' in long_output
    assert 'assert x' in long_output
def test_traceconfig(testdir, monkeypatch):
    """--traceconfig lists the active plugins; with no test files the run
    still exits with EXIT_NOTESTSCOLLECTED.

    Note: the ``monkeypatch`` fixture was requested but never used; it is
    kept in the signature for interface stability but flagged here for
    removal upstream.
    """
    result = testdir.runpytest("--traceconfig")
    result.stdout.fnmatch_lines([
        "*active plugins*"
    ])
    # No tests exist in the fresh testdir, hence this specific exit code.
    assert result.ret == EXIT_NOTESTSCOLLECTED
class TestGenericReporting:
    """ this test class can be subclassed with a different option
    provider to run e.g. distributed tests.
    """
    def test_collect_fail(self, testdir, option):
        """An unimportable test module is reported as a collection error."""
        testdir.makepyfile("import xyz\n")
        result = testdir.runpytest(*option.args)
        result.stdout.fnmatch_lines([
            "? import xyz",
            "E ImportError: No module named *xyz*",
            "*1 error*",
        ])
    def test_maxfailures(self, testdir, option):
        """--maxfail=2 interrupts the session after the second failure,
        so the third failing test is never run."""
        testdir.makepyfile("""
            def test_1():
                assert 0
            def test_2():
                assert 0
            def test_3():
                assert 0
        """)
        result = testdir.runpytest("--maxfail=2", *option.args)
        result.stdout.fnmatch_lines([
            "*def test_1():*",
            "*def test_2():*",
            "*!! Interrupted: stopping after 2 failures*!!*",
            "*2 failed*",
        ])
    def test_tb_option(self, testdir, option):
        """--tb=long/short/no progressively reduce traceback detail:
        long shows source+output, short drops the called frame's source,
        no suppresses the FAILURES section entirely."""
        testdir.makepyfile("""
            import pytest
            def g():
                raise IndexError
            def test_func():
                print (6*7)
                g() # --calling--
        """)
        for tbopt in ["long", "short", "no"]:
            print('testing --tb=%s...' % tbopt)
            result = testdir.runpytest('--tb=%s' % tbopt)
            s = result.stdout.str()
            if tbopt == "long":
                assert 'print (6*7)' in s
            else:
                assert 'print (6*7)' not in s
            if tbopt != "no":
                assert '--calling--' in s
                assert 'IndexError' in s
            else:
                assert 'FAILURES' not in s
                assert '--calling--' not in s
                assert 'IndexError' not in s
    def test_tb_crashline(self, testdir, option):
        """--tb=line reduces each failure to a single file:line message."""
        p = testdir.makepyfile("""
            import pytest
            def g():
                raise IndexError
            def test_func1():
                print (6*7)
                g() # --calling--
            def test_func2():
                assert 0, "hello"
        """)
        result = testdir.runpytest("--tb=line")
        bn = p.basename
        result.stdout.fnmatch_lines([
            "*%s:3: IndexError*" % bn,
            "*%s:8: AssertionError: hello*" % bn,
        ])
        s = result.stdout.str()
        # Full source lines must not appear in line-style output.
        assert "def test_func2" not in s
    def test_pytest_report_header(self, testdir, option):
        """pytest_report_header results from conftest files (including ones
        in collected subdirectories) are rendered at session start; a hook
        may return a string or a list of lines."""
        testdir.makeconftest("""
            def pytest_sessionstart(session):
                session.config._somevalue = 42
            def pytest_report_header(config):
                return "hello: %s" % config._somevalue
        """)
        testdir.mkdir("a").join("conftest.py").write("""
        def pytest_report_header(config, startdir):
            return ["line1", str(startdir)]
        """)
        result = testdir.runpytest("a")
        result.stdout.fnmatch_lines([
            "*hello: 42*",
            "line1",
            str(testdir.tmpdir),
        ])
@pytest.mark.xfail("not hasattr(os, 'dup')")
def test_fdopen_kept_alive_issue124(testdir):
    """Keeping a dup of stdout alive across tests must not break capturing
    (issue #124)."""
    testdir.makepyfile("""
        import os, sys
        k = []
        def test_open_file_and_keep_alive(capfd):
            stdout = os.fdopen(1, 'w', 1)
            k.append(stdout)
        def test_close_kept_alive_file():
            stdout = k.pop()
            stdout.close()
    """)
    run_result = testdir.runpytest()
    run_result.stdout.fnmatch_lines(["*2 passed*"])
def test_tbstyle_native_setup_error(testdir):
    """--tb=native renders fixture/setup errors as plain Python tracebacks
    (File "...", line N, in func)."""
    testdir.makepyfile("""
        import pytest
        @pytest.fixture
        def setup_error_fixture():
            raise Exception("error in exception")
        def test_error_fixture(setup_error_fixture):
            pass
    """)
    native_run = testdir.runpytest("--tb=native")
    expected = '*File *test_tbstyle_native_setup_error.py", line *, in setup_error_fixture*'
    native_run.stdout.fnmatch_lines([expected])
def test_terminal_summary(testdir):
    """A conftest pytest_terminal_summary hook can emit its own section
    header ("hello") and content line ("world") into the report."""
    testdir.makeconftest("""
        def pytest_terminal_summary(terminalreporter):
            w = terminalreporter
            w.section("hello")
            w.line("world")
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines("""
        *==== hello ====*
        world
    """)
def test_terminal_summary_warnings_are_displayed(testdir):
    """Test that warnings emitted during pytest_terminal_summary are displayed.
    (#1305).
    """
    testdir.makeconftest("""
        def pytest_terminal_summary(terminalreporter):
            config = terminalreporter.config
            config.warn('C1', 'internal warning')
    """)
    # -rw enables the pytest-warnings summary section.
    result = testdir.runpytest('-rw')
    result.stdout.fnmatch_lines([
        '*C1*internal warning',
        '*== 1 pytest-warnings in *',
    ])
@pytest.mark.parametrize("exp_color, exp_line, stats_arg", [
    # The method under test only cares about the length of each
    # dict value, not the actual contents, so tuples of anything
    # suffice
    # Important statuses -- the highest priority of these always wins
    ("red", "1 failed", {"failed": (1,)}),
    ("red", "1 failed, 1 passed", {"failed": (1,), "passed": (1,)}),
    ("red", "1 error", {"error": (1,)}),
    ("red", "1 passed, 1 error", {"error": (1,), "passed": (1,)}),
    # (a status that's not known to the code)
    ("yellow", "1 weird", {"weird": (1,)}),
    ("yellow", "1 passed, 1 weird", {"weird": (1,), "passed": (1,)}),
    ("yellow", "1 pytest-warnings", {"warnings": (1,)}),
    ("yellow", "1 passed, 1 pytest-warnings", {"warnings": (1,),
                                               "passed": (1,)}),
    ("green", "5 passed", {"passed": (1,2,3,4,5)}),
    # "Boring" statuses.  These have no effect on the color of the summary
    # line.  Thus, if *every* test has a boring status, the summary line stays
    # at its default color, i.e. yellow, to warn the user that the test run
    # produced no useful information
    ("yellow", "1 skipped", {"skipped": (1,)}),
    ("green", "1 passed, 1 skipped", {"skipped": (1,), "passed": (1,)}),
    ("yellow", "1 deselected", {"deselected": (1,)}),
    ("green", "1 passed, 1 deselected", {"deselected": (1,), "passed": (1,)}),
    ("yellow", "1 xfailed", {"xfailed": (1,)}),
    ("green", "1 passed, 1 xfailed", {"xfailed": (1,), "passed": (1,)}),
    ("yellow", "1 xpassed", {"xpassed": (1,)}),
    ("green", "1 passed, 1 xpassed", {"xpassed": (1,), "passed": (1,)}),
    # Likewise if no tests were found at all
    ("yellow", "no tests ran", {}),
    # Test the empty-key special case
    ("yellow", "no tests ran", {"": (1,)}),
    ("green", "1 passed", {"": (1,), "passed": (1,)}),
    # A couple more complex combinations
    ("red", "1 failed, 2 passed, 3 xfailed",
        {"passed": (1,2), "failed": (1,), "xfailed": (1,2,3)}),
    ("green", "1 passed, 2 skipped, 3 deselected, 2 xfailed",
        {"passed": (1,),
         "skipped": (1,2),
         "deselected": (1,2,3),
         "xfailed": (1,2)}),
])
def test_summary_stats(exp_line, exp_color, stats_arg):
    """build_summary_stats_line must render the expected summary text and
    color for the given stats mapping (only value lengths matter)."""
    print("Based on stats: %s" % stats_arg)
    print("Expect summary: \"%s\"; with color \"%s\"" % (exp_line, exp_color))
    (line, color) = build_summary_stats_line(stats_arg)
    print("Actually got: \"%s\"; with color \"%s\"" % (line, color))
    assert line == exp_line
    assert color == exp_color
|
plotly/python-api | refs/heads/master | packages/python/plotly/plotly/validators/scattergeo/_hoverlabel.py | 2 | import _plotly_utils.basevalidators
class HoverlabelValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Validator for the compound 'hoverlabel' property of scattergeo traces.

    NOTE(review): this module appears to be produced by plotly's code
    generator — prefer regenerating over hand-editing; confirm before
    changing anything here.
    """

    def __init__(self, plotly_name="hoverlabel", parent_name="scattergeo", **kwargs):
        super(HoverlabelValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the data class this validator targets (resolved by
            # the base class); overridable via kwargs.
            data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
            # Human-readable description of the child properties; the
            # literal text below is emitted into generated documentation.
            data_docs=kwargs.pop(
                "data_docs",
                """
            align
                Sets the horizontal alignment of the text
                content within hover label box. Has an effect
                only if the hover label text spans more two or
                more lines
            alignsrc
                Sets the source reference on Chart Studio Cloud
                for  align .
            bgcolor
                Sets the background color of the hover labels
                for this trace
            bgcolorsrc
                Sets the source reference on Chart Studio Cloud
                for  bgcolor .
            bordercolor
                Sets the border color of the hover labels for
                this trace.
            bordercolorsrc
                Sets the source reference on Chart Studio Cloud
                for  bordercolor .
            font
                Sets the font used in hover labels.
            namelength
                Sets the default length (in number of
                characters) of the trace name in the hover
                labels for all traces. -1 shows the whole name
                regardless of length. 0-3 shows the first 0-3
                characters, and an integer >3 will show the
                whole name if it is less than that many
                characters, but if it is longer, will truncate
                to `namelength - 3` characters and add an
                ellipsis.
            namelengthsrc
                Sets the source reference on Chart Studio Cloud
                for  namelength .
            """,
            ),
            **kwargs
        )
|
kevinsawicki/node-gyp | refs/heads/master | gyp/test/external-cross-compile/src/fake_cross.py | 344 | #!/usr/bin/python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# Concatenate every input file (argv[2:]) into the output file (argv[1]).
# Context managers guarantee all files are closed even if a read or write
# raises; the original left handles open on error.
with open(sys.argv[1], 'w') as out_file:
    for in_name in sys.argv[2:]:
        with open(in_name) as in_file:
            out_file.write(in_file.read())
|
marcwebbie/youtube-dl | refs/heads/master | youtube_dl/extractor/abcnews.py | 11 | # coding: utf-8
from __future__ import unicode_literals
import calendar
import re
import time
from .amp import AMPIE
from .common import InfoExtractor
from ..compat import compat_urlparse
class AbcNewsVideoIE(AMPIE):
    """Extractor for direct abcnews.go.com video pages; delegates the feed
    parsing to the inherited AMPIE._extract_feed_info."""
    IE_NAME = 'abcnews:video'
    _VALID_URL = r'https?://abcnews\.go\.com/[^/]+/video/(?P<display_id>[0-9a-z-]+)-(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://abcnews.go.com/ThisWeek/video/week-exclusive-irans-foreign-minister-zarif-20411932',
        'info_dict': {
            'id': '20411932',
            'ext': 'mp4',
            'display_id': 'week-exclusive-irans-foreign-minister-zarif',
            'title': '\'This Week\' Exclusive: Iran\'s Foreign Minister Zarif',
            'description': 'George Stephanopoulos goes one-on-one with Iranian Foreign Minister Dr. Javad Zarif.',
            'duration': 180,
            'thumbnail': 're:^https?://.*\.jpg$',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        'url': 'http://abcnews.go.com/2020/video/2020-husband-stands-teacher-jail-student-affairs-26119478',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        """Resolve display_id/id from the URL, fetch the AMP item feed for
        the video and return the resulting info dict."""
        mobj = re.match(self._VALID_URL, url)
        display_id = mobj.group('display_id')
        video_id = mobj.group('id')
        info_dict = self._extract_feed_info(
            'http://abcnews.go.com/video/itemfeed?id=%s' % video_id)
        info_dict.update({
            'id': video_id,
            'display_id': display_id,
        })
        return info_dict
class AbcNewsIE(InfoExtractor):
    """Extractor for abcnews.go.com story pages: resolves the native video
    and, when present, a related embedded YouTube clip."""
    IE_NAME = 'abcnews'
    _VALID_URL = r'https?://abcnews\.go\.com/(?:[^/]+/)+(?P<display_id>[0-9a-z-]+)/story\?id=(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://abcnews.go.com/Blotter/News/dramatic-video-rare-death-job-america/story?id=10498713#.UIhwosWHLjY',
        'info_dict': {
            'id': '10498713',
            'ext': 'flv',
            'display_id': 'dramatic-video-rare-death-job-america',
            'title': 'Occupational Hazards',
            'description': 'Nightline investigates the dangers that lurk at various jobs.',
            'thumbnail': 're:^https?://.*\.jpg$',
            'upload_date': '20100428',
            'timestamp': 1272412800,
        },
        'add_ie': ['AbcNewsVideo'],
    }, {
        'url': 'http://abcnews.go.com/Entertainment/justin-timberlake-performs-stop-feeling-eurovision-2016/story?id=39125818',
        'info_dict': {
            'id': '39125818',
            'ext': 'mp4',
            'display_id': 'justin-timberlake-performs-stop-feeling-eurovision-2016',
            'title': 'Justin Timberlake Drops Hints For Secret Single',
            'description': 'Lara Spencer reports the buzziest stories of the day in "GMA" Pop News.',
            'upload_date': '20160515',
            'timestamp': 1463329500,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
            # The embedded YouTube video is blocked due to copyright issues
            'playlist_items': '1',
        },
        'add_ie': ['AbcNewsVideo'],
    }, {
        'url': 'http://abcnews.go.com/Technology/exclusive-apple-ceo-tim-cook-iphone-cracking-software/story?id=37173343',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        """Scrape the story page for the native video URL, an optional
        YouTube embed and the publication timestamp."""
        mobj = re.match(self._VALID_URL, url)
        display_id = mobj.group('display_id')
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        video_url = self._search_regex(
            r'window\.abcnvideo\.url\s*=\s*"([^"]+)"', webpage, 'video URL')
        full_video_url = compat_urlparse.urljoin(url, video_url)
        # Some stories additionally embed a related YouTube clip.
        youtube_url = self._html_search_regex(
            r'<iframe[^>]+src="(https://www\.youtube\.com/embed/[^"]+)"',
            webpage, 'YouTube URL', default=None)
        timestamp = None
        date_str = self._html_search_regex(
            r'<span[^>]+class="timestamp">([^<]+)</span>',
            webpage, 'timestamp', fatal=False)
        if date_str:
            tz_offset = 0
            if date_str.endswith(' ET'): # Eastern Time
                # NOTE(review): -5 is EST; during daylight saving time ET is
                # UTC-4, so timestamps may be off by an hour — confirm intent.
                tz_offset = -5
                date_str = date_str[:-3]
            date_formats = ['%b. %d, %Y', '%b %d, %Y, %I:%M %p']
            for date_format in date_formats:
                try:
                    timestamp = calendar.timegm(time.strptime(date_str.strip(), date_format))
                except ValueError:
                    continue
                # Fix: stop at the first format that parses; the original
                # kept iterating and relied on later formats failing.
                break
            if timestamp is not None:
                # Convert the Eastern-Time wall clock to UTC.
                timestamp -= tz_offset * 3600
        entry = {
            '_type': 'url_transparent',
            'ie_key': AbcNewsVideoIE.ie_key(),
            'url': full_video_url,
            'id': video_id,
            'display_id': display_id,
            'timestamp': timestamp,
        }
        if youtube_url:
            entries = [entry, self.url_result(youtube_url, 'Youtube')]
            return self.playlist_result(entries)
        return entry
|
tangmi360/googletest | refs/heads/master | test/gtest_xml_output_unittest.py | 1815 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Flags forwarded to the gtest binaries under test.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# File name gtest picks when --gtest_output=xml is given without a path.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
# The C++ test binary whose XML output is validated below.
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# When stack traces are supported, failure messages carry a trailing
# "Stack trace:" section which the expected XML must account for.
SUPPORTS_STACK_TRACES = False

if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''

# Expected XML for a full run of the test binary ('*' is a wildcard
# understood by the GTestXMLTestCase comparison helpers).
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
  <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
    <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
  </testsuite>
  <testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
    <testcase name="Fails" status="run" time="*" classname="FailedTest">
      <failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
  Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
  Expected: 1%(stack)s]]></failure>
    </testcase>
  </testsuite>
  <testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
    <testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
    <testcase name="Fails" status="run" time="*" classname="MixedResultTest">
      <failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
  Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
  Expected: 1%(stack)s]]></failure>
      <failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
  Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
  Expected: 2%(stack)s]]></failure>
    </testcase>
    <testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
  </testsuite>
  <testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
    <testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
      <failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
    </testcase>
  </testsuite>
  <testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
    <testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
      <failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
    </testcase>
  </testsuite>
  <testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
    <testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
  </testsuite>
  <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
    <testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
    <testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
    <testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
    <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
  </testsuite>
  <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
     <testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
     <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
     <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
  </testsuite>
  <testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
    <testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
    <testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
    <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
    <testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
  </testsuite>
  <testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
    <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
  </testsuite>
  <testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
    <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
  </testsuite>
  <testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
    <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
  </testsuite>
  <testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
    <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
  </testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}

# Expected XML when only SuccessfulTest.* is selected via --gtest_filter.
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
            timestamp="*" name="AllTests" ad_hoc_property="42">
  <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
             errors="0" time="*">
    <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
  </testsuite>
</testsuites>"""

# Expected XML for a binary that defines no tests at all.
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
            timestamp="*" name="AllTests">
</testsuites>"""

GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)

# Typed/type-parameterized tests are only compiled in on some platforms;
# probe the binary's test list to find out.
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
  """
  Unit test for Google Test's XML output functionality.

  Each test runs a gtest binary with --gtest_output=xml and compares the
  document it produces against one of the expected templates above.
  """

  # This test currently breaks on platforms that do not support typed and
  # type-parameterized tests, so we don't run it under them.
  if SUPPORTS_TYPED_TESTS:
    def testNonEmptyXmlOutput(self):
      """
      Runs a test program that generates a non-empty XML output, and
      tests that the XML output is expected.
      """
      self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)

  def testEmptyXmlOutput(self):
    """Verifies XML output for a Google Test binary without actual tests.

    Runs a test program that generates an empty XML output, and
    tests that the XML output is expected.
    """
    self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)

  def testTimestampValue(self):
    """Checks whether the timestamp attribute in the XML output is valid.

    Runs a test program that generates an empty XML output, and checks if
    the timestamp attribute in the testsuites tag is valid.
    """
    actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
    date_time_str = actual.documentElement.getAttributeNode('timestamp').value
    # datetime.strptime() is only available in Python 2.5+ so we have to
    # parse the expected datetime manually.
    match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    # Bug fix: assert on the match *result*, not on the re.match function
    # object, which is always truthy and made this check a no-op.
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
    date_time_from_xml = datetime.datetime(
        year=int(match.group(1)), month=int(match.group(2)),
        day=int(match.group(3)), hour=int(match.group(4)),
        minute=int(match.group(5)), second=int(match.group(6)))
    time_delta = abs(datetime.datetime.now() - date_time_from_xml)
    # timestamp value should be near the current local time
    self.assertTrue(time_delta < datetime.timedelta(seconds=600),
                    'time_delta is %s' % time_delta)
    actual.unlink()

  def testDefaultOutputFile(self):
    """
    Confirms that Google Test produces an XML output file with the expected
    default name if no name is explicitly specified.
    """
    output_file = os.path.join(gtest_test_utils.GetTempDir(),
                               GTEST_DEFAULT_OUTPUT_FILE)
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
        'gtest_no_test_unittest')
    try:
      os.remove(output_file)
    except OSError as e:  # 'as' form: valid on Python 2.6+ and 3.x
      # A missing file is fine; anything else is a real error.
      if e.errno != errno.ENOENT:
        raise
    p = gtest_test_utils.Subprocess(
        [gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
        working_dir=gtest_test_utils.GetTempDir())
    self.assert_(p.exited)
    self.assertEquals(0, p.exit_code)
    self.assert_(os.path.isfile(output_file))

  def testSuppressedXmlOutput(self):
    """
    Tests that no XML file is generated if the default XML listener is
    shut down before RUN_ALL_TESTS is invoked.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            GTEST_PROGRAM_NAME + 'out.xml')
    if os.path.isfile(xml_path):
      os.remove(xml_path)
    command = [GTEST_PROGRAM_PATH,
               '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
               '--shut_down_xml']
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      # p.signal is available only if p.terminated_by_signal is True.
      self.assertFalse(
          p.terminated_by_signal,
          '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(1, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, 1))
    self.assert_(not os.path.isfile(xml_path))

  def testFilteredTestXmlOutput(self):
    """Verifies XML output when a filter is applied.

    Runs a test program that executes only some tests and verifies that
    non-selected tests do not show up in the XML output.
    """
    self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
                        extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])

  def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
    """
    Returns the xml output generated by running the program gtest_prog_name.
    Furthermore, the program's exit code must be expected_exit_code.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            gtest_prog_name + 'out.xml')
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
    command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
               extra_args)
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      self.assert_(False,
                   '%s was killed by signal %d' % (gtest_prog_name, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(expected_exit_code, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, expected_exit_code))
    actual = minidom.parse(xml_path)
    return actual

  def _TestXmlOutput(self, gtest_prog_name, expected_xml,
                     expected_exit_code, extra_args=None):
    """
    Asserts that the XML document generated by running the program
    gtest_prog_name matches expected_xml, a string containing another
    XML document.  Furthermore, the program's exit code must be
    expected_exit_code.
    """
    actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
                                expected_exit_code)
    expected = minidom.parseString(expected_xml)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == '__main__':
  # Limit gtest stack traces in the child binaries to a single frame so
  # their failure output stays predictable for the XML comparisons.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
|
xujb/odoo | refs/heads/8.0 | addons/l10n_hu/__openerp__.py | 320 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 InnOpen Group Kft (<http://www.innopen.eu>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Hungarian - Accounting',
'version': '1.0',
'category': 'Localization/Account Charts',
'description': """
Base module for Hungarian localization
==========================================
This module consists :
- Generic Hungarian chart of accounts
- Hungarian taxes
- Hungarian Bank information
""",
'author': 'InnOpen Group Kft',
'website': 'http://www.innopen.eu',
'license': 'AGPL-3',
'depends': ['account','account_chart'],
'data': [
'data/account.account.template.csv',
'data/account.tax.code.template.csv',
'data/account.chart.template.csv',
'data/account.tax.template.csv',
'data/account.fiscal.position.template.csv',
'data/account.fiscal.position.tax.template.csv',
'data/res.bank.csv',
],
'installable': True,
'auto_install': False,
}
|
sahlinet/swampdragon | refs/heads/master | tests/test_base_model_router_get_list.py | 13 | from swampdragon.serializers.model_serializer import ModelSerializer
from swampdragon.route_handler import BaseModelRouter
from swampdragon.testing.dragon_testcase import DragonTestCase
from .models import TextModel
class FooModelSerializer(ModelSerializer):
    """Serializer used by the tests below; publishes only the ``text`` field."""
    class Meta:
        model = TextModel
        publish_fields = ('text', )
class FooRouter(BaseModelRouter):
    """Router under test: read-only (get_list) access to TextModel."""
    route_name = 'foo'
    valid_verbs = ('get_list', )
    model = TextModel
    serializer_class = FooModelSerializer
    def get_query_set(self, **kwargs):
        # Expose every TextModel row; client-supplied kwargs are ignored here.
        return self.model.objects.all()
class TestBaseModelRouter(DragonTestCase):
    def test_get_list(self):
        """get_list should return every existing TextModel instance."""
        TextModel.objects.create(text='foo bar')
        foos = FooRouter(self.connection).get_list()
        self.assertListEqual(list(foos), list(TextModel.objects.all()))
|
axinging/chromium-crosswalk | refs/heads/master | third_party/bintrees/bintrees/__init__.py | 156 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman
# Purpose: binary trees package
# Created: 03.05.2010
# Copyright (c) 2010-2013 by Manfred Moitzi
# License: MIT License
from __future__ import absolute_import
__doc__ = """
Binary Tree Package
===================
Python Trees
------------
Balanced and unbalanced binary trees written in pure Python with a dict-like API.
Classes
~~~~~~~
* BinaryTree -- unbalanced binary tree
* AVLTree -- balanced AVL-Tree
* RBTree -- balanced Red-Black-Tree
Cython Trees
------------
Basic tree functions written in Cython, merged with TreeMixin to provide the
full API of the Python Trees.
Classes
~~~~~~~
* FastBinaryTree -- unbalanced binary tree
* FastAVLTree -- balanced AVLTree
* FastRBTree -- balanced Red-Black-Tree
Overview of API for all Classes
===============================
* TreeClass ([compare]) -> new empty tree.
* TreeClass(mapping, [compare]) -> new tree initialized from a mapping
* TreeClass(seq, [compare]) -> new tree initialized from seq [(k1, v1), (k2, v2), ... (kn, vn)]
Methods
-------
* __contains__(k) -> True if T has a key k, else False, O(log(n))
* __delitem__(y) <==> del T[y], O(log(n))
* __getitem__(y) <==> T[y], O(log(n))
* __iter__() <==> iter(T)
* __len__() <==> len(T), O(1)
* __max__() <==> max(T), get max item (k,v) of T, O(log(n))
* __min__() <==> min(T), get min item (k,v) of T, O(log(n))
* __and__(other) <==> T & other, intersection
* __or__(other) <==> T | other, union
* __sub__(other) <==> T - other, difference
* __xor__(other) <==> T ^ other, symmetric_difference
* __repr__() <==> repr(T)
* __setitem__(k, v) <==> T[k] = v, O(log(n))
* clear() -> None, Remove all items from T, , O(n)
* copy() -> a shallow copy of T, O(n*log(n))
* discard(k) -> None, remove k from T, if k is present, O(log(n))
* get(k[,d]) -> T[k] if k in T, else d, O(log(n))
* is_empty() -> True if len(T) == 0, O(1)
* items([reverse]) -> list of T's (k, v) pairs, as 2-tuples, O(n)
* keys([reverse]) -> list of T's keys, O(n)
* pop(k[,d]) -> v, remove specified key and return the corresponding value, O(log(n))
* popitem() -> (k, v), remove and return some (key, value) pair as a 2-tuple, O(log(n))
* setdefault(k[,d]) -> T.get(k, d), also set T[k]=d if k not in T, O(log(n))
* update(E) -> None. Update T from dict/iterable E, O(E*log(n))
* values([reverse]) -> list of T's values, O(n)
walk forward/backward, O(log(n))
* prev_item(key) -> get (k, v) pair, where k is predecessor to key, O(log(n))
* prev_key(key) -> k, get the predecessor of key, O(log(n))
* succ_item(key) -> get (k,v) pair as a 2-tuple, where k is successor to key, O(log(n))
* succ_key(key) -> k, get the successor of key, O(log(n))
slicing by keys
* itemslice(s, e) -> generator for (k, v) items of T for s <= key < e, O(n)
* keyslice(s, e) -> generator for keys of T for s <= key < e, O(n)
* valueslice(s, e) -> generator for values of T for s <= key < e, O(n)
* T[s:e] -> TreeSlice object, with keys in range s <= key < e, O(n)
* del T[s:e] -> remove items by key slicing, for s <= key < e, O(n)
if 's' is None or T[:e] TreeSlice/iterator starts with value of min_key()
if 'e' is None or T[s:] TreeSlice/iterator ends with value of max_key()
T[:] is a TreeSlice which represents the whole tree.
TreeSlice is a tree wrapper with range check, and contains no references
to objects, deleting objects in the associated tree also deletes the object
in the TreeSlice.
* TreeSlice[k] -> get value for key k, raises KeyError if k not exists in range s:e
* TreeSlice[s1:e1] -> TreeSlice object, with keys in range s1 <= key < e1
* new lower bound is max(s, s1)
* new upper bound is min(e, e1)
TreeSlice methods:
* items() -> generator for (k, v) items of T, O(n)
* keys() -> generator for keys of T, O(n)
* values() -> generator for values of T, O(n)
* __iter__ <==> keys()
* __repr__ <==> repr(T)
* __contains__(key)-> True if TreeSlice has a key k, else False, O(log(n))
Heap methods
* max_item() -> get biggest (key, value) pair of T, O(log(n))
* max_key() -> get biggest key of T, O(log(n))
* min_item() -> get smallest (key, value) pair of T, O(log(n))
* min_key() -> get smallest key of T, O(log(n))
* pop_min() -> (k, v), remove item with minimum key, O(log(n))
* pop_max() -> (k, v), remove item with maximum key, O(log(n))
* nlargest(i[,pop]) -> get list of i largest items (k, v), O(i*log(n))
* nsmallest(i[,pop]) -> get list of i smallest items (k, v), O(i*log(n))
Set methods (using frozenset)
* intersection(t1, t2, ...) -> Tree with keys *common* to all trees
* union(t1, t2, ...) -> Tree with keys from *either* trees
* difference(t1, t2, ...) -> Tree with keys in T but not any of t1, t2, ...
* symmetric_difference(t1) -> Tree with keys in either T and t1 but not both
* issubset(S) -> True if every element in T is in S
* issuperset(S) -> True if every element in S is in T
* isdisjoint(S) -> True if T has a null intersection with S
Classmethods
* fromkeys(S[,v]) -> New tree with keys from S and values equal to v.
"""
# Public API: the pure-Python trees plus the Fast* names, which are either
# Cython-backed or aliased to the pure-Python classes (see below).
__all__ = [
    'FastBinaryTree',
    'FastAVLTree',
    'FastRBTree',
    'BinaryTree',
    'AVLTree',
    'RBTree'
]
from .treemixin import TreeMixin
from .bintree import BinaryTree
from .avltree import AVLTree
from .rbtree import RBTree
# Prefer the compiled Cython implementations when their extension modules
# load; otherwise alias the Fast* names to the pure-Python classes so they
# are always importable.  The ValueError branch covers interpreters such
# as PyPy where loading the extension fails with that exception.
try:
    from .qbintree import cBinaryTree
    class FastBinaryTree(cBinaryTree, TreeMixin):
        """ Faster unbalanced binary tree written in Cython with C-Code. """
except ImportError:  # fall back to pure Python version
    FastBinaryTree = BinaryTree
except ValueError:  # for pypy
    FastBinaryTree = BinaryTree
try:
    from .qavltree import cAVLTree
    class FastAVLTree(cAVLTree, TreeMixin):
        """ Faster balanced AVL-Tree written in Cython with C-Code. """
except ImportError:  # fall back to pure Python version
    FastAVLTree = AVLTree
except ValueError:  # for pypy
    FastAVLTree = AVLTree
try:
    from .qrbtree import cRBTree
    class FastRBTree(cRBTree, TreeMixin):
        """ Faster balanced Red-Black-Tree written in Cython with C-Code. """
except ImportError:  # fall back to pure Python version
    FastRBTree = RBTree
except ValueError:  # for pypy
    FastRBTree = RBTree
|
yichenfeng/AAFApplicationAngular | refs/heads/master | api/database.py | 3 | from pymongo import MongoClient
from gridfs import GridFS
from bson.objectid import ObjectId
from datetime import datetime
from os import environ
from const import RequestType
def GetAdminUsers(db=None):
    """Return every document of the ``admin_users`` collection as a plain dict.

    Falls back to the default connection (host ``data``, port 27017,
    database ``aaf_db``) when no database handle is supplied.
    """
    if not db:
        db = MongoClient('data', 27017).aaf_db
    return [dict(entry) for entry in db.admin_users.find()]
class MongoConnection(object):
    """Owns the MongoDB connection and resolves collections by request type."""
    def __init__(self, db=None):
        # Accept an injected database handle (useful for tests); otherwise
        # open the default connection.
        if db:
            self.db = db
        else:
            self.db = self.GetDb()
    # Open the default connection (host 'data', port 27017) and return the
    # 'aaf_db' database handle.
    def GetDb(self):
        client = MongoClient('data', 27017)
        return client.aaf_db
    # Map a RequestType constant to its backing collection.
    def GetCollection(self, request_type):
        if request_type == RequestType.ASSISTANCE:
            return self.db.assistance_requests
        elif request_type == RequestType.DONATION:
            return self.db.donation_requests
        else:
            raise Exception('Invalid Request Collection')
    def GetGridFS(self):
        # GridFS bucket that stores uploaded request documents.
        return GridFS(self.db, collection='request_documents')
class MongoInterface(object):
    """Thin CRUD wrapper around pymongo collections and GridFS buckets.

    Stateless: every method takes the target collection explicitly, so a
    single instance can serve any number of collections.
    """
    def _getObjectId(self, obj):
        # Normalise an ObjectId (or anything else) to its string form so
        # results are directly JSON-serialisable.
        return str(obj)
    def findDocuments(self, collection, query, sort=None):
        """Run ``query`` against ``collection`` and return the result cursor.

        ``sort`` is accepted for interface compatibility but is currently
        ignored -- TODO(review): apply it or retire the parameter.
        (The previous dead pagination scaffolding was removed.)
        """
        return collection.find(query)
    def getDocument(self, collection, id):
        """Return the document with the given id (as a dict) or None."""
        doc = collection.find_one({'_id': ObjectId(id)})
        if doc:
            # Stringify the ObjectId so the dict serialises cleanly.
            doc['_id'] = self._getObjectId(doc['_id'])
            return doc
        else:
            return None
    def insertDocument(self, collection, data):
        """Insert ``data`` and return the new document id as a string."""
        result = collection.insert_one(data).inserted_id
        return self._getObjectId(result)
    def updateDocument(self, collection, data, id, **kwargs):
        """Apply a ``$set`` of ``data`` (plus optional push/pull) to ``id``.

        Keyword arguments:
            push_data -- mapping used as the ``$push`` clause
            pull_data -- mapping used as the ``$pull`` clause
        """
        update_data = {'$set': data}
        if 'push_data' in kwargs:
            update_data['$push'] = kwargs['push_data']
        if 'pull_data' in kwargs:
            update_data['$pull'] = kwargs['pull_data']
        # NOTE(review): Collection.update() is deprecated in modern pymongo
        # (prefer update_one) and returns a result document, so the value
        # returned here is that document's string form rather than an id --
        # confirm no caller relies on it before changing the behavior.
        doc = collection.update({'_id': ObjectId(id)}, update_data)
        return self._getObjectId(doc)
    def getFile(self, collection, id):
        """Read and return the full contents of GridFS file ``id``."""
        file = collection.get(ObjectId(id))
        return file.read()
    def insertFile(self, collection, data):
        """Store text ``data`` (UTF-8 encoded) in GridFS; return its id."""
        file = collection.put(data.encode("UTF-8"))
        return self._getObjectId(file)
    def deleteFile(self, collection, id):
        """Delete GridFS file ``id``."""
        return collection.remove(ObjectId(id))
if __name__ == '__main__':
    # Ad-hoc smoke test: connect straight to a known container address and
    # print the admin users.
    db = MongoClient('172.18.0.2', 27017).aaf_db
    print(GetAdminUsers(db))
    # One-off seeding snippet, kept for reference:
    #db = MongoClient('172.18.0.2', 27017).aaf_db
    #db.admin_users.insert({'userId' : 10705332, 'userName' : 'Trevor Robinson'})
    #print(GetAdminUsers(db))
|
nicobustillos/odoo | refs/heads/8.0 | addons/crm/wizard/crm_partner_binding.py | 177 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class crm_partner_binding(osv.osv_memory):
    """
    Handle the partner binding or generation in any CRM wizard that requires
    such feature, like the lead2opportunity wizard, or the
    phonecall2opportunity wizard. Try to find a matching partner from the
    CRM model's information (name, email, phone number, etc) or create a new
    one on the fly.
    Use it like a mixin with the wizard of your choice.
    """
    _name = 'crm.partner.binding'
    _description = 'Handle partner binding or generation in CRM wizards.'
    _columns = {
        'action': fields.selection([
                ('exist', 'Link to an existing customer'),
                ('create', 'Create a new customer'),
                ('nothing', 'Do not link to a customer')
            ], 'Related Customer', required=True),
        'partner_id': fields.many2one('res.partner', 'Customer'),
    }

    def _find_matching_partner(self, cr, uid, context=None):
        """
        Try to find a matching partner regarding the active model data, like
        the customer's name, email, phone number, etc.
        :return int partner_id if any, False otherwise
        """
        if context is None:
            context = {}
        partner_id = False
        partner_obj = self.pool.get('res.partner')
        # The active model has to be a lead or a phonecall; otherwise there
        # is nothing to match against.  Initialising to None fixes a
        # NameError that occurred when the wizard was opened from any other
        # model (active_model was referenced without ever being assigned).
        active_model = None
        if (context.get('active_model') == 'crm.lead') and context.get('active_id'):
            active_model = self.pool.get('crm.lead').browse(cr, uid, context.get('active_id'), context=context)
        elif (context.get('active_model') == 'crm.phonecall') and context.get('active_id'):
            active_model = self.pool.get('crm.phonecall').browse(cr, uid, context.get('active_id'), context=context)
        # Find the best matching partner for the active model
        if active_model:
            # A partner is set already
            if active_model.partner_id:
                partner_id = active_model.partner_id.id
            # Search through the existing partners based on the lead's email
            elif active_model.email_from:
                partner_ids = partner_obj.search(cr, uid, [('email', '=', active_model.email_from)], context=context)
                if partner_ids:
                    partner_id = partner_ids[0]
            # Search through the existing partners based on the lead's partner or contact name
            elif active_model.partner_name:
                partner_ids = partner_obj.search(cr, uid, [('name', 'ilike', '%'+active_model.partner_name+'%')], context=context)
                if partner_ids:
                    partner_id = partner_ids[0]
            elif active_model.contact_name:
                partner_ids = partner_obj.search(cr, uid, [
                        ('name', 'ilike', '%'+active_model.contact_name+'%')], context=context)
                if partner_ids:
                    partner_id = partner_ids[0]
        return partner_id

    def default_get(self, cr, uid, fields, context=None):
        """Pre-select the matching partner (if any) and the related action."""
        res = super(crm_partner_binding, self).default_get(cr, uid, fields, context=context)
        partner_id = self._find_matching_partner(cr, uid, context=context)
        if 'action' in fields:
            res['action'] = partner_id and 'exist' or 'create'
        if 'partner_id' in fields:
            res['partner_id'] = partner_id
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ChristineLaMuse/mozillians | refs/heads/master | vendor-local/lib/python/kombu/transport/pyamqp.py | 12 | """
kombu.transport.pyamqp
======================
pure python amqp transport.
"""
from __future__ import absolute_import
import amqp
from kombu.exceptions import (
StdConnectionError,
StdChannelError,
VersionMismatch,
)
from kombu.utils.amq_manager import get_manager
from . import base
DEFAULT_PORT = 5672
if amqp.VERSION < (0, 9, 3): # pragma: no cover
raise VersionMismatch('Please install amqp version 0.9.3 or higher.')
class Message(base.Message):
    """Kombu message wrapper around a raw :class:`amqp.Message`.

    Extracts body, delivery tag, content type/encoding, headers and the
    remaining AMQP properties from *msg* and hands them to the kombu base
    class.
    """
    def __init__(self, channel, msg, **kwargs):
        props = msg.properties
        super(Message, self).__init__(channel,
                body=msg.body,
                delivery_tag=msg.delivery_tag,
                content_type=props.get('content_type'),
                content_encoding=props.get('content_encoding'),
                delivery_info=msg.delivery_info,
                properties=msg.properties,
                # application_headers may be missing or None -> default {}.
                headers=props.get('application_headers') or {},
                **kwargs)
class Channel(amqp.Channel, base.StdChannel):
    """py-amqp channel extended with kombu's message conversion hooks."""
    Message = Message

    def prepare_message(self, body, priority=None,
            content_type=None, content_encoding=None, headers=None,
            properties=None):
        """Encapsulate data into an AMQP message."""
        # ``properties`` defaults to None but was previously unpacked
        # unconditionally (**properties), which raised TypeError for any
        # caller relying on the default; treat None as "no extra properties".
        return amqp.Message(body, priority=priority,
                            content_type=content_type,
                            content_encoding=content_encoding,
                            application_headers=headers,
                            **(properties or {}))

    def message_to_python(self, raw_message):
        """Convert encoded message body back to a Python value."""
        return self.Message(self, raw_message)
class Connection(amqp.Connection):
    # Use kombu's Channel (with prepare_message/message_to_python) for
    # every channel created on this connection.
    Channel = Channel
class Transport(base.Transport):
    """Kombu transport backed by the pure-Python py-amqp client."""
    Connection = Connection
    default_port = DEFAULT_PORT
    # it's very annoying that pyamqp sometimes raises AttributeError
    # if the connection is lost, but nothing we can do about that here.
    connection_errors = (StdConnectionError, ) + \
        amqp.Connection.connection_errors
    channel_errors = (StdChannelError, ) + amqp.Connection.channel_errors
    nb_keep_draining = True
    driver_name = "py-amqp"
    driver_type = "amqp"
    supports_heartbeats = True
    supports_ev = True
    def __init__(self, client, **kwargs):
        # NOTE(review): base.Transport.__init__ is not called here; only
        # what this transport needs is set -- confirm the base class has no
        # required initialisation of its own.
        self.client = client
        self.default_port = kwargs.get("default_port") or self.default_port
    def create_channel(self, connection):
        """Open a new channel on *connection*."""
        return connection.channel()
    def drain_events(self, connection, **kwargs):
        """Wait for and dispatch pending events on *connection*."""
        return connection.drain_events(**kwargs)
    def establish_connection(self):
        """Establish connection to the AMQP broker."""
        conninfo = self.client
        # Fill any unset connection parameters with the documented defaults.
        for name, default_value in self.default_connection_params.items():
            if not getattr(conninfo, name, None):
                setattr(conninfo, name, default_value)
        if conninfo.hostname == 'localhost':
            # Rewrite to the loopback IP -- presumably to sidestep
            # platform-dependent 'localhost' resolution; confirm.
            conninfo.hostname = '127.0.0.1'
        conn = self.Connection(host=conninfo.host,
                               userid=conninfo.userid,
                               password=conninfo.password,
                               login_method=conninfo.login_method,
                               virtual_host=conninfo.virtual_host,
                               insist=conninfo.insist,
                               ssl=conninfo.ssl,
                               connect_timeout=conninfo.connect_timeout,
                               heartbeat=conninfo.heartbeat)
        conn.client = self.client
        return conn
    def close_connection(self, connection):
        """Close the AMQP broker connection."""
        connection.client = None
        connection.close()
    def is_alive(self, connection):
        # Delegate the liveness check to the underlying amqp connection.
        return connection.is_alive()
    def verify_connection(self, connection):
        # A closed amqp connection has channels = None.
        return connection.channels is not None and self.is_alive(connection)
    def eventmap(self, connection):
        # Map the connection's socket to the drain callback for event loops.
        return {connection.sock: self.client.drain_nowait}
    def on_poll_init(self, poller):
        pass
    def on_poll_start(self):
        return {}
    def heartbeat_check(self, connection, rate=2):
        return connection.heartbeat_tick(rate=rate)
    @property
    def default_connection_params(self):
        # Defaults applied in establish_connection() for unset fields.
        return {'userid': 'guest', 'password': 'guest',
                'port': self.default_port,
                'hostname': 'localhost', 'login_method': 'AMQPLAIN'}
    def get_manager(self, *args, **kwargs):
        return get_manager(self.client, *args, **kwargs)
|
muffinresearch/olympia | refs/heads/master | apps/addons/buttons.py | 12 | from django.db.models import Q
from django.shortcuts import render
from django.views.decorators.cache import cache_page
import jingo
import jinja2
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from amo.helpers import urlparams
from amo.urlresolvers import reverse
from addons.models import Addon
from translations.models import Translation
@jinja2.contextfunction
def install_button(context, addon, version=None, show_contrib=True,
                   show_warning=True, src='', collection=None, size='',
                   detailed=False, mobile=False, impala=False,
                   latest_beta=False):
    """
    Render the install button for *addon*.

    If version isn't given, we use the latest version. You can set latest_beta
    parameter to use latest beta version instead.
    """
    assert not (version and latest_beta), (
        'Only one of version and latest_beta can be specified')
    request = context['request']
    app, lang = context['APP'], context['LANG']
    # Install source: explicit argument wins, then the template context,
    # then the ?src= query parameter.
    src = src or context.get('src') or request.GET.get('src', '')
    # A collection may arrive as an object (use its uuid), an id/uuid
    # string, or via several query parameter spellings.
    collection = ((collection.uuid if hasattr(collection, 'uuid') else None)
                  or collection
                  or context.get('collection')
                  or request.GET.get('collection')
                  or request.GET.get('collection_id')
                  or request.GET.get('collection_uuid'))
    button = install_button_factory(addon, app, lang, version, show_contrib,
                                    show_warning, src, collection, size,
                                    detailed, impala,
                                    latest_beta)
    installed = (request.user.is_authenticated() and
                 addon.id in request.amo_user.mobile_addons)
    c = {'button': button, 'addon': addon, 'version': button.version,
         'installed': installed}
    if impala:
        template = 'addons/impala/button.html'
    elif mobile:
        template = 'addons/mobile/button.html'
    else:
        template = 'addons/button.html'
    t = jingo.render_to_string(request, template, c)
    return jinja2.Markup(t)
@jinja2.contextfunction
def big_install_button(context, addon, **kwargs):
    """Render a prominent, detailed install button wrapped in status flags."""
    # Imported here rather than at module level -- presumably to avoid a
    # circular import with addons.helpers; confirm before moving it.
    from addons.helpers import statusflags
    flags = jinja2.escape(statusflags(context, addon))
    button = install_button(context, addon, detailed=True, size='prominent',
                            **kwargs)
    markup = u'<div class="install-wrapper %s">%s</div>' % (flags, button)
    return jinja2.Markup(markup)
@jinja2.contextfunction
def mobile_install_button(context, addon, **kwargs):
    """Mobile variant of big_install_button (mobile button template)."""
    # Local import -- presumably to avoid a circular import; confirm.
    from addons.helpers import statusflags
    button = install_button(context, addon, detailed=True, size='prominent',
                            mobile=True, **kwargs)
    flags = jinja2.escape(statusflags(context, addon))
    markup = u'<div class="install-wrapper %s">%s</div>' % (flags, button)
    return jinja2.Markup(markup)
def install_button_factory(*args, **kwargs):
    """Build an InstallButton, specialising its class by add-on state."""
    button = InstallButton(*args, **kwargs)
    # Order matters. We want to highlight unreviewed before featured. They
    # should be mutually exclusive, but you never know.
    dispatch = (
        ('is_persona', PersonaInstallButton),
        ('lite', LiteInstallButton),
        ('unreviewed', UnreviewedInstallButton),
        ('featured', FeaturedInstallButton),
    )
    specialized = next((klass for attr, klass in dispatch
                        if getattr(button, attr, False)), None)
    if specialized is not None:
        button.__class__ = specialized
    button.prepare()
    return button
class InstallButton(object):
    """State machine behind the add-on install button.

    Derives everything the button templates need: which version to offer,
    whether to warn (unreviewed/beta), whether contributions gate the
    download, and the CSS classes to apply.  install_button_factory()
    swaps in a subclass for the persona/lite/unreviewed/featured cases.
    """
    button_class = ['download']
    install_class = []
    install_text = ''
    def __init__(self, addon, app, lang, version=None, show_contrib=True,
                 show_warning=True, src='', collection=None, size='',
                 detailed=False, impala=False,
                 latest_beta=False):
        self.addon, self.app, self.lang = addon, app, lang
        self.latest = version is None
        self.version = version
        if not self.version:
            # No explicit version: offer the latest (beta if requested).
            self.version = (addon.current_beta_version if latest_beta
                            else addon.current_version)
        self.src = src
        self.collection = collection
        self.size = size
        self.detailed = detailed
        self.impala = impala
        self.is_beta = self.version and self.version.is_beta
        version_unreviewed = self.version and self.version.is_unreviewed
        self.lite = self.version and self.version.is_lite
        # Beta versions count as unreviewed for warning purposes.
        self.unreviewed = (addon.is_unreviewed() or version_unreviewed or
                           self.is_beta)
        # Featured styling applies only to fully reviewed, non-beta add-ons.
        self.featured = (not self.unreviewed
                         and not self.lite
                         and not self.is_beta
                         and addon.is_featured(app, lang))
        self.is_persona = addon.type == amo.ADDON_PERSONA
        self._show_contrib = show_contrib
        # A contribution roadblock replaces the direct download link.
        self.show_contrib = (show_contrib and addon.takes_contributions
                             and addon.annoying == amo.CONTRIB_ROADBLOCK)
        self.show_warning = show_warning and self.unreviewed
    def prepare(self):
        """Called after the class is set to manage contributions."""
        # Get a copy for this instance.
        self.button_class = list(self.__class__.button_class)
        self.install_class = list(self.__class__.install_class)
        if self.show_contrib:
            # The button leads to the contribution page, not a download.
            try:
                self.button_class.remove('download')
            except ValueError:
                pass
            self.button_class += ['contrib', 'go']
            self.install_class.append('contrib')
        if self.size:
            self.button_class.append(self.size)
        if self.is_beta:
            self.install_class.append('beta')
    def attrs(self):
        """Extra data-* attributes for the rendered button."""
        rv = {}
        addon = self.addon
        if (self._show_contrib and addon.takes_contributions
                and addon.annoying == amo.CONTRIB_AFTER):
            # Ask for a contribution after install instead of before.
            rv['data-after'] = 'contrib'
        if addon.type == amo.ADDON_SEARCH:
            rv['data-search'] = 'true'
        return rv
    def links(self):
        """Build one Link per valid file of the chosen version."""
        if not self.version:
            return []
        rv = []
        files = [f for f in self.version.all_files
                 if f.status in amo.VALID_STATUSES]
        for file in files:
            text, url, os = self.file_details(file)
            rv.append(Link(text, self.fix_link(url), os, file))
        return rv
    def file_details(self, file):
        """Return (link text, url, platform) for *file*."""
        platform = file.platform
        if self.latest and not self.is_beta and (
                self.addon.status == file.status == amo.STATUS_PUBLIC):
            # Fully public latest version: use the stable "latest" URL.
            url = file.latest_xpi_url()
        elif self.latest and self.is_beta and self.addon.show_beta:
            url = file.latest_xpi_url(beta=True)
        else:
            url = file.get_url_path(self.src)
        if platform == amo.PLATFORM_ALL.id:
            text, os = _('Download Now'), None
        else:
            text, os = _('Download'), amo.PLATFORMS[platform]
        if self.show_contrib:
            # L10n: please keep the non-breaking space in the string so → does not wrap.
            text = jinja2.Markup(_('Continue to Download →'))
            roadblock = reverse('addons.roadblock', args=[self.addon.id])
            url = urlparams(roadblock, version=self.version.version)
        return text, url, os
    def fix_link(self, url):
        # Propagate the tracking source and collection through the link.
        if self.src:
            url = urlparams(url, src=self.src)
        if self.collection:
            url = urlparams(url, collection_id=self.collection)
        return url
class FeaturedInstallButton(InstallButton):
    """Styling for add-ons featured in the current app/locale."""
    install_class = ['featuredaddon']
    install_text = _lazy(u'Featured', 'install_button')
class UnreviewedInstallButton(InstallButton):
    """Styling and warning text for add-ons/versions not yet reviewed."""
    install_class = ['unreviewed']
    install_text = _lazy(u'Not Reviewed', 'install_button')
    # Explicit list instead of the previous 'download caution'.split() --
    # identical value without the string-splitting indirection.
    button_class = ['download', 'caution']
class LiteInstallButton(InstallButton):
    """Styling for preliminarily-reviewed ("lite") versions."""
    install_class = ['lite']
    button_class = ['caution']
    install_text = _lazy(u'Experimental', 'install_button')
class PersonaInstallButton(InstallButton):
    """Button for lightweight themes (personas): one link, no xpi files."""
    install_class = ['persona']
    def links(self):
        # Personas link to the detail page of the add-on identified by
        # amo.PERSONAS_ADDON_ID instead of offering a file download.
        return [Link(_(u'Add to {0}').format(unicode(self.app.pretty)),
                     reverse('addons.detail', args=[amo.PERSONAS_ADDON_ID]))]
    def attrs(self):
        rv = super(PersonaInstallButton, self).attrs()
        # Theme JSON consumed by the frontend (data-browsertheme attribute).
        rv['data-browsertheme'] = self.addon.persona.json_data
        return rv
class Link(object):
    """Value object describing a single download link for a file."""
    def __init__(self, text, url, os=None, file=None):
        self.text = text
        self.url = url
        self.os = os
        self.file = file
# Cache it for a year.
@cache_page(60 * 60 * 24 * 365)
def js(request):
    """Serve the install-button popup templates as a JavaScript response."""
    return render(request, 'addons/popups.html',
                  content_type='text/javascript')
def smorgasbord(request):
    """
    Gather many different kinds of tasty add-ons together.
    Great for testing install buttons.

    NOTE(review): relies on hard-coded add-on ids (2313, 5308, ...) --
    presumably fixture data on the dev site; confirm they still exist.
    """
    def _compat(min, max):
        # Helper for faking compatible_apps.
        return {'min': {'version': min}, 'max': {'version': max}}
    addons = []
    normal_version = _compat('1.0', '10.0')
    older_version = _compat('1.0', '2.0')
    newer_version = _compat('9.0', '10.0')
    def all_versions(addon, base_tag):
        # Append three copies tagged with a normal, an older and a newer
        # compatibility range for the current app.
        x = (('', normal_version),
             (' + older version', older_version),
             (' + newer version', newer_version))
        for extra, version in x:
            a = addon()
            a.tag = base_tag + extra
            a.compatible_apps[request.APP] = version
            addons.append(a)
    # Featured.
    featured = Addon.objects.featured(request.APP)
    addons.append(featured[0])
    addons[-1].tag = 'featured'
    normal = Addon.objects.listed(request.APP).exclude(id__in=featured)
    # Normal, Older Version, Newer Version.
    all_versions(lambda: normal[0], 'normal')
    # Unreviewed.
    exp = Addon.objects.unreviewed()
    all_versions(lambda: exp[0], 'unreviewed')
    # Multiple Platforms.
    addons.append(Addon.objects.get(id=2313))
    addons[-1].tag = 'platformer'
    # Multiple Platforms + EULA.
    addons.append(Addon.objects.get(id=2313))
    addons[-1].eula = Translation(localized_string='xxx')
    addons[-1].tag = 'platformer + eula'
    # Incompatible Platform + EULA.
    addons.append(Addon.objects.get(id=5308))
    addons[-1].eula = Translation(localized_string='xxx')
    addons[-1].tag = 'windows/linux-only + eula'
    # Incompatible Platform.
    all_versions(lambda: Addon.objects.get(id=5308), 'windows/linux-only')
    # EULA.
    eula = (Q(eula__isnull=False, eula__localized_string__isnull=False)
            & ~Q(eula__localized_string=''))
    addons.append(normal.filter(eula)[0])
    addons[-1].tag = 'eula'
    addons.append(exp.filter(eula)[0])
    addons[-1].tag = 'eula + unreviewed'
    # Contributions.
    addons.append(normal.filter(annoying=1)[0])
    addons[-1].tag = 'contrib: passive'
    addons.append(normal.filter(annoying=2)[0])
    addons[-1].tag = 'contrib: after'
    addons.append(normal.filter(annoying=3)[0])
    addons[-1].tag = 'contrib: roadblock'
    addons.append(Addon.objects.get(id=2608))
    addons[-1].tag = 'after + eula'
    addons.append(Addon.objects.get(id=8442))
    addons[-1].tag = 'roadblock + eula'
    # Other App.
    addons.append(Addon.objects.get(id=5326))
    addons[-1].tag = 'tbird'
    # Mobile.
    addons.append(Addon.objects.get(id=53476))
    addons[-1].tag = 'mobile'
    # Search Engine.
    addons.append(Addon.objects.filter(type=amo.ADDON_SEARCH)[0])
    addons[-1].tag = 'search engine'
    # Beta Version
    beta = normal.filter(versions__files__status=amo.STATUS_BETA)[0]
    beta.tag = 'beta version'
    # Theme.
    # Persona.
    addons.append(Addon.objects.filter(type=amo.ADDON_PERSONA)[0])
    addons[-1].tag = 'persona'
    # Future Version.
    # No versions.
    return render(request, 'addons/smorgasbord.html',
                  {'addons': addons, 'beta': beta})
|
B3AU/waveTree | refs/heads/waveTree | examples/covariance/plot_lw_vs_oas.py | 8 | """
=============================
Ledoit-Wolf vs OAS estimation
=============================
The usual covariance maximum likelihood estimate can be regularized
using shrinkage. Ledoit and Wolf proposed a close formula to compute
the asymptotically optimal shrinkage parameter (minimizing a MSE
criterion), yielding the Ledoit-Wolf covariance estimate.
Chen et al. proposed an improvement of the Ledoit-Wolf shrinkage
parameter, the OAS coefficient, whose convergence is significantly
better under the assumption that the data are gaussian.
This example, inspired by Chen's publication [1], shows a comparison
of the estimated MSE of the LW and OAS methods, using gaussian
distributed data.
[1] "Shrinkage Algorithms for MMSE Covariance Estimation"
Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
"""
print(__doc__)
import numpy as np
import pylab as pl
from scipy.linalg import toeplitz, cholesky
from sklearn.covariance import LedoitWolf, OAS
# Fixed seed so the simulation is reproducible.
np.random.seed(0)
###############################################################################
n_features = 100
# simulation covariance matrix (AR(1) process)
r = 0.1
real_cov = toeplitz(r ** np.arange(n_features))
coloring_matrix = cholesky(real_cov)
n_samples_range = np.arange(6, 31, 1)
repeat = 100
lw_mse = np.zeros((n_samples_range.size, repeat))
oa_mse = np.zeros((n_samples_range.size, repeat))
lw_shrinkage = np.zeros((n_samples_range.size, repeat))
oa_shrinkage = np.zeros((n_samples_range.size, repeat))
for i, n_samples in enumerate(n_samples_range):
    for j in range(repeat):
        # Draw correlated gaussian samples by coloring white noise with
        # the Cholesky factor of the true covariance.
        X = np.dot(
            np.random.normal(size=(n_samples, n_features)), coloring_matrix.T)
        lw = LedoitWolf(store_precision=False, assume_centered=True)
        lw.fit(X)
        lw_mse[i, j] = lw.error_norm(real_cov, scaling=False)
        lw_shrinkage[i, j] = lw.shrinkage_
        oa = OAS(store_precision=False, assume_centered=True)
        oa.fit(X)
        oa_mse[i, j] = oa.error_norm(real_cov, scaling=False)
        oa_shrinkage[i, j] = oa.shrinkage_
# plot MSE (mean +/- std over the repeats)
pl.subplot(2, 1, 1)
pl.errorbar(n_samples_range, lw_mse.mean(1), yerr=lw_mse.std(1),
            label='Ledoit-Wolf', color='g')
pl.errorbar(n_samples_range, oa_mse.mean(1), yerr=oa_mse.std(1),
            label='OAS', color='r')
pl.ylabel("Squared error")
pl.legend(loc="upper right")
pl.title("Comparison of covariance estimators")
pl.xlim(5, 31)
# plot shrinkage coefficient
pl.subplot(2, 1, 2)
pl.errorbar(n_samples_range, lw_shrinkage.mean(1), yerr=lw_shrinkage.std(1),
            label='Ledoit-Wolf', color='g')
pl.errorbar(n_samples_range, oa_shrinkage.mean(1), yerr=oa_shrinkage.std(1),
            label='OAS', color='r')
pl.xlabel("n_samples")
pl.ylabel("Shrinkage")
pl.legend(loc="lower right")
pl.ylim(pl.ylim()[0], 1. + (pl.ylim()[1] - pl.ylim()[0]) / 10.)
pl.xlim(5, 31)
pl.show()
|
dslomov/intellij-community | refs/heads/master | python/testData/completion/propertyType.py | 83 | class C(object):
def __init__(self):
self._x = []
@property
def x(self):
return self._x
c = C()
c.x.app<caret>
|
hassoon3/odoo | refs/heads/8.0 | addons/payment_sips/controllers/__init__.py | 4497 | # -*- coding: utf-8 -*-
import main
|
Noviat/odoo | refs/heads/8.0 | addons/payment_ogone/controllers/__init__.py | 4497 | # -*- coding: utf-8 -*-
import main
|
garthylou/Libreosteo | refs/heads/master | winserver.py | 1 | # This file is part of LibreOsteo.
#
# LibreOsteo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LibreOsteo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LibreOsteo. If not, see <http://www.gnu.org/licenses/>.
"""
Requires Mark Hammond's pywin32 package.
"""
# Python stdlib imports
import sys
import logging
import os, os.path
if getattr(sys, 'frozen', False):
# frozen
dir = os.path.dirname(sys.executable)
sys.path.append(dir)
os.environ['PATH'] = (os.environ['PATH'] + ";").join(p + ";"
for p in sys.path)
# Win32 service imports
import win32serviceutil
import win32service, win32api
import servicemanager
# Third-party imports
import cherrypy
import patch
import server
class LibreosteoService(win32serviceutil.ServiceFramework):
    """Libreosteo NT Service: runs the CherryPy server as a Windows service."""
    _svc_name_ = "LibreosteoService"
    _svc_display_name_ = "Libreosteo Service"
    def log(self, msg):
        # Write an informational entry to the Windows event log.
        servicemanager.LogInfoMsg(str(msg))
    def SvcDoRun(self):
        """Service entry point: configure CherryPy and run the server.

        Any exception is logged to the event log and the service is stopped.
        """
        self.ReportServiceStatus(win32service.SERVICE_START_PENDING)
        try:
            self.log("Create the Libreosteo server")
            config = server.configure()
            _srvr = server.Server(config)
            # in practice, you will want to specify a value for
            # log.error_file below or in your config file. If you
            # use a config file, be sure to use an absolute path to
            # it, as you can't be assured what path your service
            # will run in.
            self.log("Configure the server")
            cherrypy.config.update({
                'global': {
                    'log.screen':
                    False,
                    'engine.autoreload.on':
                    False,
                    # Services have no console/signals: disable signal hooks.
                    'engine.SIGHUP':
                    None,
                    'engine.SIGTERM':
                    None,
                    'log.error_file':
                    os.path.join(_srvr.base_dir, 'libreosteo_error.log'),
                    'tools.log_tracebacks.on':
                    True,
                    'log.access_file':
                    os.path.join(_srvr.base_dir, 'libreosteo_access.log'),
                    'server.socket_port':
                    config["server_port"],
                    # Listen on all interfaces.
                    'server.socket_host':
                    '0.0.0.0',
                }
            })
            self.ReportServiceStatus(win32service.SERVICE_RUNNING)
            servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,
                                  servicemanager.PYS_SERVICE_STARTED,
                                  (self._svc_name_, ''))
            self.log("Run the service Libreosteo")
            _srvr.run()
        except Exception as e:
            s = str(e)
            self.log('Exception : %s' % s)
            self.SvcStop()
    def SvcStop(self):
        """Stop handler: shut down CherryPy and report the stopped state."""
        self.log("Stopping service")
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        cherrypy.engine.exit()
        self.ReportServiceStatus(win32service.SERVICE_STOPPED)
        # very important for use with py2exe
        # otherwise the Service Controller never knows that it is stopped !
        self.log("Service stopped")
if __name__ == '__main__':
    # BUGFIX: ``logging.config`` is a submodule that a bare ``import logging``
    # does NOT import, so dictConfig() below would raise AttributeError
    # unless some other module imported it first.  Import it explicitly.
    import logging.config

    if getattr(sys, 'frozen', False):
        # frozen (py2exe/cx_Freeze build): data lives next to the executable
        DATA_FOLDER = os.path.dirname(sys.executable)
    else:
        # unfrozen: data lives next to this source file
        DATA_FOLDER = os.path.dirname(os.path.realpath(__file__))

    def _rotating_file_handler(filename):
        """Return one dictConfig handler entry: a 10 MiB rotating log file
        (20 backups, UTF-8) stored inside DATA_FOLDER."""
        return {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'standard',
            'filename': os.path.join(DATA_FOLDER, filename),
            'maxBytes': 10485760,
            'backupCount': 20,
            'encoding': 'utf8'
        }

    # Route django/libreosteo loggers to default.log and the cherrypy
    # loggers to their own console/access/error files.
    LOG_CONF = {
        'version': 1,
        'formatters': {
            'void': {
                'format': ''
            },
            'standard': {
                'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
            },
        },
        'handlers': {
            'default': _rotating_file_handler('default.log'),
            'cherrypy_console': _rotating_file_handler('console.log'),
            'cherrypy_access': _rotating_file_handler('access.log'),
            'cherrypy_error': _rotating_file_handler('errors.log'),
        },
        'loggers': {
            'django.utils.translation': {
                'handlers': ['default'],
                'level': 'INFO'
            },
            '': {
                'handlers': ['default'],
                'level': 'INFO'
            },
            'root': {
                'handlers': ['default'],
                'level': 'INFO'
            },
            'db': {
                'handlers': ['default'],
                'level': 'INFO',
                'propagate': True
            },
            'cherrypy.access': {
                'handlers': ['cherrypy_access'],
                'level': 'INFO',
                'propagate': True
            },
            'cherrypy.error': {
                'handlers': ['cherrypy_console', 'cherrypy_error'],
                'level': 'INFO',
                'propagate': True
            },
            'libreosteoweb.api': {
                'handlers': ['cherrypy_console', 'default'],
                'level': 'INFO',
                'propagate': True
            },
            'libreosteo': {
                'handlers': ['cherrypy_console', 'default'],
                'level': 'INFO',
                'propagate': True
            },
            'Libreosteo': {
                'handlers': ['cherrypy_console', 'default'],
                'level': 'INFO',
                'propagate': True
            },
        }
    }
    logging.config.dictConfig(LOG_CONF)
    os.chdir(DATA_FOLDER)
    logging.info(os.getcwd())
    logger = logging.getLogger(__name__)
    logger.info("Frozen with attribute value %s" %
                (getattr(sys, 'frozen', False)))
    if len(sys.argv) == 1:
        # No arguments: we are being launched by the Windows Service
        # Control Manager, so hand control to the service dispatcher.
        logging.info("Start service")
        logging.info("Handle starting of the service")
        try:
            servicemanager.Initialize()
            servicemanager.PrepareToHostSingle(LibreosteoService)
            servicemanager.StartServiceCtrlDispatcher()
        except Exception:
            logging.exception("Exception when starting service")
    else:
        # Arguments given: install/remove/start/stop via the command line.
        logging.info("Start Controller")
        logging.info("Handle command line on service manager")
        try:
            win32serviceutil.HandleCommandLine(LibreosteoService)
        except Exception:
            logging.exception("Exception when starting service")
|
donny/mako-mori | refs/heads/master | external/boto/emr/instance_group.py | 179 | #
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class InstanceGroup(object):
    """A group of EC2 instances participating in an EMR job flow.

    A SPOT-market group must carry a bid price (stored stringified as
    ``bidprice``); for any other market the ``bidprice`` attribute is
    simply never set, matching the historical behavior of this class.
    """

    def __init__(self, num_instances, role, type, market, name, bidprice=None):
        self.num_instances = num_instances
        self.role = role
        self.type = type
        self.market = market
        self.name = name
        # bidprice is mandatory for SPOT groups only; note that a falsy
        # bid (None, 0, '') is rejected the same way as a missing one.
        if market == 'SPOT':
            if not bidprice:
                raise ValueError('bidprice must be specified if market == SPOT')
            self.bidprice = str(bidprice)

    def __repr__(self):
        cls = self.__class__
        # Build the common prefix once; SPOT groups append their bid price.
        head = '%s.%s(name=%r, num_instances=%r, role=%r, type=%r, market = %r' % (
            cls.__module__, cls.__name__,
            self.name, self.num_instances, self.role, self.type, self.market)
        if self.market == 'SPOT':
            return head + ', bidprice = %r)' % (self.bidprice,)
        return head + ')'
|
pacificIT/mopidy | refs/heads/develop | tests/mpd/protocol/test_music_db.py | 14 | from __future__ import absolute_import, unicode_literals
import unittest
import mock
from mopidy.models import Album, Artist, Playlist, Ref, SearchResult, Track
from mopidy.mpd.protocol import music_db, stored_playlists
from tests.mpd import protocol
# TODO: split into more modules for faster parallel tests?
class QueryFromMpdSearchFormatTest(unittest.TestCase):

    """Tests for translating MPD search parameter lists into query dicts."""

    @staticmethod
    def _parse(parameters):
        # Thin wrapper so each test reads as "input -> expected query".
        return music_db._query_from_mpd_search_parameters(
            parameters, music_db._SEARCH_MAPPING)

    def test_dates_are_extracted(self):
        query = self._parse(['Date', '1974-01-02', 'Date', '1975'])
        self.assertEqual(query['date'][0], '1974-01-02')
        self.assertEqual(query['date'][1], '1975')

    def test_empty_value_is_ignored(self):
        self.assertEqual(self._parse(['Date', '']), {})

    def test_whitespace_value_is_ignored(self):
        self.assertEqual(self._parse(['Date', ' ']), {})
# TODO Test more mappings
class QueryFromMpdListFormatTest(unittest.TestCase):
    """Placeholder for tests of MPD ``list`` query parsing (none written yet)."""
    pass  # TODO
# TODO: why isn't core.playlists.filter getting deprecation warnings?
class MusicDatabaseHandlerTest(protocol.BaseTestCase):
    """Protocol round-trip tests for the MPD music-database commands.

    Each test feeds a raw MPD command line through ``send_request`` against
    the dummy backend/core provided by ``protocol.BaseTestCase`` and asserts
    on literal lines of the response, so the quoted command strings and the
    expected response lines must stay byte-exact.
    """
    # --- count ---
    def test_count(self):
        self.send_request('count "artist" "needle"')
        self.assertInResponse('songs: 0')
        self.assertInResponse('playtime: 0')
        self.assertInResponse('OK')
    def test_count_without_quotes(self):
        self.send_request('count artist "needle"')
        self.assertInResponse('songs: 0')
        self.assertInResponse('playtime: 0')
        self.assertInResponse('OK')
    def test_count_with_multiple_pairs(self):
        self.send_request('count "artist" "foo" "album" "bar"')
        self.assertInResponse('songs: 0')
        self.assertInResponse('playtime: 0')
        self.assertInResponse('OK')
    def test_count_correct_length(self):
        # Count the lone track
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[
                Track(uri='dummy:a', name='foo', date='2001', length=4000),
            ])
        self.send_request('count "title" "foo"')
        self.assertInResponse('songs: 1')
        self.assertInResponse('playtime: 4')
        self.assertInResponse('OK')
        # Count multiple tracks
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[
                Track(uri='dummy:b', date="2001", length=50000),
                Track(uri='dummy:c', date="2001", length=600000),
            ])
        self.send_request('count "date" "2001"')
        self.assertInResponse('songs: 2')
        self.assertInResponse('playtime: 650')
        self.assertInResponse('OK')
    def test_count_with_track_length_none(self):
        # A track without a length must count as zero playtime, not crash.
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[
                Track(uri='dummy:b', date="2001", length=None),
            ])
        self.send_request('count "date" "2001"')
        self.assertInResponse('songs: 1')
        self.assertInResponse('playtime: 0')
        self.assertInResponse('OK')
    # --- findadd / searchadd / searchaddpl ---
    def test_findadd(self):
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[Track(uri='dummy:a', name='A')])
        self.assertEqual(self.core.tracklist.length.get(), 0)
        self.send_request('findadd "title" "A"')
        self.assertEqual(self.core.tracklist.length.get(), 1)
        self.assertEqual(self.core.tracklist.tracks.get()[0].uri, 'dummy:a')
        self.assertInResponse('OK')
    def test_searchadd(self):
        self.backend.library.dummy_search_result = SearchResult(
            tracks=[Track(uri='dummy:a', name='A')])
        self.assertEqual(self.core.tracklist.length.get(), 0)
        self.send_request('searchadd "title" "a"')
        self.assertEqual(self.core.tracklist.length.get(), 1)
        self.assertEqual(self.core.tracklist.tracks.get()[0].uri, 'dummy:a')
        self.assertInResponse('OK')
    def test_searchaddpl_appends_to_existing_playlist(self):
        playlist = self.core.playlists.create('my favs').get()
        playlist = playlist.replace(tracks=[
            Track(uri='dummy:x', name='X'),
            Track(uri='dummy:y', name='y'),
        ])
        self.core.playlists.save(playlist)
        self.backend.library.dummy_search_result = SearchResult(
            tracks=[Track(uri='dummy:a', name='A')])
        items = self.core.playlists.get_items(playlist.uri).get()
        self.assertEqual(len(items), 2)
        self.send_request('searchaddpl "my favs" "title" "a"')
        items = self.core.playlists.get_items(playlist.uri).get()
        self.assertEqual(len(items), 3)
        self.assertEqual(items[0].uri, 'dummy:x')
        self.assertEqual(items[1].uri, 'dummy:y')
        self.assertEqual(items[2].uri, 'dummy:a')
        self.assertInResponse('OK')
    def test_searchaddpl_creates_missing_playlist(self):
        self.backend.library.dummy_search_result = SearchResult(
            tracks=[Track(uri='dummy:a', name='A')])
        playlists = self.core.playlists.as_list().get()
        self.assertNotIn('my favs', {p.name for p in playlists})
        self.send_request('searchaddpl "my favs" "title" "a"')
        playlists = self.core.playlists.as_list().get()
        playlist = {p.name: p for p in playlists}['my favs']
        items = self.core.playlists.get_items(playlist.uri).get()
        self.assertEqual(len(items), 1)
        self.assertEqual(items[0].uri, 'dummy:a')
        self.assertInResponse('OK')
    # --- listall / listallinfo ---
    def test_listall_without_uri(self):
        tracks = [Track(uri='dummy:/a', name='a'),
                  Track(uri='dummy:/foo/b', name='b')]
        self.backend.library.dummy_library = tracks
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo'),
                        Ref.album(uri='dummy:/album', name='album'),
                        Ref.artist(uri='dummy:/artist', name='artist'),
                        Ref.playlist(uri='dummy:/pl', name='pl')],
            'dummy:/foo': [Ref.track(uri='dummy:/foo/b', name='b')]}
        self.send_request('listall')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('directory: /dummy/foo')
        self.assertInResponse('directory: /dummy/album')
        self.assertInResponse('directory: /dummy/artist')
        self.assertInResponse('directory: /dummy/pl')
        self.assertInResponse('file: dummy:/foo/b')
        self.assertInResponse('OK')
    def test_listall_with_uri(self):
        tracks = [Track(uri='dummy:/a', name='a'),
                  Track(uri='dummy:/foo/b', name='b')]
        self.backend.library.dummy_library = tracks
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')],
            'dummy:/foo': [Ref.track(uri='dummy:/foo/b', name='b')]}
        self.send_request('listall "/dummy/foo"')
        self.assertNotInResponse('file: dummy:/a')
        self.assertInResponse('directory: /dummy/foo')
        self.assertInResponse('file: dummy:/foo/b')
        self.assertInResponse('OK')
    def test_listall_with_unknown_uri(self):
        self.send_request('listall "/unknown"')
        self.assertEqualResponse('ACK [50@0] {listall} Not found')
    def test_listall_for_dir_with_and_without_leading_slash_is_the_same(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('listall "dummy"')
        response2 = self.send_request('listall "/dummy"')
        self.assertEqual(response1, response2)
    def test_listall_for_dir_with_and_without_trailing_slash_is_the_same(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('listall "dummy"')
        response2 = self.send_request('listall "dummy/"')
        self.assertEqual(response1, response2)
    def test_listall_duplicate(self):
        # Two distinct URIs sharing a display name get a " [2]" suffix.
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/a1', name='a'),
                        Ref.directory(uri='dummy:/a2', name='a')]}
        self.send_request('listall')
        self.assertInResponse('directory: /dummy/a')
        self.assertInResponse('directory: /dummy/a [2]')
    def test_listallinfo_without_uri(self):
        tracks = [Track(uri='dummy:/a', name='a'),
                  Track(uri='dummy:/foo/b', name='b')]
        self.backend.library.dummy_library = tracks
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo'),
                        Ref.album(uri='dummy:/album', name='album'),
                        Ref.artist(uri='dummy:/artist', name='artist'),
                        Ref.playlist(uri='dummy:/pl', name='pl')],
            'dummy:/foo': [Ref.track(uri='dummy:/foo/b', name='b')]}
        self.send_request('listallinfo')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Title: a')
        self.assertInResponse('directory: /dummy/foo')
        self.assertInResponse('directory: /dummy/album')
        self.assertInResponse('directory: /dummy/artist')
        self.assertInResponse('directory: /dummy/pl')
        self.assertInResponse('file: dummy:/foo/b')
        self.assertInResponse('Title: b')
        self.assertInResponse('OK')
    def test_listallinfo_with_uri(self):
        tracks = [Track(uri='dummy:/a', name='a'),
                  Track(uri='dummy:/foo/b', name='b')]
        self.backend.library.dummy_library = tracks
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')],
            'dummy:/foo': [Ref.track(uri='dummy:/foo/b', name='b')]}
        self.send_request('listallinfo "/dummy/foo"')
        self.assertNotInResponse('file: dummy:/a')
        self.assertNotInResponse('Title: a')
        self.assertInResponse('directory: /dummy/foo')
        self.assertInResponse('file: dummy:/foo/b')
        self.assertInResponse('Title: b')
        self.assertInResponse('OK')
    def test_listallinfo_with_unknown_uri(self):
        self.send_request('listallinfo "/unknown"')
        self.assertEqualResponse('ACK [50@0] {listallinfo} Not found')
    def test_listallinfo_for_dir_with_and_without_leading_slash_is_same(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('listallinfo "dummy"')
        response2 = self.send_request('listallinfo "/dummy"')
        self.assertEqual(response1, response2)
    def test_listallinfo_for_dir_with_and_without_trailing_slash_is_same(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('listallinfo "dummy"')
        response2 = self.send_request('listallinfo "dummy/"')
        self.assertEqual(response1, response2)
    def test_listallinfo_duplicate(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/a1', name='a'),
                        Ref.directory(uri='dummy:/a2', name='a')]}
        self.send_request('listallinfo')
        self.assertInResponse('directory: /dummy/a')
        self.assertInResponse('directory: /dummy/a [2]')
    def test_listfiles(self):
        self.send_request('listfiles')
        self.assertEqualResponse('ACK [0@0] {listfiles} Not implemented')
    # --- lsinfo (last-modified is mocked so responses are deterministic) ---
    @mock.patch.object(stored_playlists, '_get_last_modified')
    def test_lsinfo_without_path_returns_same_as_for_root(
            self, last_modified_mock):
        last_modified_mock.return_value = '2015-08-05T22:51:06Z'
        self.backend.playlists.set_dummy_playlists([
            Playlist(name='a', uri='dummy:/a')])
        response1 = self.send_request('lsinfo')
        response2 = self.send_request('lsinfo "/"')
        self.assertEqual(response1, response2)
    @mock.patch.object(stored_playlists, '_get_last_modified')
    def test_lsinfo_with_empty_path_returns_same_as_for_root(
            self, last_modified_mock):
        last_modified_mock.return_value = '2015-08-05T22:51:06Z'
        self.backend.playlists.set_dummy_playlists([
            Playlist(name='a', uri='dummy:/a')])
        response1 = self.send_request('lsinfo ""')
        response2 = self.send_request('lsinfo "/"')
        self.assertEqual(response1, response2)
    @mock.patch.object(stored_playlists, '_get_last_modified')
    def test_lsinfo_for_root_includes_playlists(self, last_modified_mock):
        last_modified_mock.return_value = '2015-08-05T22:51:06Z'
        self.backend.playlists.set_dummy_playlists([
            Playlist(name='a', uri='dummy:/a')])
        self.send_request('lsinfo "/"')
        self.assertInResponse('playlist: a')
        self.assertInResponse('Last-Modified: 2015-08-05T22:51:06Z')
        self.assertInResponse('OK')
    def test_lsinfo_for_root_includes_dirs_for_each_lib_with_content(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        self.send_request('lsinfo "/"')
        self.assertInResponse('directory: dummy')
        self.assertInResponse('OK')
    @mock.patch.object(stored_playlists, '_get_last_modified')
    def test_lsinfo_for_dir_with_and_without_leading_slash_is_the_same(
            self, last_modified_mock):
        last_modified_mock.return_value = '2015-08-05T22:51:06Z'
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('lsinfo "dummy"')
        response2 = self.send_request('lsinfo "/dummy"')
        self.assertEqual(response1, response2)
    @mock.patch.object(stored_playlists, '_get_last_modified')
    def test_lsinfo_for_dir_with_and_without_trailing_slash_is_the_same(
            self, last_modified_mock):
        last_modified_mock.return_value = '2015-08-05T22:51:06Z'
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        response1 = self.send_request('lsinfo "dummy"')
        response2 = self.send_request('lsinfo "dummy/"')
        self.assertEqual(response1, response2)
    def test_lsinfo_for_dir_includes_tracks(self):
        self.backend.library.dummy_library = [
            Track(uri='dummy:/a', name='a'),
        ]
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a')]}
        self.send_request('lsinfo "/dummy"')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Title: a')
        self.assertInResponse('OK')
    def test_lsinfo_for_dir_includes_subdirs(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/foo', name='foo')]}
        self.send_request('lsinfo "/dummy"')
        self.assertInResponse('directory: dummy/foo')
        self.assertInResponse('OK')
    def test_lsinfo_for_empty_dir_returns_nothing(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': []}
        self.send_request('lsinfo "/dummy"')
        self.assertInResponse('OK')
    def test_lsinfo_for_dir_does_not_recurse(self):
        self.backend.library.dummy_library = [
            Track(uri='dummy:/a', name='a'),
        ]
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/foo', name='foo')],
            'dummy:/foo': [Ref.track(uri='dummy:/a', name='a')]}
        self.send_request('lsinfo "/dummy"')
        self.assertNotInResponse('file: dummy:/a')
        self.assertInResponse('OK')
    def test_lsinfo_for_dir_does_not_include_self(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/foo', name='foo')],
            'dummy:/foo': [Ref.track(uri='dummy:/a', name='a')]}
        self.send_request('lsinfo "/dummy"')
        self.assertNotInResponse('directory: dummy')
        self.assertInResponse('OK')
    def test_lsinfo_for_root_returns_browse_result_before_playlists(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
                        Ref.directory(uri='dummy:/foo', name='foo')]}
        self.backend.playlists.set_dummy_playlists([
            Playlist(name='a', uri='dummy:/a')])
        response = self.send_request('lsinfo "/"')
        self.assertLess(response.index('directory: dummy'),
                        response.index('playlist: a'))
    def test_lsinfo_duplicate(self):
        self.backend.library.dummy_browse_result = {
            'dummy:/': [Ref.directory(uri='dummy:/a1', name='a'),
                        Ref.directory(uri='dummy:/a2', name='a')]}
        self.send_request('lsinfo "/dummy"')
        self.assertInResponse('directory: dummy/a')
        self.assertInResponse('directory: dummy/a [2]')
    # --- update / rescan (no-ops against the dummy backend) ---
    def test_update_without_uri(self):
        self.send_request('update')
        self.assertInResponse('updating_db: 0')
        self.assertInResponse('OK')
    def test_update_with_uri(self):
        self.send_request('update "file:///dev/urandom"')
        self.assertInResponse('updating_db: 0')
        self.assertInResponse('OK')
    def test_rescan_without_uri(self):
        self.send_request('rescan')
        self.assertInResponse('updating_db: 0')
        self.assertInResponse('OK')
    def test_rescan_with_uri(self):
        self.send_request('rescan "file:///dev/urandom"')
        self.assertInResponse('updating_db: 0')
        self.assertInResponse('OK')
class MusicDatabaseFindTest(protocol.BaseTestCase):
    """Protocol tests for the MPD ``find`` command.

    Non-track search hits (artists/albums) are surfaced to MPD clients as
    fake tracks titled ``Artist: X`` / ``Album: X``; the first few tests
    pin down exactly which field filters include or exclude those fakes.
    The remaining tests only assert the filter is accepted (response 'OK').
    """
    def test_find_includes_fake_artist_and_album_tracks(self):
        self.backend.library.dummy_find_exact_result = SearchResult(
            albums=[Album(uri='dummy:album:a', name='A', date='2001')],
            artists=[Artist(uri='dummy:artist:b', name='B')],
            tracks=[Track(uri='dummy:track:c', name='C')])
        self.send_request('find "any" "foo"')
        self.assertInResponse('file: dummy:artist:b')
        self.assertInResponse('Title: Artist: B')
        self.assertInResponse('file: dummy:album:a')
        self.assertInResponse('Title: Album: A')
        self.assertInResponse('Date: 2001')
        self.assertInResponse('file: dummy:track:c')
        self.assertInResponse('Title: C')
        self.assertInResponse('OK')
    def test_find_artist_does_not_include_fake_artist_tracks(self):
        self.backend.library.dummy_find_exact_result = SearchResult(
            albums=[Album(uri='dummy:album:a', name='A', date='2001')],
            artists=[Artist(uri='dummy:artist:b', name='B')],
            tracks=[Track(uri='dummy:track:c', name='C')])
        self.send_request('find "artist" "foo"')
        self.assertNotInResponse('file: dummy:artist:b')
        self.assertNotInResponse('Title: Artist: B')
        self.assertInResponse('file: dummy:album:a')
        self.assertInResponse('Title: Album: A')
        self.assertInResponse('Date: 2001')
        self.assertInResponse('file: dummy:track:c')
        self.assertInResponse('Title: C')
        self.assertInResponse('OK')
    def test_find_albumartist_does_not_include_fake_artist_tracks(self):
        self.backend.library.dummy_find_exact_result = SearchResult(
            albums=[Album(uri='dummy:album:a', name='A', date='2001')],
            artists=[Artist(uri='dummy:artist:b', name='B')],
            tracks=[Track(uri='dummy:track:c', name='C')])
        self.send_request('find "albumartist" "foo"')
        self.assertNotInResponse('file: dummy:artist:b')
        self.assertNotInResponse('Title: Artist: B')
        self.assertInResponse('file: dummy:album:a')
        self.assertInResponse('Title: Album: A')
        self.assertInResponse('Date: 2001')
        self.assertInResponse('file: dummy:track:c')
        self.assertInResponse('Title: C')
        self.assertInResponse('OK')
    def test_find_artist_and_album_does_not_include_fake_tracks(self):
        self.backend.library.dummy_find_exact_result = SearchResult(
            albums=[Album(uri='dummy:album:a', name='A', date='2001')],
            artists=[Artist(uri='dummy:artist:b', name='B')],
            tracks=[Track(uri='dummy:track:c', name='C')])
        self.send_request('find "artist" "foo" "album" "bar"')
        self.assertNotInResponse('file: dummy:artist:b')
        self.assertNotInResponse('Title: Artist: B')
        self.assertNotInResponse('file: dummy:album:a')
        self.assertNotInResponse('Title: Album: A')
        self.assertNotInResponse('Date: 2001')
        self.assertInResponse('file: dummy:track:c')
        self.assertInResponse('Title: C')
        self.assertInResponse('OK')
    # Each supported filter field, quoted and unquoted:
    def test_find_album(self):
        self.send_request('find "album" "what"')
        self.assertInResponse('OK')
    def test_find_album_without_quotes(self):
        self.send_request('find album "what"')
        self.assertInResponse('OK')
    def test_find_artist(self):
        self.send_request('find "artist" "what"')
        self.assertInResponse('OK')
    def test_find_artist_without_quotes(self):
        self.send_request('find artist "what"')
        self.assertInResponse('OK')
    def test_find_albumartist(self):
        self.send_request('find "albumartist" "what"')
        self.assertInResponse('OK')
    def test_find_albumartist_without_quotes(self):
        self.send_request('find albumartist "what"')
        self.assertInResponse('OK')
    def test_find_composer(self):
        self.send_request('find "composer" "what"')
        self.assertInResponse('OK')
    def test_find_composer_without_quotes(self):
        self.send_request('find composer "what"')
        self.assertInResponse('OK')
    def test_find_performer(self):
        self.send_request('find "performer" "what"')
        self.assertInResponse('OK')
    def test_find_performer_without_quotes(self):
        self.send_request('find performer "what"')
        self.assertInResponse('OK')
    def test_find_filename(self):
        self.send_request('find "filename" "afilename"')
        self.assertInResponse('OK')
    def test_find_filename_without_quotes(self):
        self.send_request('find filename "afilename"')
        self.assertInResponse('OK')
    def test_find_file(self):
        self.send_request('find "file" "afilename"')
        self.assertInResponse('OK')
    def test_find_file_without_quotes(self):
        self.send_request('find file "afilename"')
        self.assertInResponse('OK')
    def test_find_title(self):
        self.send_request('find "title" "what"')
        self.assertInResponse('OK')
    def test_find_title_without_quotes(self):
        self.send_request('find title "what"')
        self.assertInResponse('OK')
    def test_find_track_no(self):
        self.send_request('find "track" "10"')
        self.assertInResponse('OK')
    def test_find_track_no_without_quotes(self):
        self.send_request('find track "10"')
        self.assertInResponse('OK')
    def test_find_track_no_without_filter_value(self):
        self.send_request('find "track" ""')
        self.assertInResponse('OK')
    def test_find_genre(self):
        self.send_request('find "genre" "what"')
        self.assertInResponse('OK')
    def test_find_genre_without_quotes(self):
        self.send_request('find genre "what"')
        self.assertInResponse('OK')
    def test_find_date(self):
        self.send_request('find "date" "2002-01-01"')
        self.assertInResponse('OK')
    def test_find_date_without_quotes(self):
        self.send_request('find date "2002-01-01"')
        self.assertInResponse('OK')
    def test_find_date_with_capital_d_and_incomplete_date(self):
        self.send_request('find Date "2005"')
        self.assertInResponse('OK')
    def test_find_else_should_fail(self):
        # Unknown filter fields must be rejected with an ACK error.
        self.send_request('find "somethingelse" "what"')
        self.assertEqualResponse('ACK [2@0] {find} incorrect arguments')
    def test_find_album_and_artist(self):
        self.send_request('find album "album_what" artist "artist_what"')
        self.assertInResponse('OK')
    def test_find_without_filter_value(self):
        self.send_request('find "album" ""')
        self.assertInResponse('OK')
class MusicDatabaseListTest(protocol.BaseTestCase):
def test_list(self):
self.backend.library.dummy_get_distinct_result = {
'artist': set(['A Artist'])}
self.send_request('list "artist" "artist" "foo"')
self.assertInResponse('Artist: A Artist')
self.assertInResponse('OK')
def test_list_foo_returns_ack(self):
self.send_request('list "foo"')
self.assertEqualResponse('ACK [2@0] {list} incorrect arguments')
# Track title
def test_list_title(self):
self.send_request('list "title"')
self.assertInResponse('OK')
# Artist
def test_list_artist_with_quotes(self):
self.send_request('list "artist"')
self.assertInResponse('OK')
def test_list_artist_without_quotes(self):
self.send_request('list artist')
self.assertInResponse('OK')
def test_list_artist_without_quotes_and_capitalized(self):
self.send_request('list Artist')
self.assertInResponse('OK')
def test_list_artist_with_query_of_one_token(self):
self.send_request('list "artist" "anartist"')
self.assertEqualResponse(
'ACK [2@0] {list} should be "Album" for 3 arguments')
def test_list_artist_with_unknown_field_in_query_returns_ack(self):
self.send_request('list "artist" "foo" "bar"')
self.assertEqualResponse('ACK [2@0] {list} not able to parse args')
def test_list_artist_by_artist(self):
self.send_request('list "artist" "artist" "anartist"')
self.assertInResponse('OK')
def test_list_artist_by_album(self):
self.send_request('list "artist" "album" "analbum"')
self.assertInResponse('OK')
def test_list_artist_by_full_date(self):
self.send_request('list "artist" "date" "2001-01-01"')
self.assertInResponse('OK')
def test_list_artist_by_year(self):
self.send_request('list "artist" "date" "2001"')
self.assertInResponse('OK')
def test_list_artist_by_genre(self):
self.send_request('list "artist" "genre" "agenre"')
self.assertInResponse('OK')
def test_list_artist_by_artist_and_album(self):
self.send_request(
'list "artist" "artist" "anartist" "album" "analbum"')
self.assertInResponse('OK')
def test_list_artist_without_filter_value(self):
self.send_request('list "artist" "artist" ""')
self.assertInResponse('OK')
def test_list_artist_should_not_return_artists_without_names(self):
self.backend.library.dummy_find_exact_result = SearchResult(
tracks=[Track(artists=[Artist(name='')])])
self.send_request('list "artist"')
self.assertNotInResponse('Artist: ')
self.assertInResponse('OK')
# Albumartist
def test_list_albumartist_with_quotes(self):
self.send_request('list "albumartist"')
self.assertInResponse('OK')
def test_list_albumartist_without_quotes(self):
self.send_request('list albumartist')
self.assertInResponse('OK')
def test_list_albumartist_without_quotes_and_capitalized(self):
self.send_request('list Albumartist')
self.assertInResponse('OK')
def test_list_albumartist_with_query_of_one_token(self):
self.send_request('list "albumartist" "anartist"')
self.assertEqualResponse(
'ACK [2@0] {list} should be "Album" for 3 arguments')
def test_list_albumartist_with_unknown_field_in_query_returns_ack(self):
self.send_request('list "albumartist" "foo" "bar"')
self.assertEqualResponse('ACK [2@0] {list} not able to parse args')
def test_list_albumartist_by_artist(self):
self.send_request('list "albumartist" "artist" "anartist"')
self.assertInResponse('OK')
def test_list_albumartist_by_album(self):
self.send_request('list "albumartist" "album" "analbum"')
self.assertInResponse('OK')
def test_list_albumartist_by_full_date(self):
self.send_request('list "albumartist" "date" "2001-01-01"')
self.assertInResponse('OK')
def test_list_albumartist_by_year(self):
self.send_request('list "albumartist" "date" "2001"')
self.assertInResponse('OK')
def test_list_albumartist_by_genre(self):
self.send_request('list "albumartist" "genre" "agenre"')
self.assertInResponse('OK')
def test_list_albumartist_by_artist_and_album(self):
self.send_request(
'list "albumartist" "artist" "anartist" "album" "analbum"')
self.assertInResponse('OK')
def test_list_albumartist_without_filter_value(self):
self.send_request('list "albumartist" "artist" ""')
self.assertInResponse('OK')
def test_list_albumartist_should_not_return_artists_without_names(self):
self.backend.library.dummy_find_exact_result = SearchResult(
tracks=[Track(album=Album(artists=[Artist(name='')]))])
self.send_request('list "albumartist"')
self.assertNotInResponse('Artist: ')
self.assertNotInResponse('Albumartist: ')
self.assertNotInResponse('Composer: ')
self.assertNotInResponse('Performer: ')
self.assertInResponse('OK')
    # Composer
    # Same matrix as the albumartist section, applied to the "composer" tag:
    # quoting variants, error cases, and per-field filters.
    def test_list_composer_with_quotes(self):
        self.send_request('list "composer"')
        self.assertInResponse('OK')
    def test_list_composer_without_quotes(self):
        self.send_request('list composer')
        self.assertInResponse('OK')
    def test_list_composer_without_quotes_and_capitalized(self):
        self.send_request('list Composer')
        self.assertInResponse('OK')
    def test_list_composer_with_query_of_one_token(self):
        # One-token queries are only valid for the Album tag, per the ACK.
        self.send_request('list "composer" "anartist"')
        self.assertEqualResponse(
            'ACK [2@0] {list} should be "Album" for 3 arguments')
    def test_list_composer_with_unknown_field_in_query_returns_ack(self):
        self.send_request('list "composer" "foo" "bar"')
        self.assertEqualResponse('ACK [2@0] {list} not able to parse args')
    def test_list_composer_by_artist(self):
        self.send_request('list "composer" "artist" "anartist"')
        self.assertInResponse('OK')
    def test_list_composer_by_album(self):
        self.send_request('list "composer" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_composer_by_full_date(self):
        self.send_request('list "composer" "date" "2001-01-01"')
        self.assertInResponse('OK')
    def test_list_composer_by_year(self):
        self.send_request('list "composer" "date" "2001"')
        self.assertInResponse('OK')
    def test_list_composer_by_genre(self):
        self.send_request('list "composer" "genre" "agenre"')
        self.assertInResponse('OK')
    def test_list_composer_by_artist_and_album(self):
        self.send_request(
            'list "composer" "artist" "anartist" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_composer_without_filter_value(self):
        self.send_request('list "composer" "artist" ""')
        self.assertInResponse('OK')
    def test_list_composer_should_not_return_artists_without_names(self):
        # A composer with an empty name must not produce any tag line.
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[Track(composers=[Artist(name='')])])
        self.send_request('list "composer"')
        self.assertNotInResponse('Artist: ')
        self.assertNotInResponse('Albumartist: ')
        self.assertNotInResponse('Composer: ')
        self.assertNotInResponse('Performer: ')
        self.assertInResponse('OK')
    # Performer
    # Quoting variants for the "performer" tag.
    def test_list_performer_with_quotes(self):
        self.send_request('list "performer"')
        self.assertInResponse('OK')
    def test_list_performer_without_quotes(self):
        self.send_request('list performer')
        self.assertInResponse('OK')
def test_list_performer_without_quotes_and_capitalized(self):
self.send_request('list Albumartist')
self.assertInResponse('OK')
    def test_list_performer_with_query_of_one_token(self):
        # One-token queries are only valid for the Album tag, per the ACK.
        self.send_request('list "performer" "anartist"')
        self.assertEqualResponse(
            'ACK [2@0] {list} should be "Album" for 3 arguments')
    def test_list_performer_with_unknown_field_in_query_returns_ack(self):
        self.send_request('list "performer" "foo" "bar"')
        self.assertEqualResponse('ACK [2@0] {list} not able to parse args')
    # Filtering by each supported field must terminate with OK.
    def test_list_performer_by_artist(self):
        self.send_request('list "performer" "artist" "anartist"')
        self.assertInResponse('OK')
    def test_list_performer_by_album(self):
        self.send_request('list "performer" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_performer_by_full_date(self):
        self.send_request('list "performer" "date" "2001-01-01"')
        self.assertInResponse('OK')
    def test_list_performer_by_year(self):
        self.send_request('list "performer" "date" "2001"')
        self.assertInResponse('OK')
    def test_list_performer_by_genre(self):
        self.send_request('list "performer" "genre" "agenre"')
        self.assertInResponse('OK')
    def test_list_performer_by_artist_and_album(self):
        self.send_request(
            'list "performer" "artist" "anartist" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_performer_without_filter_value(self):
        self.send_request('list "performer" "artist" ""')
        self.assertInResponse('OK')
    def test_list_performer_should_not_return_artists_without_names(self):
        # A performer with an empty name must not produce any tag line.
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[Track(performers=[Artist(name='')])])
        self.send_request('list "performer"')
        self.assertNotInResponse('Artist: ')
        self.assertNotInResponse('Albumartist: ')
        self.assertNotInResponse('Composer: ')
        self.assertNotInResponse('Performer: ')
        self.assertInResponse('OK')
    # Album
    # "list album" is special: unlike the artist-like tags, it accepts a
    # single value token as an artist filter (see one-token test below).
    def test_list_album_with_quotes(self):
        self.send_request('list "album"')
        self.assertInResponse('OK')
    def test_list_album_without_quotes(self):
        self.send_request('list album')
        self.assertInResponse('OK')
    def test_list_album_without_quotes_and_capitalized(self):
        self.send_request('list Album')
        self.assertInResponse('OK')
    def test_list_album_with_artist_name(self):
        # The one-token form filters albums by artist and reports each
        # distinct album name from the backend.
        self.backend.library.dummy_get_distinct_result = {
            'album': set(['foo'])}
        self.send_request('list "album" "anartist"')
        self.assertInResponse('Album: foo')
        self.assertInResponse('OK')
    def test_list_album_with_artist_name_without_filter_value(self):
        self.send_request('list "album" ""')
        self.assertInResponse('OK')
    def test_list_album_by_artist(self):
        self.send_request('list "album" "artist" "anartist"')
        self.assertInResponse('OK')
    def test_list_album_by_album(self):
        self.send_request('list "album" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_album_by_albumartist(self):
        self.send_request('list "album" "albumartist" "anartist"')
        self.assertInResponse('OK')
    def test_list_album_by_composer(self):
        self.send_request('list "album" "composer" "anartist"')
        self.assertInResponse('OK')
    def test_list_album_by_performer(self):
        self.send_request('list "album" "performer" "anartist"')
        self.assertInResponse('OK')
    def test_list_album_by_full_date(self):
        self.send_request('list "album" "date" "2001-01-01"')
        self.assertInResponse('OK')
    def test_list_album_by_year(self):
        self.send_request('list "album" "date" "2001"')
        self.assertInResponse('OK')
    def test_list_album_by_genre(self):
        self.send_request('list "album" "genre" "agenre"')
        self.assertInResponse('OK')
    def test_list_album_by_artist_and_album(self):
        self.send_request(
            'list "album" "artist" "anartist" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_album_without_filter_value(self):
        self.send_request('list "album" "artist" ""')
        self.assertInResponse('OK')
    def test_list_album_should_not_return_albums_without_names(self):
        # An album with an empty name must not produce an "Album: " line.
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[Track(album=Album(name=''))])
        self.send_request('list "album"')
        self.assertNotInResponse('Album: ')
        self.assertInResponse('OK')
    # Date
    # Same matrix applied to the "date" tag.
    def test_list_date_with_quotes(self):
        self.send_request('list "date"')
        self.assertInResponse('OK')
    def test_list_date_without_quotes(self):
        self.send_request('list date')
        self.assertInResponse('OK')
    def test_list_date_without_quotes_and_capitalized(self):
        self.send_request('list Date')
        self.assertInResponse('OK')
    def test_list_date_with_query_of_one_token(self):
        # One-token queries are only valid for the Album tag, per the ACK.
        self.send_request('list "date" "anartist"')
        self.assertEqualResponse(
            'ACK [2@0] {list} should be "Album" for 3 arguments')
    def test_list_date_by_artist(self):
        self.send_request('list "date" "artist" "anartist"')
        self.assertInResponse('OK')
    def test_list_date_by_album(self):
        self.send_request('list "date" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_date_by_full_date(self):
        self.send_request('list "date" "date" "2001-01-01"')
        self.assertInResponse('OK')
    def test_list_date_by_year(self):
        self.send_request('list "date" "date" "2001"')
        self.assertInResponse('OK')
    def test_list_date_by_genre(self):
        self.send_request('list "date" "genre" "agenre"')
        self.assertInResponse('OK')
    def test_list_date_by_artist_and_album(self):
        self.send_request('list "date" "artist" "anartist" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_date_without_filter_value(self):
        self.send_request('list "date" "artist" ""')
        self.assertInResponse('OK')
    def test_list_date_should_not_return_blank_dates(self):
        # A track with an empty date must not produce a "Date: " line.
        self.backend.library.dummy_find_exact_result = SearchResult(
            tracks=[Track(date='')])
        self.send_request('list "date"')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('OK')
    # Genre
    # Same matrix applied to the "genre" tag.
    def test_list_genre_with_quotes(self):
        self.send_request('list "genre"')
        self.assertInResponse('OK')
    def test_list_genre_without_quotes(self):
        self.send_request('list genre')
        self.assertInResponse('OK')
    def test_list_genre_without_quotes_and_capitalized(self):
        self.send_request('list Genre')
        self.assertInResponse('OK')
    def test_list_genre_with_query_of_one_token(self):
        # One-token queries are only valid for the Album tag, per the ACK.
        self.send_request('list "genre" "anartist"')
        self.assertEqualResponse(
            'ACK [2@0] {list} should be "Album" for 3 arguments')
    def test_list_genre_by_artist(self):
        self.send_request('list "genre" "artist" "anartist"')
        self.assertInResponse('OK')
    def test_list_genre_by_album(self):
        self.send_request('list "genre" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_genre_by_full_date(self):
        self.send_request('list "genre" "date" "2001-01-01"')
        self.assertInResponse('OK')
    def test_list_genre_by_year(self):
        self.send_request('list "genre" "date" "2001"')
        self.assertInResponse('OK')
    def test_list_genre_by_genre(self):
        self.send_request('list "genre" "genre" "agenre"')
        self.assertInResponse('OK')
    def test_list_genre_by_artist_and_album(self):
        self.send_request(
            'list "genre" "artist" "anartist" "album" "analbum"')
        self.assertInResponse('OK')
    def test_list_genre_without_filter_value(self):
        self.send_request('list "genre" "artist" ""')
        self.assertInResponse('OK')
class MusicDatabaseSearchTest(protocol.BaseTestCase):
    """Tests for the MPD ``search`` command.
    Each test issues a raw ``search`` request over the protocol; since the
    dummy backend accepts any well-formed query, most tests only assert that
    the command terminates with ``OK`` (for each field, with and without
    quotes, and with an empty filter value).
    """
    def test_search(self):
        # Album and artist hits are reported as pseudo-tracks whose titles
        # carry an "Album: " / "Artist: " prefix (asserted below).
        self.backend.library.dummy_search_result = SearchResult(
            albums=[Album(uri='dummy:album:a', name='A')],
            artists=[Artist(uri='dummy:artist:b', name='B')],
            tracks=[Track(uri='dummy:track:c', name='C')])
        self.send_request('search "any" "foo"')
        self.assertInResponse('file: dummy:album:a')
        self.assertInResponse('Title: Album: A')
        self.assertInResponse('file: dummy:artist:b')
        self.assertInResponse('Title: Artist: B')
        self.assertInResponse('file: dummy:track:c')
        self.assertInResponse('Title: C')
        self.assertInResponse('OK')
    def test_search_album(self):
        self.send_request('search "album" "analbum"')
        self.assertInResponse('OK')
    def test_search_album_without_quotes(self):
        self.send_request('search album "analbum"')
        self.assertInResponse('OK')
    def test_search_album_without_filter_value(self):
        self.send_request('search "album" ""')
        self.assertInResponse('OK')
    def test_search_artist(self):
        self.send_request('search "artist" "anartist"')
        self.assertInResponse('OK')
    def test_search_artist_without_quotes(self):
        self.send_request('search artist "anartist"')
        self.assertInResponse('OK')
    def test_search_artist_without_filter_value(self):
        self.send_request('search "artist" ""')
        self.assertInResponse('OK')
    def test_search_albumartist(self):
        self.send_request('search "albumartist" "analbumartist"')
        self.assertInResponse('OK')
    def test_search_albumartist_without_quotes(self):
        self.send_request('search albumartist "analbumartist"')
        self.assertInResponse('OK')
    def test_search_albumartist_without_filter_value(self):
        self.send_request('search "albumartist" ""')
        self.assertInResponse('OK')
    def test_search_composer(self):
        self.send_request('search "composer" "acomposer"')
        self.assertInResponse('OK')
    def test_search_composer_without_quotes(self):
        self.send_request('search composer "acomposer"')
        self.assertInResponse('OK')
    def test_search_composer_without_filter_value(self):
        self.send_request('search "composer" ""')
        self.assertInResponse('OK')
    def test_search_performer(self):
        self.send_request('search "performer" "aperformer"')
        self.assertInResponse('OK')
    def test_search_performer_without_quotes(self):
        self.send_request('search performer "aperformer"')
        self.assertInResponse('OK')
    def test_search_performer_without_filter_value(self):
        self.send_request('search "performer" ""')
        self.assertInResponse('OK')
    def test_search_filename(self):
        self.send_request('search "filename" "afilename"')
        self.assertInResponse('OK')
    def test_search_filename_without_quotes(self):
        self.send_request('search filename "afilename"')
        self.assertInResponse('OK')
    def test_search_filename_without_filter_value(self):
        self.send_request('search "filename" ""')
        self.assertInResponse('OK')
    def test_search_file(self):
        self.send_request('search "file" "afilename"')
        self.assertInResponse('OK')
    def test_search_file_without_quotes(self):
        self.send_request('search file "afilename"')
        self.assertInResponse('OK')
    def test_search_file_without_filter_value(self):
        self.send_request('search "file" ""')
        self.assertInResponse('OK')
    def test_search_title(self):
        self.send_request('search "title" "atitle"')
        self.assertInResponse('OK')
    def test_search_title_without_quotes(self):
        self.send_request('search title "atitle"')
        self.assertInResponse('OK')
    def test_search_title_without_filter_value(self):
        self.send_request('search "title" ""')
        self.assertInResponse('OK')
    def test_search_any(self):
        self.send_request('search "any" "anything"')
        self.assertInResponse('OK')
    def test_search_any_without_quotes(self):
        self.send_request('search any "anything"')
        self.assertInResponse('OK')
    def test_search_any_without_filter_value(self):
        self.send_request('search "any" ""')
        self.assertInResponse('OK')
    def test_search_track_no(self):
        self.send_request('search "track" "10"')
        self.assertInResponse('OK')
    def test_search_track_no_without_quotes(self):
        self.send_request('search track "10"')
        self.assertInResponse('OK')
    def test_search_track_no_without_filter_value(self):
        self.send_request('search "track" ""')
        self.assertInResponse('OK')
    def test_search_genre(self):
        self.send_request('search "genre" "agenre"')
        self.assertInResponse('OK')
    def test_search_genre_without_quotes(self):
        self.send_request('search genre "agenre"')
        self.assertInResponse('OK')
    def test_search_genre_without_filter_value(self):
        self.send_request('search "genre" ""')
        self.assertInResponse('OK')
    def test_search_date(self):
        self.send_request('search "date" "2002-01-01"')
        self.assertInResponse('OK')
    def test_search_date_without_quotes(self):
        self.send_request('search date "2002-01-01"')
        self.assertInResponse('OK')
    def test_search_date_with_capital_d_and_incomplete_date(self):
        self.send_request('search Date "2005"')
        self.assertInResponse('OK')
    def test_search_date_without_filter_value(self):
        self.send_request('search "date" ""')
        self.assertInResponse('OK')
    def test_search_comment(self):
        self.send_request('search "comment" "acomment"')
        self.assertInResponse('OK')
    def test_search_comment_without_quotes(self):
        self.send_request('search comment "acomment"')
        self.assertInResponse('OK')
    def test_search_comment_without_filter_value(self):
        self.send_request('search "comment" ""')
        self.assertInResponse('OK')
    def test_search_else_should_fail(self):
        # Unknown search fields must be rejected with an MPD ACK error.
        self.send_request('search "sometype" "something"')
        self.assertEqualResponse('ACK [2@0] {search} incorrect arguments')
|
repotvsupertuga/tvsupertuga.repository | refs/heads/master | instal/script.module.requests/lib/requests/packages/urllib3/contrib/appengine.py | 53 | """
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from urllib3 import PoolManager
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import logging
import os
import warnings
from ..packages.six.moves.urllib.parse import urljoin
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
TimeoutError,
SSLError
)
from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
# Module-level logger used by AppEngineManager for redirect/retry tracing.
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
    """Warning emitted when urllib3 degrades behavior to fit URLFetch."""
    pass
class AppEnginePlatformError(HTTPError):
    """Raised when a request cannot be served by the URLFetch service."""
    pass
class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabytes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True,
                 urlfetch_retries=True):
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")
        if is_prod_appengine_mvms():
            # Fixed message: the implicitly concatenated literals were
            # missing a separating space ("AppEngineManageron Managed VMs").
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager "
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")
        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/contrib.html.",
            AppEnginePlatformWarning)

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate
        self.urlfetch_retries = urlfetch_retries
        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):
        """Perform a request via URLFetch, translating its exceptions into
        the corresponding urllib3 errors and handling redirects/retries
        in Python when URLFetch's own redirect following is disabled."""
        retries = self._get_retries(retries, redirect)

        try:
            follow_redirects = (
                redirect and
                retries.redirect != 0 and
                retries.total)
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=self.urlfetch_retries and follow_redirects,
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            # Fixed message: the concatenated literals were missing a space
            # ("supportsresponses").
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports "
                "responses up to 32mb in size.", e)

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, retries=retries, **response_kw)

        # Handle redirect?
        redirect_location = redirect and http_response.get_redirect_location()
        if redirect_location:
            # Check for redirect response
            if (self.urlfetch_retries and retries.raise_on_redirect):
                raise MaxRetryError(self, url, "too many redirects")
            else:
                if http_response.status == 303:
                    # 303 See Other: subsequent request must be a GET.
                    method = 'GET'

                try:
                    retries = retries.increment(method, url, response=http_response, _pool=self)
                except MaxRetryError:
                    if retries.raise_on_redirect:
                        raise MaxRetryError(self, url, "too many redirects")
                    return http_response

                retries.sleep_for_retry(http_response)
                log.debug("Redirecting %s -> %s", url, redirect_location)
                redirect_url = urljoin(url, redirect_location)
                return self.urlopen(
                    method, redirect_url, body, headers,
                    retries=retries, redirect=redirect,
                    timeout=timeout, **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.getheader('Retry-After'))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)
            retries.sleep(http_response)
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
        """Wrap a URLFetch response object in a urllib3 HTTPResponse."""
        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')

            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == 'chunked':
            encodings = transfer_encoding.split(",")
            encodings.remove('chunked')
            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        """Collapse a urllib3 Timeout into URLFetch's single deadline."""
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        """Normalize retries to a Retry object and warn about unsupported
        per-phase (connect/read/redirect) retry settings."""
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)

        return retries
def is_appengine():
    """Return True when running on any recognized App Engine runtime
    (local dev server, production sandbox, or a Managed VM)."""
    detectors = (is_local_appengine, is_prod_appengine, is_prod_appengine_mvms)
    return any(detect() for detect in detectors)
def is_appengine_sandbox():
    """Return True for sandboxed App Engine runtimes (i.e. any App Engine
    environment except Managed VMs, where real sockets are available)."""
    if is_prod_appengine_mvms():
        return False
    return is_appengine()
def is_local_appengine():
    """Return True when running under the App Engine development server.

    Uses ``os.environ.get`` for ``SERVER_SOFTWARE`` so the check cannot
    raise ``KeyError`` when ``APPENGINE_RUNTIME`` is set but
    ``SERVER_SOFTWARE`` is not (e.g. in tests or stripped environments);
    the original subscripted ``os.environ['SERVER_SOFTWARE']`` directly.
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ.get('SERVER_SOFTWARE', ''))
def is_prod_appengine():
    """Return True when running on the production App Engine sandbox
    (and not on a Managed VM).

    Uses ``os.environ.get`` for ``SERVER_SOFTWARE`` so the check cannot
    raise ``KeyError`` when ``APPENGINE_RUNTIME`` is set but
    ``SERVER_SOFTWARE`` is not.
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ.get('SERVER_SOFTWARE', '') and
            not is_prod_appengine_mvms())
def is_prod_appengine_mvms():
    """Return True when running on an App Engine Managed VM, signalled by
    the ``GAE_VM`` environment flag being the string ``'true'``."""
    gae_vm_flag = os.environ.get('GAE_VM')
    return gae_vm_flag == 'true'
|
shmir/IxNetwork | refs/heads/master | docs/conf.py | 2 | # -*- coding: utf-8 -*-
#
# ixnetwork documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 23 15:59:52 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
import pkg_resources
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# autodoc pulls API docs from docstrings, todo enables the todo directive
# (see todo_include_todos below), viewcode links docs to highlighted source.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
]
# Add any paths that contain custom templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document (root of the documentation tree, no suffix).
master_doc = 'index'
# General information about the project.
# NOTE: "copyright" intentionally shadows the builtin here; Sphinx reads
# this module-level name directly.
project = u'pyixnetwork'
copyright = u'2018, yoram@ignissoft.com'
author = u'yoram@ignissoft.com'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags, read from the installed
# distribution so docs always match the installed package.
# NOTE(review): pkg_resources is deprecated in modern setuptools; consider
# importlib.metadata.version(project) once the Python floor allows it.
release = pkg_resources.get_distribution(project).version
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# '_build' holds generated output and must never be scanned as input.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# Requires the sphinx.ext.todo extension enabled above.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ixnetworkdoc'
# -- Options for LaTeX output ---------------------------------------------
# LaTeX builder tweaks; all options below are currently left at defaults.
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    # Use the module-level "author" instead of the sphinx-quickstart
    # placeholder u'Author', keeping this consistent with man_pages and
    # texinfo_documents which already reference "author".
    (master_doc, 'ixnetwork.tex', u'ixnetwork Documentation',
     author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# A single man page is generated from the root document.
man_pages = [
    (master_doc, 'ixnetwork', u'ixnetwork Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    # TODO(review): 'One line description of project.' is the
    # sphinx-quickstart placeholder; replace with a real description.
    (master_doc, 'ixnetwork', u'ixnetwork Documentation',
     author, 'ixnetwork', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
akash1808/nova | refs/heads/master | nova/api/openstack/compute/contrib/flavor_swap.py | 79 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Flavor Swap API extension."""
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
authorize = extensions.soft_extension_authorizer('compute', 'flavor_swap')
class FlavorSwapController(wsgi.Controller):
    """Controller extension that adds the 'swap' attribute to flavor
    API responses (show, create, detail)."""

    def _extend_flavors(self, req, flavors):
        # Annotate each flavor dict with the swap size from its DB record;
        # a falsy swap value is reported as the empty string.
        for entry in flavors:
            record = req.get_db_flavor(entry['id'])
            entry['swap'] = record['swap'] or ""

    def _show(self, req, resp_obj):
        # Skip silently when the policy check denies the extension.
        if not authorize(req.environ['nova.context']):
            return
        body = resp_obj.obj
        if 'flavor' in body:
            self._extend_flavors(req, [body['flavor']])

    @wsgi.extends
    def show(self, req, resp_obj, id):
        return self._show(req, resp_obj)

    @wsgi.extends(action='create')
    def create(self, req, resp_obj, body):
        return self._show(req, resp_obj)

    @wsgi.extends
    def detail(self, req, resp_obj):
        if not authorize(req.environ['nova.context']):
            return
        self._extend_flavors(req, list(resp_obj.obj['flavors']))
class Flavor_swap(extensions.ExtensionDescriptor):
    """Support to show the swap status of a flavor."""

    name = "FlavorSwap"
    alias = "os-flavor-swap"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "flavor_swap/api/v1.1")
    updated = "2012-08-29T00:00:00Z"

    def get_controller_extensions(self):
        # Attach the swap-aware controller to the 'flavors' resource.
        return [extensions.ControllerExtension(
            self, 'flavors', FlavorSwapController())]
|
vitan/django | refs/heads/master | tests/field_defaults/tests.py | 405 | from datetime import datetime
from django.test import TestCase
from django.utils import six
from .models import Article
class DefaultTests(TestCase):
    """Verify that Article field defaults are applied on save."""

    def test_field_defaults(self):
        article = Article()
        before_save = datetime.now()
        article.save()

        # The primary key is auto-assigned as an integer.
        self.assertIsInstance(article.id, six.integer_types)
        # The headline falls back to its declared default value.
        self.assertEqual(article.headline, "Default headline")
        # pub_date defaults to "now", so it must be within a few seconds.
        self.assertLess((before_save - article.pub_date).seconds, 5)
|
Vogtinator/micropython | refs/heads/nspire | tests/bytecode/mp-tests/class1.py | 22 | class C:
pass
C()
|
ging/horizon | refs/heads/master | openstack_dashboard/dashboards/project/volumes/backups/tests.py | 33 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from django.utils.http import urlencode
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:volumes:index')
VOLUME_BACKUPS_TAB_URL = reverse('horizon:project:volumes:backups_tab')
class VolumeBackupsViewTests(test.TestCase):
    """Tests for the project volume-backups views, stubbing the cinder API
    with mox (calls are recorded in order, then replayed)."""

    @test.create_stubs({api.cinder: ('volume_backup_create',)})
    def test_create_backup_post(self):
        volume = self.volumes.first()
        backup = self.cinder_volume_backups.first()

        # Record the single expected cinder call before replaying.
        api.cinder.volume_backup_create(IsA(http.HttpRequest),
                                        volume.id,
                                        backup.container_name,
                                        backup.name,
                                        backup.description) \
            .AndReturn(backup)
        self.mox.ReplayAll()

        formData = {'method': 'CreateBackupForm',
                    'tenant_id': self.tenant.id,
                    'volume_id': volume.id,
                    'container_name': backup.container_name,
                    'name': backup.name,
                    'description': backup.description}
        url = reverse('horizon:project:volumes:volumes:create_backup',
                      args=[volume.id])
        res = self.client.post(url, formData)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=0, warning=0)
        self.assertRedirectsNoFollow(res, VOLUME_BACKUPS_TAB_URL)

    @test.create_stubs({api.cinder: ('volume_list',
                                     'volume_backup_supported',
                                     'volume_backup_list',
                                     'volume_backup_delete')})
    def test_delete_volume_backup(self):
        vol_backups = self.cinder_volume_backups.list()
        volumes = self.cinder_volumes.list()
        backup = self.cinder_volume_backups.first()

        # The table is rendered twice (before and after the delete), so the
        # backup/volume list calls are recorded twice, in order.
        api.cinder.volume_backup_supported(IsA(http.HttpRequest)). \
            MultipleTimes().AndReturn(True)
        api.cinder.volume_backup_list(IsA(http.HttpRequest)). \
            AndReturn(vol_backups)
        api.cinder.volume_list(IsA(http.HttpRequest)). \
            AndReturn(volumes)
        api.cinder.volume_backup_delete(IsA(http.HttpRequest), backup.id)
        api.cinder.volume_backup_list(IsA(http.HttpRequest)). \
            AndReturn(vol_backups)
        api.cinder.volume_list(IsA(http.HttpRequest)). \
            AndReturn(volumes)
        self.mox.ReplayAll()

        formData = {'action':
                    'volume_backups__delete__%s' % backup.id}
        res = self.client.post(INDEX_URL +
                               "?tab=volumes_and_snapshots__backups_tab",
                               formData, follow=True)
        self.assertIn("Scheduled deletion of Volume Backup: backup1",
                      [m.message for m in res.context['messages']])

    @test.create_stubs({api.cinder: ('volume_backup_get', 'volume_get')})
    def test_volume_backup_detail_get(self):
        backup = self.cinder_volume_backups.first()
        volume = self.cinder_volumes.get(id=backup.volume_id)

        api.cinder.volume_backup_get(IsA(http.HttpRequest), backup.id). \
            AndReturn(backup)
        api.cinder.volume_get(IsA(http.HttpRequest), backup.volume_id). \
            AndReturn(volume)
        self.mox.ReplayAll()

        url = reverse('horizon:project:volumes:backups:detail',
                      args=[backup.id])
        res = self.client.get(url)
        # Each fragment must appear exactly once with a 200 response.
        self.assertContains(res,
                            "<h1>Volume Backup Details: %s</h1>" %
                            backup.name,
                            1, 200)
        self.assertContains(res, "<dd>%s</dd>" % backup.name, 1, 200)
        self.assertContains(res, "<dd>%s</dd>" % backup.id, 1, 200)
        self.assertContains(res, "<dd>Available</dd>", 1, 200)
        self.assertContains(res, "<dt>Volume</dt>", 1, 200)

    @test.create_stubs({api.cinder: ('volume_backup_get',)})
    def test_volume_backup_detail_get_with_exception(self):
        # Test to verify redirect if get volume backup fails
        backup = self.cinder_volume_backups.first()

        api.cinder.volume_backup_get(IsA(http.HttpRequest), backup.id).\
            AndRaise(self.exceptions.cinder)
        self.mox.ReplayAll()

        url = reverse('horizon:project:volumes:backups:detail',
                      args=[backup.id])
        res = self.client.get(url)
        self.assertNoFormErrors(res)
        self.assertMessageCount(error=1)
        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({api.cinder: ('volume_backup_get', 'volume_get')})
    def test_volume_backup_detail_with_missing_volume(self):
        # Test to check page still loads even if volume is deleted
        backup = self.cinder_volume_backups.first()

        api.cinder.volume_backup_get(IsA(http.HttpRequest), backup.id). \
            AndReturn(backup)
        api.cinder.volume_get(IsA(http.HttpRequest), backup.volume_id). \
            AndRaise(self.exceptions.cinder)
        self.mox.ReplayAll()

        url = reverse('horizon:project:volumes:backups:detail',
                      args=[backup.id])
        res = self.client.get(url)
        self.assertContains(res,
                            "<h1>Volume Backup Details: %s</h1>" %
                            backup.name,
                            1, 200)
        self.assertContains(res, "<dd>%s</dd>" % backup.name, 1, 200)
        self.assertContains(res, "<dd>%s</dd>" % backup.id, 1, 200)
        self.assertContains(res, "<dd>Available</dd>", 1, 200)
        # The volume row must be absent (count 0) when the lookup fails.
        self.assertContains(res, "<dt>Volume</dt>", 0, 200)

    @test.create_stubs({api.cinder: ('volume_list',
                                     'volume_backup_restore',)})
    def test_restore_backup(self):
        backup = self.cinder_volume_backups.first()
        volumes = self.cinder_volumes.list()

        api.cinder.volume_list(IsA(http.HttpRequest)). \
            AndReturn(volumes)
        api.cinder.volume_backup_restore(IsA(http.HttpRequest),
                                         backup.id,
                                         backup.volume_id). \
            AndReturn(backup)
        self.mox.ReplayAll()

        formData = {'method': 'RestoreBackupForm',
                    'backup_id': backup.id,
                    'backup_name': backup.name,
                    'volume_id': backup.volume_id}
        url = reverse('horizon:project:volumes:backups:restore',
                      args=[backup.id])
        url += '?%s' % urlencode({'backup_name': backup.name,
                                  'volume_id': backup.volume_id})
        res = self.client.post(url, formData)
        self.assertNoFormErrors(res)
        self.assertMessageCount(success=1)
        self.assertRedirectsNoFollow(res, INDEX_URL)
|
Nicop06/ansible | refs/heads/devel | lib/ansible/modules/network/aos/aos_blueprint.py | 19 | #!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_blueprint
author: jeremy@apstra.com (@jeremyschulman)
version_added: "2.3"
short_description: Manage AOS blueprint instance
description:
- Apstra AOS Blueprint module let you manage your Blueprint easily. You can create
create and delete Blueprint by Name or ID. You can also use it to retrieve
all data from a blueprint. This module is idempotent
and support the I(check) mode. It's using the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Blueprint to manage.
Only one of I(name) or I(id) can be set.
id:
description:
- AOS Id of the IP Pool to manage (can't be used to create a new IP Pool).
Only one of I(name) or I(id) can be set.
state:
description:
- Indicate what is the expected state of the Blueprint.
choices: ['present', 'absent', 'build-ready']
default: present
timeout:
description:
- When I(state=build-ready), this timeout identifies timeout in seconds to wait before
declaring a failure.
default: 5
template:
description:
- When creating a blueprint, this value identifies, by name, an existing engineering
design template within the AOS-server.
reference_arch:
description:
- When creating a blueprint, this value identifies a known AOS reference
architecture value. I(Refer to AOS-server documentation for available values).
'''
EXAMPLES = '''
- name: Creating blueprint
aos_blueprint:
session: "{{ aos_session }}"
name: "my-blueprint"
template: "my-template"
reference_arch: two_stage_l3clos
state: present
- name: Access a blueprint and get content
aos_blueprint:
session: "{{ aos_session }}"
name: "{{ blueprint_name }}"
template: "{{ blueprint_template }}"
state: present
register: bp
- name: Delete a blueprint
aos_blueprint:
session: "{{ aos_session }}"
name: "my-blueprint"
state: absent
- name: Await blueprint build-ready, and obtain contents
aos_blueprint:
session: "{{ aos_session }}"
name: "{{ blueprint_name }}"
state: build-ready
register: bp
'''
RETURNS = '''
name:
description: Name of the Blueprint
returned: always
type: str
sample: My-Blueprint
id:
description: AOS unique ID assigned to the Blueprint
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Information about the Blueprint
returned: always
type: dict
sample: {'...'}
contents:
description: Blueprint contents data-dictionary
returned: always
type: dict
sample: { ... }
build_errors:
description: When state='build-ready', and build errors exist, this contains list of errors
returned: only when build-ready returns fail
type: list
sample: [{...}, {...}]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.aos import get_aos_session, check_aos_version, find_collection_item
from ansible.module_utils.pycompat24 import get_exception
def create_blueprint(module, aos, name):
    """Create a new blueprint on the AOS server and return it.

    Looks up the design template named in the module parameters, then
    creates a blueprint with the given name and reference architecture.
    On any API failure this calls module.fail_json(), which exits.
    """
    margs = module.params
    try:
        template_id = aos.DesignTemplates[margs['template']].id

        # Instantiate the (not yet existing) collection item by name,
        # then create it on the server.
        blueprint = aos.Blueprints[name]
        blueprint.create(template_id, reference_arch=margs['reference_arch'])
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit and KeyboardInterrupt.
        exc = get_exception()
        msg = "Unable to create blueprint: %s" % exc.message
        if 'UNPROCESSABLE ENTITY' in exc.message:
            msg += ' (likely missing dependencies)'
        module.fail_json(msg=msg)
    return blueprint
def ensure_absent(module, aos, blueprint):
    """Delete the blueprint if it exists, honouring check mode.

    Exits via module.exit_json() with changed=False when nothing exists,
    or changed=True (plus id/name) after a deletion (or simulated one).
    """
    if blueprint.exists is False:
        module.exit_json(changed=False)
    else:
        if not module.check_mode:
            try:
                blueprint.delete()
            except Exception:
                # Narrowed from a bare 'except:' so SystemExit and
                # KeyboardInterrupt are not swallowed.
                exc = get_exception()
                module.fail_json(msg='Unable to delete blueprint, %s' % exc.message)
        module.exit_json(changed=True,
                         id=blueprint.id,
                         name=blueprint.name)
def ensure_present(module, aos, blueprint):
    """Report an existing blueprint, or validate inputs and create it.

    All paths terminate through module.exit_json()/fail_json().
    """
    margs = module.params

    # Existing blueprint: nothing to change, report its contents.
    if blueprint.exists:
        module.exit_json(changed=False,
                         id=blueprint.id,
                         name=blueprint.name,
                         value=blueprint.value,
                         contents=blueprint.contents)

    # Validate the template option before attempting creation.
    if margs['template'] is None:
        module.fail_json(msg="You must define a 'template' name to create a new blueprint, currently missing")
    elif aos.DesignTemplates.find(label=margs['template']) is None:
        module.fail_json(msg="You must define a Valid 'template' name to create a new blueprint, %s is not valid" % margs['template'])

    # A reference architecture is mandatory for creation.
    if margs['reference_arch'] is None:
        module.fail_json(msg="You must define a 'reference_arch' to create a new blueprint, currently missing")

    if module.check_mode:
        # Simulated run: report what would happen without touching the server.
        module.exit_json(changed=True,
                         name=margs['name'])

    new_blueprint = create_blueprint(module, aos, margs['name'])
    module.exit_json(changed=True,
                     id=new_blueprint.id,
                     name=new_blueprint.name,
                     value=new_blueprint.value,
                     contents=new_blueprint.contents)
def ensure_build_ready(module, aos, blueprint):
    """Wait until the blueprint reaches the build-ready state.

    Exits with the blueprint contents on success; fails with the list of
    build errors if the blueprint is missing or does not converge within
    the configured timeout.
    """
    margs = module.params

    if not blueprint.exists:
        module.fail_json(msg='blueprint %s does not exist' % blueprint.name)

    # await_build_ready() takes milliseconds; the module option is seconds.
    if blueprint.await_build_ready(timeout=margs['timeout'] * 1000):
        module.exit_json(contents=blueprint.contents)
    else:
        # Bug fixes: the '%s' placeholder was never filled in, and the
        # documented 'build_errors' return key was misspelled 'build_erros'.
        module.fail_json(msg='blueprint %s has build errors' % blueprint.name,
                         build_errors=blueprint.build_errors)
def aos_blueprint(module):
    """Locate the target blueprint and dispatch on the requested state."""
    margs = module.params

    try:
        aos = get_aos_session(module, margs['session'])
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed.
        module.fail_json(msg="Unable to login to the AOS server")

    item_name = False
    item_id = False

    if margs['name'] is not None:
        item_name = margs['name']
    elif margs['id'] is not None:
        item_id = margs['id']

    # ----------------------------------------------------
    # Find Object if available based on ID or Name
    # ----------------------------------------------------
    try:
        my_blueprint = find_collection_item(aos.Blueprints,
                                            item_name=item_name,
                                            item_id=item_id)
    except Exception:
        module.fail_json(msg="Unable to find the Blueprint based on name or ID, something went wrong")

    # ----------------------------------------------------
    # Proceed based on State value
    # ----------------------------------------------------
    if margs['state'] == 'absent':
        ensure_absent(module, aos, my_blueprint)
    elif margs['state'] == 'present':
        ensure_present(module, aos, my_blueprint)
    elif margs['state'] == 'build-ready':
        ensure_build_ready(module, aos, my_blueprint)
def main():
    """Module entry point: declare the argument spec and run the logic."""
    argument_spec = dict(
        session=dict(required=True, type="dict"),
        name=dict(required=False),
        id=dict(required=False),
        state=dict(choices=['present', 'absent', 'build-ready'],
                   default='present'),
        timeout=dict(type="int", default=5),
        template=dict(required=False),
        reference_arch=dict(required=False),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[('name', 'id')],
        required_one_of=[('name', 'id')],
        supports_check_mode=True,
    )

    # Check if aos-pyez is present and match the minimum version
    check_aos_version(module, '0.6.0')

    aos_blueprint(module)
if __name__ == '__main__':
main()
|
mariianna/kodi | refs/heads/master | pelisalacarta/channels/sesionvip.py | 3 | # -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Canal para sesionvip
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
import scrapertools
import megavideo
import servertools
import binascii
import xbmctools
import config
import logger
CHANNELNAME = "sesionvip"

# This allows the channel to run in emulated mode: outside XBMC there is
# no plugin handle in argv, so fall back to an empty string.
try:
    pluginhandle = int( sys.argv[ 1 ] )
except:
    pluginhandle = ""

# Trace channel startup.
logger.info("[sesionvip.py] init")

DEBUG = True
def mainlist(params, url, category):
    """Build the channel root menu: latest entries and a search item."""
    logger.info("[sesionvip.py] mainlist")

    handle = int(sys.argv[1])

    # Root menu entries.
    xbmctools.addnewfolder(CHANNELNAME, "newlist", category, "Novedades",
                           "http://www.sesionvip.com/", "", "")
    xbmctools.addnewfolder(CHANNELNAME, "search", category, "Buscar", "", "", "")

    # Top-right label, disable sorting, then close the directory listing.
    xbmcplugin.setPluginCategory(handle=handle, category=category)
    xbmcplugin.addSortMethod(handle=handle, sortMethod=xbmcplugin.SORT_METHOD_NONE)
    xbmcplugin.endOfDirectory(handle=handle, succeeded=True)
def newlist(params,url,category):
    """List the newest entries from the given page and add a "next page"
    folder when the site provides one."""
    logger.info("[sesionvip.py] newlist")

    # Download the page
    data = scrapertools.cachePage(url)
    #logger.info(data)
    '''
    <div class="entry"><!-- Entry -->
    <h3 class="post-title">
    <a href="http://www.sesionvip.com/ver-online-millennium-2-la-chica-que-sonaba-con-una-cerilla-y-un-bidon-de-gasolina-en-alta-calidad" rel="bookmark">ver online millennium 2 La Chica Que Soñaba Con Una Cerilla Y Un Bidón De Gasolina en alta calidad</a>
    </h3>
    <p style="text-align: center;">YA DISPONIBLE – CALIDAD TS-SCREENER ALTO</p>
    <p style="text-align: center;"><img class="aligncenter size-medium wp-image-9125" title="peliculas online" src="http://www.sesionvip.com/wp-content/uploads/2009/08/1843318212-222x300.jpg" alt="peliculas online" width="222" height="300" /></p>
    <p style="text-align: left;"> <a href="http://www.sesionvip.com/ver-online-millennium-2-la-chica-que-sonaba-con-una-cerilla-y-un-bidon-de-gasolina-en-alta-calidad#more-9124" class="more-link">PULSA AQUI PARA <strong>Ver la pelicula online</strong></a></p>
    <div id="postmeta" class="postmetabox">
    Categoria: <a href="http://www.sesionvip.com/category/estrenos-online" title="Ver todas las entradas en Estrenos Online" rel="category tag">Estrenos Online</a> <br/><a href="http://www.sesionvip.com/ver-online-millennium-2-la-chica-que-sonaba-con-una-cerilla-y-un-bidon-de-gasolina-en-alta-calidad#comments" title="Comentarios en ver online millennium 2 La Chica Que Soñaba Con Una Cerilla Y Un Bidón De Gasolina en alta calidad"><strong>Comments (3)</strong></a>
    </div>
    </div><!--/entry-->
    '''

    # Extract the entries (folders)
    patronvideos = '<div class="entry"><!-- Entry -->(.*?)<!--/entry-->'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)

    for match in matches:
        logger.info("match="+match)
        nuevopatron = '<a href="([^"]+)" rel="bookmark">([^<]+)</a>'#<img.*?src="([^"]+)"'
        nuevomatches = re.compile(nuevopatron,re.DOTALL).findall(match)
        logger.info("len(nuevomatches)=%d" % len(nuevomatches))
        scrapertools.printMatches(nuevomatches)

        # Title
        scrapedtitle = nuevomatches[0][1]
        if not scrapedtitle.startswith("Descargar"):
            # Strip all SEO prefixes from the title
            scrapedtitle = scrapertools.unseo(scrapedtitle)
        # URL
        scrapedurl = urlparse.urljoin(url,nuevomatches[0][0])
        # Thumbnail
        scrapedthumbnail = ""#urlparse.urljoin(url,nuevomatches[2])
        # Plot
        scrapedplot = ""
        # Debugging
        if (DEBUG):
            logger.info("scrapedtitle="+scrapedtitle)
            logger.info("scrapedurl="+scrapedurl)
            logger.info("scrapedthumbnail="+scrapedthumbnail)

        # Add to the XBMC listing
        xbmctools.addthumbnailfolder( CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail, "listmirrors" )

    # Next page
    patronvideos = '<div class="back"><a href="([^"]+)"'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)

    for match in matches:
        # Title
        scrapedtitle = "Página siguiente"
        # URL
        scrapedurl = urlparse.urljoin(url,match)
        # Thumbnail
        scrapedthumbnail = ""
        # Plot
        scrapedplot = ""
        # Debugging
        if (DEBUG):
            logger.info("scrapedtitle="+scrapedtitle)
            logger.info("scrapedurl="+scrapedurl)
            logger.info("scrapedthumbnail="+scrapedthumbnail)

        # Add to the XBMC listing
        xbmctools.addthumbnailfolder( CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail, "newlist" )

    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def listmirrors(params,url,category):
    """List the playable video mirrors found on an entry's page."""
    logger.info("[sesionvip.py] detail")

    title = params.get("title")
    thumbnail = params.get("thumbnail")
    logger.info("[sesionvip.py] title="+title)
    logger.info("[sesionvip.py] thumbnail="+thumbnail)

    '''
    # Descarga la página y extrae el enlace a la siguiente pagina
    data = scrapertools.cachePage(url)
    patronvideos = '<p style="text-align: center;">.*?<a href\="(http\://www.sesionvip.com/[^"]+)"'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    #logger.info(data)
    if len(matches)==0:
        xbmctools.alertnodisponible()
        return
    # Descarga la siguiente página y extrae el enlace a los mirrors
    url = matches[0]
    '''
    data = scrapertools.cachePage(url)

    # ------------------------------------------------------------------------------------
    # Find the video links on the page
    # ------------------------------------------------------------------------------------
    listavideos = servertools.findvideos(data)

    for video in listavideos:
        xbmctools.addvideo( CHANNELNAME , video[0] , video[1] , category , video[2] )
    # ------------------------------------------------------------------------------------

    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def search(params, url, category):
    """Prompt the user for a query and display matching entries."""
    logger.info("[sesionvip.py] search")

    keyboard = xbmc.Keyboard('')
    keyboard.doModal()
    if not keyboard.isConfirmed():
        return

    text = keyboard.getText()
    if not text:
        return

    # Encode spaces for the site's query string.
    query = text.replace(" ", "+")
    searchresults(params, "http://www.sesionvip.com/?s=" + query, category)
def performsearch(texto):
    """Search the site for *texto* and return a list of result entries,
    each as [channel, action, category, title, url, thumbnail, plot]."""
    logger.info("[sesionvip.py] performsearch")
    url = "http://www.sesionvip.com/?s="+texto

    # Download the page
    data = scrapertools.cachePage(url)

    # Extract the entries (folders)
    patronvideos = '<div class="entry">.*?'
    patronvideos += '<a href="([^"]+)" rel="bookmark">([^<]+)</a>'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)

    resultados = []
    for match in matches:
        # Title
        scrapedtitle = match[1]
        if not scrapedtitle.startswith("Descargar"):
            # Strip all SEO prefixes from the title
            scrapedtitle = scrapertools.unseo(scrapedtitle)
        scrapedurl = urlparse.urljoin(url,match[0])
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")

        # Append to the result list
        resultados.append( [CHANNELNAME , "listmirrors" , "buscador" , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot ] )

    return resultados
def searchresults(params,url,category):
    """Render the entries found at a search-results URL as XBMC folders."""
    logger.info("[sesionvip.py] searchresults")

    # Download the page
    data = scrapertools.cachePage(url)

    patronvideos = '<div class="entry">.*?'
    patronvideos += '<a href="([^"]+)" rel="bookmark">([^<]+)</a>'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)

    for match in matches:
        # Title
        scrapedtitle = match[1]
        if not scrapedtitle.startswith("Descargar"):
            # Strip all SEO prefixes from the title
            scrapedtitle = scrapertools.unseo(scrapedtitle)
        # URL
        scrapedurl = urlparse.urljoin(url,match[0])
        # Thumbnail
        scrapedthumbnail = ""
        # Plot
        scrapedplot = ""
        # Debugging
        if (DEBUG):
            logger.info("scrapedtitle="+scrapedtitle)
            logger.info("scrapedurl="+scrapedurl)
            logger.info("scrapedthumbnail="+scrapedthumbnail)

        # Add to the XBMC listing
        xbmctools.addthumbnailfolder( CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail, "listmirrors" )

    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def play(params, url, category):
    """Resolve the selected list item's metadata and start playback."""
    logger.info("[sesionvip.py] play")

    # Pull metadata for the currently selected list item.
    title = unicode(xbmc.getInfoLabel("ListItem.Title"), "utf-8")
    thumbnail = xbmc.getInfoImage("ListItem.Thumb")
    plot = unicode(xbmc.getInfoLabel("ListItem.Plot"), "utf-8")
    server = params["server"]

    logger.info("[sesionvip.py] thumbnail=" + thumbnail)
    logger.info("[sesionvip.py] server=" + server)

    xbmctools.play_video(CHANNELNAME, server, url, category, title, thumbnail, plot)
|
relicode/auth-chat | refs/heads/master | django/auth_chat/models.py | 10644 | from django.db import models
# Create your models here.
|
hendawy/drchrono-patient-education | refs/heads/master | pshare/sharebackend/urls.py | 1 | from django.conf.urls import patterns, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routes for the share backend, using the old-style patterns() syntax
# with dotted-string view references (pre-1.8 Django).
urlpatterns = patterns(
    '',
    url(r'^$', 'sharebackend.views.home', name='home'),
    url(r'^patients/$', 'sharebackend.views.list_patients',
        name='list_patients'),
    url(r'^group/add/new/$', 'sharebackend.views.new_group',
        name='new_group'),
    url(r'^group/file/share/$', 'sharebackend.views.file_share',
        name='file_share'),
    url(r'^group/(?P<tag_text>\w+)/$', 'sharebackend.views.get_group',
        name='get_group'),
    url(r'^group/remove/(?P<tag_text>\w+)/$', 'sharebackend.views.remove_group',
        name='remove_group'),
    url(r'^group/$', 'sharebackend.views.list_groups',
        name='list_groups'),
    url(r'^files/$', 'sharebackend.views.list_files',
        name='list_files'),
    url(r'^file/(?P<file_id>[0-9]+)/$', 'sharebackend.views.get_file',
        name='get_file'),
    url(r'^resource/test/$', 'sharebackend.views.test_resources',
        name='test_resources'),
    url(r'^landing/$', 'sharebackend.views.landing',
        name='landing'),
    url(r'^logout/$', 'sharebackend.views.logout_view',
        name='logout'),
)
|
monapayjp/monapay | refs/heads/master | monapay/rpc.py | 1 | # -*- coding: utf-8 -*-
from decimal import Decimal
from django.conf import settings
import bitcoinrpc
def make_rpc_connection():
    """Open an authenticated JSON-RPC connection to the monacoind daemon.

    Connection credentials and endpoint come from Django settings.
    """
    return bitcoinrpc.connect_to_remote(
        settings.MONACOIND_ADMIN_USER,
        settings.MONACOIND_ADMIN_PASSWORD,
        settings.MONACOIND_HOST,
        settings.MONACOIND_PORT)
def get_minconf(x):
    """Return the minimum confirmation count required for amount *x*.

    Larger amounts require more confirmations: up to 10.0 -> 2,
    up to 50.0 -> 3, up to 100.0 -> 4, up to 200.0 -> 5, above -> 6.
    """
    tiers = (
        (Decimal("10.0"), 2),
        (Decimal("50.0"), 3),
        (Decimal("100.0"), 4),
        (Decimal("200.0"), 5),
    )
    for upper_bound, confirmations in tiers:
        if x <= upper_bound:
            return confirmations
    return 6
|
mrquim/mrquimrepo | refs/heads/master | script.module.exodus/lib/resources/lib/sources/en/releasebb.py | 5 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import control
from resources.lib.modules import source_utils
class source:
    def __init__(self):
        # Scraper priority and metadata consumed by the source resolver.
        self.priority = 1
        self.language = ['en']
        self.domains = ['rlsbb.com', 'rlsbb.ru']
        # Site endpoints; the search backend lives on a separate host and
        # expects a cookie selecting the rlsbb search mode.
        self.base_link = 'http://rlsbb.ru'
        self.search_base_link = 'http://search.rlsbb.ru'
        self.search_cookie = 'serach_mode=rlsbb'
        self.search_link = '/lib/search526049.php?phrase=%s&pindex=1&content=true'
        self.search_link2 = '/search/%s'
    def movie(self, imdb, title, localtitle, aliases, year):
        """Pack the movie identifiers into an urlencoded token.

        Returns None on any failure (best-effort by design).
        """
        try:
            url = {'imdb': imdb, 'title': title, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            return
    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        """Pack the show identifiers into an urlencoded token.

        Returns None on any failure (best-effort by design).
        """
        try:
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            return
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        """Extend the show token from tvshow() with episode details.

        Returns None when no show token was provided or on failure.
        """
        try:
            if url == None: return

            # Decode the show token, merge the episode fields, re-encode.
            url = urlparse.parse_qs(url)
            url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
            url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
            url = urllib.urlencode(url)
            return url
        except:
            return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() == False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
posts = []
if 'tvshowtitle' in data:
query = '%s %s S%02dE%02d' % (data['tvshowtitle'], int(data['year']), int(data['season']), int(data['episode']))
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
referer = self.search_link2 % urllib.quote_plus(query)
referer = urlparse.urljoin(self.search_base_link, referer)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.search_base_link, url)
result = client.request(url, cookie=self.search_cookie, XHR=True, referer=referer)
try: posts += json.loads(re.findall('({.+?})$', result)[0])['results']
except: pass
else:
query = '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
referer = self.search_link2 % urllib.quote_plus(query)
referer = urlparse.urljoin(self.search_base_link, referer)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.search_base_link, url)
result = client.request(url, cookie=self.search_cookie, XHR=True, referer=referer)
try: posts += json.loads(re.findall('({.+?})$', result)[0])['results']
except: pass
links = [] ; dupes = []
for post in posts:
try:
name = post['post_title'] ; url = post['post_name']
if not url in dupes:
dupes.append(url)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(title) in cleantitle.get(t): raise Exception()
try: y = re.findall('[\.|\(|\[|\s](S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
except: y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if 'S' in y and 'E' in y: cat = 'episode'
elif 'S' in y: cat = 'tvshow'
elif y.isdigit(): cat = 'movie'
if cat == 'movie': hdlr = data['year']
elif cat == 'episode': hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode']))
elif cat == 'tvshow': hdlr = 'S%02d' % int(data['season'])
if not y == hdlr: raise Exception()
items = []
content = post['post_content']
try: items += zip([i for i in client.parseDOM(content, 'p') if 'Release Name:' in i], [i for i in client.parseDOM(content, 'p') if '<strong>Download' in i])
except: pass
try: items += client.parseDOM(content, 'p', attrs = {'style': '.+?'})
except: pass
for item in items:
try:
if type(item) == tuple: item = '######URL######'.join(item)
fmt = re.sub('(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)', '', name.upper())
fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
fmt = [i.lower() for i in fmt]
if any(i.endswith(('subs', 'sub', 'dubbed', 'dub')) for i in fmt): raise Exception()
if any(i in ['extras'] for i in fmt): raise Exception()
if '1080p' in fmt: quality = '1080p'
elif '720p' in fmt: quality = '720p'
else: quality = 'SD'
if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt): quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts'] for i in fmt): quality = 'CAM'
quality, infoo = source_utils.get_release_quality(name, i[1])
info = []
if '3d' in fmt: info.append('3D')
try:
if cat == 'tvshow': raise Exception()
size = re.findall('(\d+(?:\.|/,|)\d+(?:\s+|)(?:GB|GiB|MB|MiB))', item)[0].strip()
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
info = ' | '.join(info)
url = item.rsplit('######URL######')[-1]
url = zip(client.parseDOM(url, 'a'), client.parseDOM(url, 'a', ret='href'))
for i in url: links.append({'url': i[1], 'quality': quality, 'info': info, 'host': i[0], 'cat': cat})
except:
pass
except:
pass
check = [i for i in links if not i['quality'] == 'CAM']
if len(check) > 0: links = check
hostDict = hostprDict + hostDict
for i in links:
try:
url = i['url']
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
if i['cat'] == 'tvshow':
if not i['quality'] in ['1080p', 'HD']: raise Exception()
if not any(i['host'].lower() in x for x in hostDict): raise Exception()
url = client.request(url)
url = client.parseDOM(url, 'ol')[0]
url = client.parseDOM(url, 'div', attrs = {'style': '.+?'})[int(data['episode'])-1]
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': i['quality'], 'language': 'en', 'url': url, 'info': i['info'], 'direct': False, 'debridonly': True})
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
|
theostoican/robocheck | refs/heads/master | coreutils/modulehandler.py | 3 | """modulehandler.py
General description:
This is the module that dynamically loads the classes for the tools
used to get the errors. The "getCompatibleModules" function loads
all the classes for the language specified and instantiates only
those whose tool can search for the errors specified and which have
the tool installed.
(C) 2013, Andrei Tuicu <andrei.tuicu@gmail.com>
last review 26.08.2013
"""
import os
import sys
def getCompatibleModules(language, errorsToLookFor, platform):
    """Discover and instantiate the error-checking tool classes for *language*.

    Looks in languages/<language>/modules (relative to the current working
    directory) for one Python module per tool; by convention each module
    defines a class with the same name as its file.  Only tools whose class
    reports it can handle *errorsToLookFor* and is installed on *platform*
    are instantiated.

    Returns a list of tool instances, or None when no compatible tool exists.
    """
    returnPath = os.getcwd()
    compatibleModules = []
    try:
        os.chdir(os.path.join("languages", language, "modules"))
        # Make the modules directory importable for __import__ below; only
        # insert "." once so repeated calls do not grow sys.path.
        if "." not in sys.path:
            sys.path.insert(0, ".")
        # Skip compiled bytecode.  Build a new list instead of calling
        # remove() on the list being iterated, which would skip the entry
        # following each removed one.
        allModules = sorted(m for m in os.listdir(".") if ".pyc" not in m)
        # Strip the .py extension to obtain importable module names.
        allModules = [m[:-3] if m.endswith(".py") else m for m in allModules]
        for module in allModules:
            # The package marker is not a tool module.  (The old check
            # compared against "__init", which never matched.)
            if module == "__init__":
                continue
            tool = __import__(module)
            # Convention: each tool module defines a class of the same name.
            toolClass = getattr(tool, module)
            if toolClass.canHandleErrors(errorsToLookFor) and \
               toolClass.toolIsInstalled(platform):
                compatibleModules.append(toolClass())
    finally:
        # Always restore the caller's working directory, even when a tool
        # module fails to import or probe.
        os.chdir(returnPath)
    if len(compatibleModules) == 0:
        return None
    return compatibleModules
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.