repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
camradal/ansible-modules-extras | refs/heads/devel | notification/sns.py | 44 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael J. Schultz <mjschultz@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: sns
short_description: Send Amazon Simple Notification Service (SNS) messages
description:
- The M(sns) module sends notifications to a topic on your Amazon SNS account
version_added: 1.6
author: "Michael J. Schultz (@mjschultz)"
options:
msg:
description:
- Default message to send.
required: true
aliases: [ "default" ]
subject:
description:
- Subject line for email delivery.
required: false
topic:
description:
- The topic you want to publish to.
required: true
email:
description:
- Message to send to email-only subscription
required: false
sqs:
description:
- Message to send to SQS-only subscription
required: false
sms:
description:
- Message to send to SMS-only subscription
required: false
http:
description:
- Message to send to HTTP-only subscription
required: false
https:
description:
- Message to send to HTTPS-only subscription
required: false
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_secret_key', 'secret_key']
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: None
aliases: ['ec2_access_key', 'access_key']
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
requirements: [ "boto" ]
"""
EXAMPLES = """
- name: Send default notification message via SNS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
subject: "Deploy complete!"
topic: "deploy"
- name: Send notification messages via SNS with short message for SMS
local_action:
module: sns
msg: "{{ inventory_hostname }} has completed the play."
sms: "deployed!"
subject: "Deploy complete!"
topic: "deploy"
"""
import sys
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
import boto
import boto.ec2
import boto.sns
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def arn_topic_lookup(connection, short_topic):
    """Resolve a short SNS topic name to its full topic ARN.

    :param connection: boto SNS connection providing ``get_all_topics()``.
    :param short_topic: bare topic name without any ARN prefix.
    :returns: the full TopicArn string, or None if no topic matches.
    """
    listing = connection.get_all_topics()
    topics = listing[u'ListTopicsResponse'][u'ListTopicsResult'][u'Topics']
    # An ARN ends in ':<name>' and topic names cannot contain a colon,
    # so a suffix match on ':short_topic' identifies the topic uniquely.
    suffix = ':{}'.format(short_topic)
    for entry in topics:
        arn = entry[u'TopicArn']
        if arn.endswith(suffix):
            return arn
    return None
def main():
    """Entry point: publish one SNS message built from module parameters."""
    # Start from the shared EC2 argument spec so the standard AWS
    # credential/region options are accepted alongside our own.
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            msg=dict(type='str', required=True, aliases=['default']),
            subject=dict(type='str', default=None),
            topic=dict(type='str', required=True),
            email=dict(type='str', default=None),
            sqs=dict(type='str', default=None),
            sms=dict(type='str', default=None),
            http=dict(type='str', default=None),
            https=dict(type='str', default=None),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    msg = module.params['msg']
    subject = module.params['subject']
    topic = module.params['topic']
    email = module.params['email']
    sqs = module.params['sqs']
    sms = module.params['sms']
    http = module.params['http']
    https = module.params['https']
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg="region must be specified")
    try:
        connection = connect_to_aws(boto.sns, region, **aws_connect_params)
    except boto.exception.NoAuthHandlerFound, e:
        module.fail_json(msg=str(e))
    # .publish() takes full ARN topic id, but I'm lazy and type shortnames
    # so do a lookup (topics cannot contain ':', so thats the decider)
    if ':' in topic:
        arn_topic = topic
    else:
        arn_topic = arn_topic_lookup(connection, topic)
    if not arn_topic:
        module.fail_json(msg='Could not find topic: {}'.format(topic))
    # Build the per-protocol message map; 'default' is delivered to any
    # subscription type that has no explicit override below.
    dict_msg = {'default': msg}
    if email:
        dict_msg.update(email=email)
    if sqs:
        dict_msg.update(sqs=sqs)
    if sms:
        dict_msg.update(sms=sms)
    if http:
        dict_msg.update(http=http)
    if https:
        dict_msg.update(https=https)
    json_msg = json.dumps(dict_msg)
    try:
        # message_structure='json' makes SNS route each dict_msg entry by
        # subscription protocol.
        connection.publish(topic=arn_topic, subject=subject,
                           message_structure='json', message=json_msg)
    except boto.exception.BotoServerError, e:
        module.fail_json(msg=str(e))
    module.exit_json(msg="OK")
main()
|
tpodowd/boto | refs/heads/master | boto/dynamodb/schema.py | 185 | # Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
class Schema(object):
    """
    Represents a DynamoDB schema.

    :ivar hash_key_name: The name of the hash key of the schema.
    :ivar hash_key_type: The DynamoDB type specification for the
        hash key of the schema.
    :ivar range_key_name: The name of the range key of the schema
        or None if no range key is defined.
    :ivar range_key_type: The DynamoDB type specification for the
        range key of the schema or None if no range key is defined.
    :ivar dict: The underlying Python dictionary that needs to be
        passed to Layer1 methods.
    """

    def __init__(self, schema_dict):
        self._dict = schema_dict

    def __repr__(self):
        if self.range_key_name:
            s = 'Schema(%s:%s)' % (self.hash_key_name, self.range_key_name)
        else:
            s = 'Schema(%s)' % self.hash_key_name
        return s

    @classmethod
    def create(cls, hash_key, range_key=None):
        """Convenience method to create a schema object.

        Example usage::

            schema = Schema.create(hash_key=('foo', 'N'))
            schema2 = Schema.create(hash_key=('foo', 'N'),
                                    range_key=('bar', 'S'))

        :type hash_key: tuple
        :param hash_key: A tuple of (hash_key_name, hash_key_type)

        :type range_key: tuple
        :param range_key: A tuple of (range_key_name, range_key_type)
        """
        reconstructed = {
            'HashKeyElement': {
                'AttributeName': hash_key[0],
                'AttributeType': hash_key[1],
            }
        }
        if range_key is not None:
            reconstructed['RangeKeyElement'] = {
                'AttributeName': range_key[0],
                'AttributeType': range_key[1],
            }
        # Build through the normal constructor instead of mutating a
        # half-initialized cls(None) instance.
        return cls(reconstructed)

    @property
    def dict(self):
        return self._dict

    @property
    def hash_key_name(self):
        return self._dict['HashKeyElement']['AttributeName']

    @property
    def hash_key_type(self):
        return self._dict['HashKeyElement']['AttributeType']

    @property
    def range_key_name(self):
        """Name of the range key, or None if no range key is defined."""
        if 'RangeKeyElement' in self._dict:
            return self._dict['RangeKeyElement']['AttributeName']
        return None

    @property
    def range_key_type(self):
        """Type of the range key, or None if no range key is defined."""
        # NOTE: the original bound this to a local named ``type``,
        # shadowing the builtin.
        if 'RangeKeyElement' in self._dict:
            return self._dict['RangeKeyElement']['AttributeType']
        return None

    def __eq__(self, other):
        return (self.hash_key_name == other.hash_key_name and
                self.hash_key_type == other.hash_key_type and
                self.range_key_name == other.range_key_name and
                self.range_key_type == other.range_key_type)

    def __ne__(self, other):
        # Required on Python 2, where __ne__ is not derived from __eq__.
        return not self.__eq__(other)
|
ptisserand/ansible | refs/heads/devel | lib/ansible/modules/network/aireos/aireos_command.py | 73 | #!/usr/bin/python
#
# Copyright: Ansible Team
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aireos_command
version_added: "2.4"
author: "James Mighion (@jmighion)"
short_description: Run commands on remote devices running Cisco WLC
description:
- Sends arbitrary commands to an aireos node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(aireos_config) to configure WLC devices.
extends_documentation_fragment: aireos
options:
commands:
description:
- List of commands to send to the remote aireos device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
aliases: ['waitfor']
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
default: all
choices: ['any', 'all']
retries:
description:
- Specifies the number of retries a command should by tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
default: 1
"""
EXAMPLES = """
tasks:
- name: run show sysinfo on remote devices
aireos_command:
commands: show sysinfo
- name: run show sysinfo and check to see if output contains Cisco Controller
aireos_command:
commands: show sysinfo
wait_for: result[0] contains 'Cisco Controller'
- name: run multiple commands on remote nodes
aireos_command:
commands:
- show sysinfo
- show interface summary
- name: run multiple commands and evaluate the output
aireos_command:
commands:
- show sysinfo
- show interface summary
wait_for:
- result[0] contains Cisco Controller
- result[1] contains Loopback0
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always apart from low level errors (such as action plugin)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always apart from low level errors (such as action plugin)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.network.aireos.aireos import run_commands
from ansible.module_utils.network.aireos.aireos import aireos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import ComplexList
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
    """Yield each command response, splitting strings into lists of lines.

    Non-string items (already-structured responses) pass through unchanged.
    """
    for response in stdout:
        if isinstance(response, string_types):
            yield str(response).split('\n')
        else:
            yield response
def parse_commands(module, warnings):
    """Normalize the ``commands`` parameter and screen unsupported commands.

    Warns (in check mode) about non-show commands and fails outright on
    config-mode commands, which this module does not support.
    Returns the normalized command list.
    """
    transform = ComplexList(dict(
        command=dict(key=True),
        prompt=dict(),
        answer=dict()
    ), module)
    commands = transform(module.params['commands'])
    for entry in commands:
        command_string = entry['command']
        if module.check_mode and not command_string.startswith('show'):
            warnings.append(
                'only show commands are supported when using check mode, not '
                'executing `%s`' % command_string
            )
        elif command_string.startswith('conf'):
            module.fail_json(
                msg='aireos_command does not support running config mode '
                    'commands. Please use aireos_config instead'
            )
    return commands
def main():
    """main entry point for module execution
    """
    argument_spec = dict(
        commands=dict(type='list', required=True),
        wait_for=dict(type='list', aliases=['waitfor']),
        match=dict(default='all', choices=['all', 'any']),
        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )
    argument_spec.update(aireos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    result = {'changed': False}
    warnings = list()
    check_args(module, warnings)
    commands = parse_commands(module, warnings)
    result['warnings'] = warnings
    wait_for = module.params['wait_for'] or list()
    conditionals = [Conditional(c) for c in wait_for]
    retries = module.params['retries']
    interval = module.params['interval']
    match = module.params['match']
    # Re-run the commands until every conditional is satisfied (match=all),
    # any single one is (match=any), or the retries are exhausted.
    while retries > 0:
        responses = run_commands(module, commands)
        # Iterate over a copy so satisfied conditionals can be removed
        # from the live list inside the loop.
        for item in list(conditionals):
            if item(responses):
                if match == 'any':
                    conditionals = list()
                    break
                conditionals.remove(item)
        if not conditionals:
            break
        time.sleep(interval)
        retries -= 1
    # Any conditionals still pending after all retries means failure.
    if conditionals:
        failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)
    result.update({
        'changed': False,
        'stdout': responses,
        'stdout_lines': list(to_lines(responses))
    })
    module.exit_json(**result)
if __name__ == '__main__':
main()
|
fengbaicanhe/intellij-community | refs/heads/master | python/testData/resolve/multiFile/importSubmodule/ImportSubmodule.py | 83 | import p1.m1
p1.m1
# <ref> |
CamelBackNotation/CarnotKE | refs/heads/master | jyhton/lib-python/2.7/test/test_uu.py | 60 | """
Tests for uu module.
Nick Mathewson
"""
import unittest
from test import test_support
import sys, os, uu, cStringIO
import uu
# One line of plaintext and its uuencoded body, used as fixtures throughout.
plaintext = "The smooth-scaled python crept over the sleeping dog\n"
encodedtext = """\
M5&AE('-M;V]T:\"US8V%L960@<'ET:&]N(&-R97!T(&]V97(@=&AE('-L965P
(:6YG(&1O9PH """
# Template adding the uuencode begin/end framing; fill with (mode, filename).
encodedtextwrapped = "begin %03o %s\n" + encodedtext.replace("%", "%%") + "\n \nend\n"
class UUTest(unittest.TestCase):
    """Round-trip tests for uu.encode/uu.decode on file-like objects."""
    def test_encode(self):
        inp = cStringIO.StringIO(plaintext)
        out = cStringIO.StringIO()
        uu.encode(inp, out, "t1")
        # Default file mode is 0666.
        self.assertEqual(out.getvalue(), encodedtextwrapped % (0666, "t1"))
        inp = cStringIO.StringIO(plaintext)
        out = cStringIO.StringIO()
        uu.encode(inp, out, "t1", 0644)
        self.assertEqual(out.getvalue(), encodedtextwrapped % (0644, "t1"))
    def test_decode(self):
        inp = cStringIO.StringIO(encodedtextwrapped % (0666, "t1"))
        out = cStringIO.StringIO()
        uu.decode(inp, out)
        self.assertEqual(out.getvalue(), plaintext)
        # Garbage before the 'begin' line must be skipped by the decoder.
        inp = cStringIO.StringIO(
            "UUencoded files may contain many lines,\n" +
            "even some that have 'begin' in them.\n" +
            encodedtextwrapped % (0666, "t1")
        )
        out = cStringIO.StringIO()
        uu.decode(inp, out)
        self.assertEqual(out.getvalue(), plaintext)
    def test_truncatedinput(self):
        # Input with a 'begin' but no 'end' line must raise uu.Error.
        inp = cStringIO.StringIO("begin 644 t1\n" + encodedtext)
        out = cStringIO.StringIO()
        try:
            uu.decode(inp, out)
            self.fail("No exception raised")
        except uu.Error, e:
            self.assertEqual(str(e), "Truncated input file")
    def test_missingbegin(self):
        # Completely empty input has no 'begin' line at all.
        inp = cStringIO.StringIO("")
        out = cStringIO.StringIO()
        try:
            uu.decode(inp, out)
            self.fail("No exception raised")
        except uu.Error, e:
            self.assertEqual(str(e), "No valid begin line found in input file")
class UUStdIOTest(unittest.TestCase):
    """Tests for the "-" filename convention (encode/decode via stdio)."""
    def setUp(self):
        # Remember the real streams so tearDown can restore them.
        self.stdin = sys.stdin
        self.stdout = sys.stdout
    def tearDown(self):
        sys.stdin = self.stdin
        sys.stdout = self.stdout
    def test_encode(self):
        sys.stdin = cStringIO.StringIO(plaintext)
        sys.stdout = cStringIO.StringIO()
        uu.encode("-", "-", "t1", 0666)
        self.assertEqual(
            sys.stdout.getvalue(),
            encodedtextwrapped % (0666, "t1")
        )
    def test_decode(self):
        sys.stdin = cStringIO.StringIO(encodedtextwrapped % (0666, "t1"))
        sys.stdout = cStringIO.StringIO()
        uu.decode("-", "-")
        self.assertEqual(sys.stdout.getvalue(), plaintext)
class UUFileTest(unittest.TestCase):
    """Tests for uu.encode/uu.decode operating on real files on disk."""
    def _kill(self, f):
        # close and remove file
        try:
            f.close()
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            # Best-effort cleanup: f may be None or already closed.
            pass
        try:
            os.unlink(f.name)
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            pass
    def setUp(self):
        self.tmpin = test_support.TESTFN + "i"
        self.tmpout = test_support.TESTFN + "o"
    def tearDown(self):
        del self.tmpin
        del self.tmpout
    def test_encode(self):
        fin = fout = None
        try:
            test_support.unlink(self.tmpin)
            fin = open(self.tmpin, 'wb')
            fin.write(plaintext)
            fin.close()
            fin = open(self.tmpin, 'rb')
            fout = open(self.tmpout, 'w')
            uu.encode(fin, fout, self.tmpin, mode=0644)
            fin.close()
            fout.close()
            fout = open(self.tmpout, 'r')
            s = fout.read()
            fout.close()
            self.assertEqual(s, encodedtextwrapped % (0644, self.tmpin))
            # in_file and out_file as filenames
            uu.encode(self.tmpin, self.tmpout, self.tmpin, mode=0644)
            fout = open(self.tmpout, 'r')
            s = fout.read()
            fout.close()
            self.assertEqual(s, encodedtextwrapped % (0644, self.tmpin))
        finally:
            self._kill(fin)
            self._kill(fout)
    def test_decode(self):
        f = None
        try:
            test_support.unlink(self.tmpin)
            f = open(self.tmpin, 'w')
            f.write(encodedtextwrapped % (0644, self.tmpout))
            f.close()
            # decode() derives the output filename from the begin line.
            f = open(self.tmpin, 'r')
            uu.decode(f)
            f.close()
            f = open(self.tmpout, 'r')
            s = f.read()
            f.close()
            self.assertEqual(s, plaintext)
            # XXX is there an xp way to verify the mode?
        finally:
            self._kill(f)
    def test_decode_filename(self):
        f = None
        try:
            test_support.unlink(self.tmpin)
            f = open(self.tmpin, 'w')
            f.write(encodedtextwrapped % (0644, self.tmpout))
            f.close()
            # Same as test_decode but passing a filename instead of a file.
            uu.decode(self.tmpin)
            f = open(self.tmpout, 'r')
            s = f.read()
            f.close()
            self.assertEqual(s, plaintext)
        finally:
            self._kill(f)
    def test_decodetwice(self):
        # Verify that decode() will refuse to overwrite an existing file
        f = None
        try:
            f = cStringIO.StringIO(encodedtextwrapped % (0644, self.tmpout))
            f = open(self.tmpin, 'r')
            uu.decode(f)
            f.close()
            f = open(self.tmpin, 'r')
            self.assertRaises(uu.Error, uu.decode, f)
            f.close()
        finally:
            self._kill(f)
def test_main():
    """Run all uu test cases through the test_support harness."""
    test_support.run_unittest(UUTest, UUStdIOTest, UUFileTest)
if __name__=="__main__":
    test_main()
|
ramineni/myironic | refs/heads/master | ironic/dhcp/neutron.py | 1 | #
# Copyright 2014 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from neutronclient.common import exceptions as neutron_client_exc
from neutronclient.v2_0 import client as clientv20
from oslo.utils import netutils
from oslo_config import cfg
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LW
from ironic.common import keystone
from ironic.common import network
from ironic.dhcp import base
from ironic.drivers.modules import ssh
from ironic.openstack.common import log as logging
neutron_opts = [
cfg.StrOpt('url',
default='http://$my_ip:9696',
help='URL for connecting to neutron.'),
cfg.IntOpt('url_timeout',
default=30,
help='Timeout value for connecting to neutron in seconds.'),
cfg.IntOpt('retries',
default=3,
help='Client retries in the case of a failed request.'),
cfg.StrOpt('auth_strategy',
default='keystone',
help='Default authentication strategy to use when connecting '
'to neutron. Can be either "keystone" or "noauth". '
'Running neutron in noauth mode (related to but not '
'affected by this setting) is insecure and should only be '
'used for testing.')
]
CONF = cfg.CONF
CONF.import_opt('my_ip', 'ironic.netconf')
CONF.register_opts(neutron_opts, group='neutron')
LOG = logging.getLogger(__name__)
def _build_client(token=None):
    """Utility function to create Neutron client.

    :param token: optional keystone auth token. With the 'keystone'
        strategy a provided token is used directly; otherwise the admin
        credentials from configuration are used.
    :returns: a neutronclient v2.0 Client.
    :raises: ConfigInvalid if auth_strategy is neither 'noauth' nor
        'keystone'.
    """
    params = {
        'timeout': CONF.neutron.url_timeout,
        'retries': CONF.neutron.retries,
        'insecure': CONF.keystone_authtoken.insecure,
        'ca_cert': CONF.keystone_authtoken.certfile,
    }
    if CONF.neutron.auth_strategy not in ['noauth', 'keystone']:
        raise exception.ConfigInvalid(_('Neutron auth_strategy should be '
                                        'either "noauth" or "keystone".'))
    if CONF.neutron.auth_strategy == 'noauth':
        params['endpoint_url'] = CONF.neutron.url
        params['auth_strategy'] = 'noauth'
    elif (CONF.neutron.auth_strategy == 'keystone' and
          token is None):
        # Keystone without a caller-supplied token: authenticate with the
        # admin credentials from [keystone_authtoken].
        params['endpoint_url'] = (CONF.neutron.url or
                                  keystone.get_service_url('neutron'))
        params['username'] = CONF.keystone_authtoken.admin_user
        params['tenant_name'] = CONF.keystone_authtoken.admin_tenant_name
        params['password'] = CONF.keystone_authtoken.admin_password
        params['auth_url'] = (CONF.keystone_authtoken.auth_uri or '')
        if CONF.keystone.region_name:
            params['region_name'] = CONF.keystone.region_name
    else:
        # A token was supplied: pass it straight through to the endpoint.
        params['token'] = token
        params['endpoint_url'] = CONF.neutron.url
        params['auth_strategy'] = None
    return clientv20.Client(**params)
class NeutronDHCPApi(base.BaseDHCP):
    """API for communicating to neutron 2.x API."""
    def update_port_dhcp_opts(self, port_id, dhcp_options, token=None):
        """Update a port's attributes.

        Update one or more DHCP options on the specified port.
        For the relevant API spec, see
        http://docs.openstack.org/api/openstack-network/2.0/content/extra-dhc-opt-ext-update.html # noqa

        :param port_id: designate which port these attributes
                        will be applied to.
        :param dhcp_options: this will be a list of dicts, e.g.

                             ::

                              [{'opt_name': 'bootfile-name',
                                'opt_value': 'pxelinux.0'},
                               {'opt_name': 'server-ip-address',
                                'opt_value': '123.123.123.456'},
                               {'opt_name': 'tftp-server',
                                'opt_value': '123.123.123.123'}]
        :param token: optional auth token.
        :raises: FailedToUpdateDHCPOptOnPort
        """
        port_req_body = {'port': {'extra_dhcp_opts': dhcp_options}}
        try:
            _build_client(token).update_port(port_id, port_req_body)
        except neutron_client_exc.NeutronClientException:
            LOG.exception(_LE("Failed to update Neutron port %s."), port_id)
            raise exception.FailedToUpdateDHCPOptOnPort(port_id=port_id)
    def update_port_address(self, port_id, address, token=None):
        """Update a port's mac address.

        :param port_id: Neutron port id.
        :param address: new MAC address.
        :param token: optional auth token.
        :raises: FailedToUpdateMacOnPort
        """
        port_req_body = {'port': {'mac_address': address}}
        try:
            _build_client(token).update_port(port_id, port_req_body)
        except neutron_client_exc.NeutronClientException:
            LOG.exception(_LE("Failed to update MAC address on Neutron "
                              "port %s."), port_id)
            raise exception.FailedToUpdateMacOnPort(port_id=port_id)
    def update_dhcp_opts(self, task, options):
        """Send or update the DHCP BOOT options for this node.

        :param task: A TaskManager instance.
        :param options: this will be a list of dicts, e.g.

                        ::

                         [{'opt_name': 'bootfile-name',
                           'opt_value': 'pxelinux.0'},
                          {'opt_name': 'server-ip-address',
                           'opt_value': '123.123.123.456'},
                          {'opt_name': 'tftp-server',
                           'opt_value': '123.123.123.123'}]
        :raises: FailedToUpdateDHCPOptOnPort if no VIFs are found or every
            port update fails.
        """
        vifs = network.get_node_vif_ids(task)
        if not vifs:
            raise exception.FailedToUpdateDHCPOptOnPort(
                _("No VIFs found for node %(node)s when attempting "
                  "to update DHCP BOOT options.") %
                {'node': task.node.uuid})
        failures = []
        for port_id, port_vif in vifs.items():
            try:
                self.update_port_dhcp_opts(port_vif, options,
                                           token=task.context.auth_token)
            except exception.FailedToUpdateDHCPOptOnPort:
                failures.append(port_id)
        # Raise only if *every* port failed; a partial failure is logged
        # as a warning and treated as success.
        if failures:
            if len(failures) == len(vifs):
                raise exception.FailedToUpdateDHCPOptOnPort(_(
                    "Failed to set DHCP BOOT options for any port on node %s.")
                    % task.node.uuid)
            else:
                LOG.warning(_LW("Some errors were encountered when updating "
                                "the DHCP BOOT options for node %(node)s on "
                                "the following ports: %(ports)s."),
                            {'node': task.node.uuid, 'ports': failures})
        # TODO(adam_g): Hack to workaround bug 1334447 until we have a
        # mechanism for synchronizing events with Neutron. We need to sleep
        # only if we are booting VMs, which is implied by SSHPower, to ensure
        # they do not boot before Neutron agents have setup sufficent DHCP
        # config for netboot.
        if isinstance(task.driver.power, ssh.SSHPower):
            LOG.debug("Waiting 15 seconds for Neutron.")
            time.sleep(15)
    def _get_fixed_ip_address(self, port_uuid, client):
        """Get a port's fixed ip address.

        :param port_uuid: Neutron port id.
        :param client: Neutron client instance.
        :returns: Neutron port ip address.
        :raises: FailedToGetIPAddressOnPort
        :raises: InvalidIPv4Address
        """
        ip_address = None
        try:
            neutron_port = client.show_port(port_uuid).get('port')
        except neutron_client_exc.NeutronClientException:
            LOG.exception(_LE("Failed to Get IP address on Neutron port %s."),
                          port_uuid)
            raise exception.FailedToGetIPAddressOnPort(port_id=port_uuid)
        fixed_ips = neutron_port.get('fixed_ips')
        # NOTE(faizan) At present only the first fixed_ip assigned to this
        # neutron port will be used, since nova allocates only one fixed_ip
        # for the instance.
        if fixed_ips:
            ip_address = fixed_ips[0].get('ip_address', None)
        if ip_address:
            if netutils.is_valid_ipv4(ip_address):
                return ip_address
            else:
                LOG.error(_LE("Neutron returned invalid IPv4 address %s."),
                          ip_address)
                raise exception.InvalidIPv4Address(ip_address=ip_address)
        else:
            LOG.error(_LE("No IP address assigned to Neutron port %s."),
                      port_uuid)
            raise exception.FailedToGetIPAddressOnPort(port_id=port_uuid)
    def _get_port_ip_address(self, task, port_uuid, client):
        """Get ip address of ironic port assigned by neutron.

        :param task: a TaskManager instance.
        :param port_uuid: ironic Node's port UUID.
        :param client: Neutron client instance.
        :returns: Neutron port ip address associated with Node's port.
        :raises: FailedToGetIPAddressOnPort
        :raises: InvalidIPv4Address
        """
        vifs = network.get_node_vif_ids(task)
        if not vifs:
            LOG.warning(_LW("No VIFs found for node %(node)s when attempting "
                            " to get port IP address."),
                        {'node': task.node.uuid})
            raise exception.FailedToGetIPAddressOnPort(port_id=port_uuid)
        port_vif = vifs[port_uuid]
        port_ip_address = self._get_fixed_ip_address(port_vif, client)
        return port_ip_address
    def get_ip_addresses(self, task):
        """Get IP addresses for all ports in `task`.

        Failures on individual ports are collected and logged as a warning;
        only successfully resolved addresses are returned.

        :param task: a TaskManager instance.
        :returns: List of IP addresses associated with task.ports.
        """
        client = _build_client(task.context.auth_token)
        failures = []
        ip_addresses = []
        for port in task.ports:
            try:
                port_ip_address = self._get_port_ip_address(task, port.uuid,
                                                            client)
                ip_addresses.append(port_ip_address)
            except (exception.FailedToGetIPAddressOnPort,
                    exception.InvalidIPv4Address):
                failures.append(port.uuid)
        if failures:
            LOG.warn(_LW("Some errors were encountered on node %(node)s"
                         " while retrieving IP address on the following"
                         " ports: %(ports)s."),
                     {'node': task.node.uuid, 'ports': failures})
        return ip_addresses
|
ict-felix/stack | refs/heads/master | ofam/src/src/foam/sfa/rspecs/elements/location.py | 3 | from foam.sfa.rspecs.elements.element import Element
class Location(Element):
    """RSpec element describing a node's geographic location."""
    # Attribute names this element exposes, in the original order.
    fields = ['country', 'longitude', 'latitude']
|
luckylavish/zamboni | refs/heads/master | mkt/constants/tests/test_platforms.py | 13 | from django.test.client import RequestFactory
from nose.tools import eq_
from tower import ugettext as _
import mkt.site.tests
from mkt.constants.platforms import FREE_PLATFORMS, PAID_PLATFORMS
class TestPlatforms(mkt.site.tests.TestCase):
    """Checks the tuples produced by the FREE_PLATFORMS/PAID_PLATFORMS
    constant factories."""
    def test_free_platforms(self):
        platforms = FREE_PLATFORMS()
        expected = (
            ('free-firefoxos', _('Firefox OS')),
            ('free-desktop', _('Firefox for Desktop')),
            ('free-android-mobile', _('Firefox Mobile')),
            ('free-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)
    def test_paid_platforms_default(self):
        # Without any waffle flag, only Firefox OS supports payments.
        platforms = PAID_PLATFORMS()
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
        )
        eq_(platforms, expected)
    def test_paid_platforms_android_payments_waffle_on(self):
        # The 'android-payments' flag adds the paid Android platforms.
        self.create_flag('android-payments')
        platforms = PAID_PLATFORMS(request=RequestFactory())
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
            ('paid-android-mobile', _('Firefox Mobile')),
            ('paid-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)
|
matiasb/django | refs/heads/master | tests/migrations/migrations_test_apps/conflicting_app_with_dependencies/migrations/0002_conflicting_second.py | 241 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Test-fixture migration creating a minimal model. Judging by its
    # app/file name ("conflicting_app_with_dependencies" /
    # "0002_conflicting_second") it exists to exercise Django's migration
    # conflict detection — TODO confirm against the test suite.
    dependencies = [
        ("conflicting_app_with_dependencies", "0001_initial"),
    ]
    operations = [
        migrations.CreateModel(
            "Something",
            [
                ("id", models.AutoField(primary_key=True)),
            ],
        )
    ]
|
davharris/retriever | refs/heads/master | try_install_all.py | 2 | """Attempt to install all datasets into all database management systems
This module, when run, attempts to install datasets from all Retriever scripts
in the /scripts folder (except for those listed in IGNORE), for each engine in
ENGINE_LIST() from __init__.py. In other words, it runs trys to install using
all possible combinations of database platform and script and checks to
see if there are any errors. It does not check the values in the database.
"""
import os
import sys
from retriever.lib.tools import choose_engine
from retriever import MODULE_LIST, ENGINE_LIST, SCRIPT_LIST
MODULE_LIST = MODULE_LIST()
ENGINE_LIST = ENGINE_LIST()
if len(sys.argv) > 1:
ENGINE_LIST = [e for e in ENGINE_LIST
if e.name in sys.argv[1:]
or e.abbreviation in sys.argv[1:]]
SCRIPT_LIST = SCRIPT_LIST()
TEST_ENGINES = {}
IGNORE = ["AvianBodyMass", "FIA"]
for engine in ENGINE_LIST:
opts = {}
print "** %s **" % engine.name
opts["engine"] = engine.abbreviation
try:
TEST_ENGINES[engine.abbreviation] = choose_engine(opts)
TEST_ENGINES[engine.abbreviation].get_cursor()
except:
TEST_ENGINES[engine.abbreviation] = None
pass
errors = []
for module in MODULE_LIST:
for (key, value) in TEST_ENGINES.items():
if value and not module.SCRIPT.shortname in IGNORE:
print "==>", module.__name__, value.name
try:
module.SCRIPT.download(value)
except KeyboardInterrupt:
pass
except Exception as e:
print "ERROR."
errors.append((key, module.__name__, e))
print('')
if errors:
print("Engine, Dataset, Error")
for error in errors:
print(error)
else:
print("All tests passed")
|
CloudServer/cinder | refs/heads/master | cinder/__init__.py | 43 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`cinder` -- Cloud IaaS Platform
========================================
.. automodule:: cinder
:platform: Unix
:synopsis: Infrastructure-as-a-Service Cloud platform.
.. moduleauthor:: Jesse Andrews <jesse@ansolabs.com>
.. moduleauthor:: Devin Carlen <devin.carlen@gmail.com>
.. moduleauthor:: Vishvananda Ishaya <vishvananda@gmail.com>
.. moduleauthor:: Joshua McKenty <joshua@cognition.ca>
.. moduleauthor:: Manish Singh <yosh@gimp.org>
.. moduleauthor:: Andy Smith <andy@anarkystic.com>
"""
|
moijes12/oh-mainline | refs/heads/master | vendor/packages/gdata/src/gdata/tlslite/utils/PyCrypto_TripleDES.py | 359 | """PyCrypto 3DES implementation."""
from cryptomath import *
from TripleDES import *
# Only define the pycrypto-backed implementation when pycrypto is available
# (pycryptoLoaded presumably comes from the cryptomath wildcard import above).
if pycryptoLoaded:
    import Crypto.Cipher.DES3
    def new(key, mode, IV):
        """Return a pycrypto-backed 3DES cipher object (factory helper)."""
        return PyCrypto_TripleDES(key, mode, IV)
    class PyCrypto_TripleDES(TripleDES):
        """tlslite TripleDES wrapper delegating to Crypto.Cipher.DES3."""
        def __init__(self, key, mode, IV):
            # "pycrypto" tags which backend implementation this is.
            TripleDES.__init__(self, key, mode, IV, "pycrypto")
            # The underlying pycrypto cipher object does the real work.
            self.context = Crypto.Cipher.DES3.new(key, mode, IV)
        def encrypt(self, plaintext):
            # Delegate straight to the pycrypto cipher context.
            return self.context.encrypt(plaintext)
        def decrypt(self, ciphertext):
            return self.context.decrypt(ciphertext)
mubeta06/python | refs/heads/master | signal_processing/sp/mls.py | 1 | #!/usr/bin/env python
"""Module providing the functionality to generate Maximal Length Sequences.
Based on wiki's description and polynomial representation.
http://en.wikipedia.org/wiki/Maximum_length_sequence
"""
import numpy
# Feedback tap positions per register length n: the polynomial exponents for
# the non-zero terms other than 1 and n (see lfsr()); used by mls() to pick
# taps that yield a maximal length sequence.
bittaps = {2:[1], 3:[2], 4:[3], 5:[2], 6:[5], 7:[6], 8:[4,5,6], 9:[5],
           10:[7], 11:[9]}
class Error(Exception):
    """This is a user-defined exception for errors raised by this module."""
    # Raised by mls() for unsupported bit lengths or a bad seed length.
    pass
def lfsr(taps, buf):
    """Run a linear feedback shift register and return its output bits.

    taps: polynomial exponents for the non-zero terms other than 1 and n
    buf: initial register contents, as 1s/0s or booleans
    """
    register = numpy.array(buf, dtype='bool')
    period = (2 ** len(buf)) - 1
    sequence = []
    for _ in range(period):
        # Output the oldest bit, then fold the tapped bits into the feedback.
        bit = register[0]
        sequence.append(bit)
        for exponent in taps:
            bit ^= register[exponent]
        register = numpy.roll(register, -1)
        register[-1] = bit
    return sequence
def mls(n, seed=None):
    """Return a Maximal Length Sequence 2^n - 1 bits long.

    seed, when given, must hold n initial register values; by default the
    register is seeded with all ones.
    """
    try:
        taps = bittaps[n]
    except KeyError:
        raise Error('taps for %s bits unknown' % str(n))
    if seed is None:
        seed = list(numpy.ones(n))
    elif len(seed) != n:
        raise Error('length of seed must equal n')
    return lfsr(taps, seed)
def main(nbits):
    """Plot the periodic autocorrelation of an nbits maximal length sequence.

    nbits: register length, as an int or a string (e.g. from the command line).
    """
    import pylab
    import filter
    nbits = int(nbits)
    m = mls(nbits)
    pylab.figure()
    pylab.title('%d bit M-Sequence Periodic Autocorrelation' % nbits)
    # Map the boolean sequence onto +/-1.0 before correlating.
    m = numpy.where(m, 1.0, -1.0)
    # filter.ccorr is the sibling module's circular correlation; the roll
    # presumably centres the zero-lag peak in the plot -- TODO confirm.
    pylab.plot((numpy.roll(filter.ccorr(m, m).real, 2**nbits/2 - 1)))
    pylab.xlim(0, len(m))
    pylab.show()
if __name__ == '__main__':
    import sys
    # Usage: mls.py <nbits>
    main(sys.argv[1])
|
amaloz/obfuscation | refs/heads/master | pyobf/obfuscator.py | 1 | import pyobf.utils as utils
import os, time
MMAP_CLT = 0x00
MMAP_GGHLITE = 0x01
MMAP_DUMMY = 0x02

# Known multilinear-map backends; anything unrecognised falls back to dummy.
_MMAP_FLAGS = {
    'CLT': MMAP_CLT,
    'GGH': MMAP_GGHLITE,
}

def get_mmap_flag(mmap):
    """Translate a multilinear-map name ('CLT', 'GGH', or anything else,
    meaning dummy) into its numeric flag."""
    return _MMAP_FLAGS.get(mmap, MMAP_DUMMY)
class Obfuscator(object):
    """Base class for program obfuscators backed by a multilinear map.

    Subclasses implement obfuscate(), evaluate() and cleanup(); this base
    class holds common configuration and timing/logging helpers.
    """
    def __init__(self, obf, mmap, base=None, verbose=False, nthreads=None,
                 ncores=None):
        self._state = None
        self._verbose = verbose
        self._nthreads = nthreads
        self._ncores = ncores
        self._base = base
        self.logger = utils.make_logger(self._verbose)
        # Numeric backend flag derived from the mmap name ('CLT'/'GGH'/dummy).
        self._mmap = get_mmap_flag(mmap)
    def _remove_old(self, directory):
        """Remove stale files left in `directory` by a previous obfuscation.

        A missing or non-directory path is silently ignored.
        """
        if os.path.isdir(directory):
            # Fix: the original listed the directory twice and left an unused
            # `files` local; it also shadowed the builtin `file`.
            for name in os.listdir(directory):
                os.unlink(os.path.join(directory, name))
    def obfuscate(self, circuit, secparam, directory, kappa=None, formula=True):
        raise NotImplementedError
    def _evaluate(self, directory, inp, f, obf, flags):
        """Time one evaluation of the obfuscated program on input `inp`.

        - f: backend evaluation callable
        - obf: backend module/object, used only for memory reporting
        - flags: backend-specific flags passed straight through
        """
        self.logger('Evaluating %s...' % inp)
        start = time.time()
        files = os.listdir(directory)
        # Number of 'input' files tells the backend how many inputs exist.
        inputs = sorted(s for s in files if 'input' in s)
        result = f(directory, inp, self._mmap, len(inputs), self._ncores, flags)
        end = time.time()
        self.logger('Took: %f' % (end - start))
        if self._verbose:
            obf.max_mem_usage()
        return result
    def evaluate(self, directory, inp):
        raise NotImplementedError
    def cleanup(self):
        raise NotImplementedError
|
pacificIT/mopidy | refs/heads/develop | mopidy/mpd/protocol/__init__.py | 15 | """
This is Mopidy's MPD protocol implementation.
This is partly based upon the `MPD protocol documentation
<http://www.musicpd.org/doc/protocol/>`_, which is a useful resource, but it is
rather incomplete with regards to data formats, both for requests and
responses. Thus, we have had to talk a great deal with the the original `MPD
server <http://mpd.wikia.com/>`_ using telnet to get the details we need to
implement our own MPD server which is compatible with the numerous existing
`MPD clients <http://mpd.wikia.com/wiki/Clients>`_.
"""
from __future__ import absolute_import, unicode_literals
import inspect
from mopidy.mpd import exceptions
#: The MPD protocol uses UTF-8 for encoding all data.
ENCODING = 'UTF-8'
#: The MPD protocol uses ``\n`` as line terminator.
LINE_TERMINATOR = '\n'
#: The MPD protocol version is 0.19.0.
VERSION = '0.19.0'
def load_protocol_modules():
    """
    The protocol modules must be imported to get them registered in
    :attr:`commands`.
    """
    # Importing each module runs its registration side effects (handlers
    # installing themselves into the global `commands` instance); the imported
    # names themselves are deliberately unused, hence the noqa.
    from . import (  # noqa
        audio_output, channels, command_list, connection, current_playlist,
        mount, music_db, playback, reflection, status, stickers,
        stored_playlists)
def INT(value):  # noqa: N802
    r"""Convert a value matching [+-]?\d+ into an integer.

    Raises ValueError for None (int(None) would raise TypeError instead).
    """
    if value is None:
        raise ValueError('None is not a valid integer')
    return int(value)
def UINT(value):  # noqa: N802
    r"""Convert a value matching \d+ into a non-negative integer.

    Raises ValueError for None, signs, or any non-digit characters.
    """
    if value is None:
        raise ValueError('None is not a valid integer')
    if value.isdigit():
        return int(value)
    raise ValueError('Only positive numbers are allowed')
def BOOL(value):  # noqa: N802
    """Convert the strings '0' and '1' into booleans."""
    if value == '1':
        return True
    if value == '0':
        return False
    raise ValueError('%r is not 0 or 1' % value)
def RANGE(value):  # noqa: N802
    """Convert an MPD range spec (single integer or range) into a slice.

    ``n`` becomes ``slice(n, n+1)``; ``n:`` becomes ``slice(n, None)``;
    ``n:m`` becomes ``slice(n, m)`` and ``m > n`` must hold.
    """
    if ':' not in value:
        begin = UINT(value)
        return slice(begin, begin + 1)
    begin, end = value.split(':', 1)
    begin = UINT(begin)
    if not end.strip():
        # Open-ended range "n:".
        return slice(begin, None)
    end = UINT(end)
    if begin >= end:
        raise ValueError('End must be larger than start')
    return slice(begin, end)
class Commands(object):
    """Collection of MPD commands to expose to users.
    Normally used through the global instance which command handlers have been
    installed into.
    """
    def __init__(self):
        # Maps command name -> validating wrapper built by add().
        self.handlers = {}
    # TODO: consider removing auth_required and list_command in favour of
    # additional command instances to register in?
    def add(self, name, auth_required=True, list_command=True, **validators):
        """Create a decorator that registers a handler and validation rules.
        Additional keyword arguments are treated as converters/validators to
        apply to tokens converting them to proper Python types.
        Requirements for valid handlers:
        - must accept a context argument as the first arg.
        - may not use variable keyword arguments, ``**kwargs``.
        - may use variable arguments ``*args`` *or* a mix of required and
          optional arguments.
        Decorator returns the unwrapped function so that tests etc can use the
        functions with values with correct python types instead of strings.
        :param string name: Name of the command being registered.
        :param bool auth_required: If authorization is required.
        :param bool list_command: If command should be listed in reflection.
        """
        def wrapper(func):
            if name in self.handlers:
                raise ValueError('%s already registered' % name)
            args, varargs, keywords, defaults = inspect.getargspec(func)
            # Pair each trailing argument name with its declared default.
            defaults = dict(zip(args[-len(defaults or []):], defaults or []))
            if not args and not varargs:
                raise TypeError('Handler must accept at least one argument.')
            if len(args) > 1 and varargs:
                raise TypeError(
                    '*args may not be combined with regular arguments')
            if not set(validators.keys()).issubset(args):
                raise TypeError('Validator for non-existent arg passed')
            if keywords:
                raise TypeError('**kwargs are not permitted')
            def validate(*args, **kwargs):
                # Handlers taking *args receive the raw tokens unvalidated.
                if varargs:
                    return func(*args, **kwargs)
                try:
                    callargs = inspect.getcallargs(func, *args, **kwargs)
                except TypeError:
                    raise exceptions.MpdArgError(
                        'wrong number of arguments for "%s"' % name)
                for key, value in callargs.items():
                    # Only validate arguments that were actually supplied,
                    # i.e. whose value differs from the declared default.
                    default = defaults.get(key, object())
                    if key in validators and value != default:
                        try:
                            callargs[key] = validators[key](value)
                        except ValueError:
                            raise exceptions.MpdArgError('incorrect arguments')
                return func(**callargs)
            validate.auth_required = auth_required
            validate.list_command = list_command
            self.handlers[name] = validate
            # Return the original function (not the wrapper) so tests can call
            # it directly with proper Python types.
            return func
        return wrapper
    def call(self, tokens, context=None):
        """Find and run the handler registered for the given command.
        If the handler was registered with any converters/validators they will
        be run before calling the real handler.
        :param list tokens: List of tokens to process
        :param context: MPD context.
        :type context: :class:`~mopidy.mpd.dispatcher.MpdContext`
        """
        if not tokens:
            raise exceptions.MpdNoCommand()
        if tokens[0] not in self.handlers:
            raise exceptions.MpdUnknownCommand(command=tokens[0])
        # First token is the command name; the rest are its raw arguments.
        return self.handlers[tokens[0]](context, *tokens[1:])
#: Global instance to install commands into
commands = Commands()
|
hyphyphyph/lascaux | refs/heads/master | tests/test_widget.py | 1 | if __name__ == "__main__": import sys; sys.path.append('.')
import unittest
from petrified.widget import Widget
class TestWidget(unittest.TestCase):
    """Unit tests for petrified.widget.Widget."""
    def setUp(self):
        # Simulated form POST data consumed by ingest_POST().
        self.POST = dict(first_name=u'Derek',
                         last_name=u'Mounce')
    def test_init(self):
        # Constructing a Widget with no arguments must not raise.
        widget = Widget()
    def test_ingest_POST(self):
        # A named widget picks up its own value from the POST dict.
        widget = Widget(name='first_name')
        widget.ingest_POST(self.POST)
        self.assertEqual(widget.value, u'Derek')
    def test_validate(self):
        # A required widget with no ingested value flags an error and
        # normalises its value to the empty string.
        widget = Widget(required=True, name='first_name')
        widget.validate()
        self.assertEqual(widget.value, u'')
        self.assertTrue(widget.error)
    def test_render(self):
        # Both render() and unicode() produce the same hidden-input markup.
        widget = Widget(name='first_name')
        self.assertEqual(widget.render(),
                         '<input type="hidden" name="first_name" />')
        self.assertEqual(unicode(widget),
                         '<input type="hidden" name="first_name" />')
if __name__ == "__main__":
unittest.main()
|
MQQiang/kbengine | refs/heads/master | kbe/src/lib/python/Lib/nntplib.py | 63 | """An NNTP client class based on:
- RFC 977: Network News Transfer Protocol
- RFC 2980: Common NNTP Extensions
- RFC 3977: Network News Transfer Protocol (version 2)
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print('Group', name, 'has', count, 'articles, range', first, 'to', last)
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last))
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'rb') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Incompatible changes from the 2.x nntplib:
# - all commands are encoded as UTF-8 data (using the "surrogateescape"
# error handler), except for raw message data (POST, IHAVE)
# - all responses are decoded as UTF-8 data (using the "surrogateescape"
# error handler), except for raw message data (ARTICLE, HEAD, BODY)
# - the `file` argument to various methods is keyword-only
#
# - NNTP.date() returns a datetime object
# - NNTP.newgroups() and NNTP.newnews() take a datetime (or date) object,
# rather than a pair of (date, time) strings.
# - NNTP.newgroups() and NNTP.list() return a list of GroupInfo named tuples
# - NNTP.descriptions() returns a dict mapping group names to descriptions
# - NNTP.xover() returns a list of dicts mapping field names (header or metadata)
# to field values; each dict representing a message overview.
# - NNTP.article(), NNTP.head() and NNTP.body() return a (response, ArticleInfo)
# tuple.
# - the "internal" methods have been marked private (they now start with
# an underscore)
# Other changes from the 2.x/3.1 nntplib:
# - automatic querying of capabilities at connect
# - New method NNTP.getcapabilities()
# - New method NNTP.over()
# - New helper function decode_header()
# - NNTP.post() and NNTP.ihave() accept file objects, bytes-like objects and
# arbitrary iterables yielding lines.
# - An extensive test suite :-)
# TODO:
# - return structured data (GroupInfo etc.) everywhere
# - support HDR
# Imports
import re
import socket
import collections
import datetime
import warnings
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
from email.header import decode_header as _email_decode_header
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ["NNTP",
"NNTPError", "NNTPReplyError", "NNTPTemporaryError",
"NNTPPermanentError", "NNTPProtocolError", "NNTPDataError",
"decode_header",
]
# maximal line length when calling readline(). This is to prevent
# reading arbitrary length lines. RFC 3977 limits NNTP line length to
# 512 characters, including CRLF. We have selected 2048 just to be on
# the safe side.
_MAXLINE = 2048
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
    """Base class for all nntplib exceptions"""
    def __init__(self, *args):
        Exception.__init__(self, *args)
        # Keep the server's response line handy for callers; fall back to a
        # placeholder when the error was raised without one.
        if args:
            self.response = args[0]
        else:
            self.response = 'No response given'
class NNTPReplyError(NNTPError):
    """Unexpected [123]xx reply"""
    # A success/continue status arrived where it was not expected.
    pass
class NNTPTemporaryError(NNTPError):
    """4xx errors"""
    # Temporary failure class per the 4xx status family.
    pass
class NNTPPermanentError(NNTPError):
    """5xx errors"""
    # Permanent failure class per the 5xx status family.
    pass
class NNTPProtocolError(NNTPError):
    """Response does not begin with [1-5]"""
    # The server response is malformed at the protocol level.
    pass
class NNTPDataError(NNTPError):
    """Error in response data"""
    pass
# Standard port used by NNTP servers
NNTP_PORT = 119
NNTP_SSL_PORT = 563
# Response numbers that are followed by additional text (e.g. article)
_LONGRESP = {
'100', # HELP
'101', # CAPABILITIES
'211', # LISTGROUP (also not multi-line with GROUP)
'215', # LIST
'220', # ARTICLE
'221', # HEAD, XHDR
'222', # BODY
'224', # OVER, XOVER
'225', # HDR
'230', # NEWNEWS
'231', # NEWGROUPS
'282', # XGTITLE
}
# Default decoded value for LIST OVERVIEW.FMT if not supported
_DEFAULT_OVERVIEW_FMT = [
"subject", "from", "date", "message-id", "references", ":bytes", ":lines"]
# Alternative names allowed in LIST OVERVIEW.FMT response
_OVERVIEW_FMT_ALTERNATIVES = {
'bytes': ':bytes',
'lines': ':lines',
}
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
_CRLF = b'\r\n'
GroupInfo = collections.namedtuple('GroupInfo',
['group', 'last', 'first', 'flag'])
ArticleInfo = collections.namedtuple('ArticleInfo',
['number', 'message_id', 'lines'])
# Helper function(s)
def decode_header(header_str):
    """Decode a munged (RFC 2047 style) header value into a readable,
    possibly non-ASCII, unicode string."""
    pieces = []
    for value, charset in _email_decode_header(header_str):
        if isinstance(value, bytes):
            # Undecoded words come back as bytes plus their declared charset.
            value = value.decode(charset or 'ascii')
        pieces.append(value)
    return ''.join(pieces)
def _parse_overview_fmt(lines):
    """Parse a LIST OVERVIEW.FMT response into a list of header/metadata
    names.

    Raises NNTPDataError if the response is not compliant
    (cf. RFC 3977, section 8.4)."""
    fmt = []
    for line in lines:
        if line[0] == ':':
            # Metadata entry, e.g. ":bytes" -- keep the leading colon.
            name = ':' + line[1:].partition(':')[0]
        else:
            # Header entry, e.g. "Subject:" or "Xref:full"; the suffix after
            # the colon is ignored.
            name = line.partition(':')[0].lower()
        fmt.append(_OVERVIEW_FMT_ALTERNATIVES.get(name, name))
    defaults = _DEFAULT_OVERVIEW_FMT
    if len(fmt) < len(defaults):
        raise NNTPDataError("LIST OVERVIEW.FMT response too short")
    if fmt[:len(defaults)] != defaults:
        raise NNTPDataError("LIST OVERVIEW.FMT redefines default fields")
    return fmt
def _parse_overview(lines, fmt, data_process_func=None):
    """Parse the response to a OVER or XOVER command according to the
    overview format `fmt`."""
    # NOTE(review): data_process_func is accepted but never used in this body.
    n_defaults = len(_DEFAULT_OVERVIEW_FMT)
    overview = []
    for line in lines:
        fields = {}
        # Each line is tab-separated: article number first, then one token
        # per overview field.
        article_number, *tokens = line.split('\t')
        article_number = int(article_number)
        for i, token in enumerate(tokens):
            if i >= len(fmt):
                # XXX should we raise an error? Some servers might not
                # support LIST OVERVIEW.FMT and still return additional
                # headers.
                continue
            field_name = fmt[i]
            is_metadata = field_name.startswith(':')
            if i >= n_defaults and not is_metadata:
                # Non-default header names are included in full in the response
                # (unless the field is totally empty)
                h = field_name + ": "
                if token and token[:len(h)].lower() != h:
                    raise NNTPDataError("OVER/XOVER response doesn't include "
                                        "names of additional headers")
                # Strip the "Name: " prefix; an empty field maps to None.
                token = token[len(h):] if token else None
            fields[fmt[i]] = token
        overview.append((article_number, fields))
    return overview
def _parse_datetime(date_str, time_str=None):
"""Parse a pair of (date, time) strings, and return a datetime object.
If only the date is given, it is assumed to be date and time
concatenated together (e.g. response to the DATE command).
"""
if time_str is None:
time_str = date_str[-6:]
date_str = date_str[:-6]
hours = int(time_str[:2])
minutes = int(time_str[2:4])
seconds = int(time_str[4:])
year = int(date_str[:-4])
month = int(date_str[-4:-2])
day = int(date_str[-2:])
# RFC 3977 doesn't say how to interpret 2-char years. Assume that
# there are no dates before 1970 on Usenet.
if year < 70:
year += 2000
elif year < 100:
year += 1900
return datetime.datetime(year, month, day, hours, minutes, seconds)
def _unparse_datetime(dt, legacy=False):
"""Format a date or datetime object as a pair of (date, time) strings
in the format required by the NEWNEWS and NEWGROUPS commands. If a
date object is passed, the time is assumed to be midnight (00h00).
The returned representation depends on the legacy flag:
* if legacy is False (the default):
date has the YYYYMMDD format and time the HHMMSS format
* if legacy is True:
date has the YYMMDD format and time the HHMMSS format.
RFC 3977 compliant servers should understand both formats; therefore,
legacy is only needed when talking to old servers.
"""
if not isinstance(dt, datetime.datetime):
time_str = "000000"
else:
time_str = "{0.hour:02d}{0.minute:02d}{0.second:02d}".format(dt)
y = dt.year
if legacy:
y = y % 100
date_str = "{0:02d}{1.month:02d}{1.day:02d}".format(y, dt)
else:
date_str = "{0:04d}{1.month:02d}{1.day:02d}".format(y, dt)
return date_str, time_str
if _have_ssl:
    def _encrypt_on(sock, context, hostname):
        """Wrap a socket in SSL/TLS. Arguments:
        - sock: Socket to wrap
        - context: SSL context to use for the encrypted connection
        Returns:
        - sock: New, encrypted socket.
        """
        # Generate a default SSL context if none was passed.
        if context is None:
            context = ssl._create_stdlib_context()
        # Only pass the hostname through when the ssl module supports SNI.
        server_hostname = hostname if ssl.HAS_SNI else None
        return context.wrap_socket(sock, server_hostname=server_hostname)
# The classes themselves
class _NNTPBase:
# UTF-8 is the character set for all NNTP commands and responses: they
# are automatically encoded (when sending) and decoded (and receiving)
# by this class.
# However, some multi-line data blocks can contain arbitrary bytes (for
# example, latin-1 or utf-16 data in the body of a message). Commands
# taking (POST, IHAVE) or returning (HEAD, BODY, ARTICLE) raw message
# data will therefore only accept and produce bytes objects.
# Furthermore, since there could be non-compliant servers out there,
# we use 'surrogateescape' as the error handler for fault tolerance
# and easy round-tripping. This could be useful for some applications
# (e.g. NNTP gateways).
encoding = 'utf-8'
errors = 'surrogateescape'
    def __init__(self, file, host,
                 readermode=None, timeout=_GLOBAL_DEFAULT_TIMEOUT):
        """Initialize an instance.  Arguments:
        - file: file-like object (open for read/write in binary mode)
        - host: hostname of the server
        - readermode: if true, send 'mode reader' command after
                      connecting.
        - timeout: timeout (in seconds) used for socket connections

        readermode is sometimes necessary if you are connecting to an
        NNTP server on the local machine and intend to call
        reader-specific commands, such as `group'.  If you get
        unexpected NNTPPermanentErrors, you might need to set
        readermode.
        """
        self.host = host
        self.file = file
        self.debugging = 0
        # The server greets us immediately on connect; capture that line.
        self.welcome = self._getresp()
        # Inquire about capabilities (RFC 3977).
        self._caps = None
        self.getcapabilities()
        # 'MODE READER' is sometimes necessary to enable 'reader' mode.
        # However, the order in which 'MODE READER' and 'AUTHINFO' need to
        # arrive differs between some NNTP servers. If _setreadermode() fails
        # with an authorization failed error, it will set this to True;
        # the login() routine will interpret that as a request to try again
        # after performing its normal function.
        # Enable only if we're not already in READER mode anyway.
        self.readermode_afterauth = False
        if readermode and 'READER' not in self._caps:
            self._setreadermode()
            if not self.readermode_afterauth:
                # Capabilities might have changed after MODE READER
                self._caps = None
                self.getcapabilities()
        # RFC 4642 2.2.2: Both the client and the server MUST know if there is
        # a TLS session active.  A client MUST NOT attempt to start a TLS
        # session if a TLS session is already active.
        self.tls_on = False
        # Log in and encryption setup order is left to subclasses.
        self.authenticated = False
    def __enter__(self):
        # Support "with NNTP(...) as s:" usage; no setup needed beyond
        # what __init__ already did.
        return self
    def __exit__(self, *args):
        # "Connected" is tracked by the presence of the `file` attribute.
        is_connected = lambda: hasattr(self, "file")
        if is_connected():
            try:
                # Try to quit politely first...
                self.quit()
            except (OSError, EOFError):
                pass
            finally:
                # ...but always close the connection, even if QUIT failed
                # (quit() may already have dropped `file`, hence the re-check).
                if is_connected():
                    self._close()
    def getwelcome(self):
        """Get the welcome message from the server
        (this is read and squirreled away by __init__()).
        If the response code is 200, posting is allowed;
        if it 201, posting is not allowed."""
        # self.welcome was captured by __init__ via _getresp().
        if self.debugging: print('*welcome*', repr(self.welcome))
        return self.welcome
    def getcapabilities(self):
        """Get the server capabilities, as read by __init__().
        If the CAPABILITIES command is not supported, an empty dict is
        returned."""
        if self._caps is None:
            # Defaults assumed until the server tells us otherwise.
            self.nntp_version = 1
            self.nntp_implementation = None
            try:
                resp, caps = self.capabilities()
            except (NNTPPermanentError, NNTPTemporaryError):
                # Server doesn't support capabilities
                self._caps = {}
            else:
                self._caps = caps
                if 'VERSION' in caps:
                    # The server can advertise several supported versions,
                    # choose the highest.
                    self.nntp_version = max(map(int, caps['VERSION']))
                if 'IMPLEMENTATION' in caps:
                    self.nntp_implementation = ' '.join(caps['IMPLEMENTATION'])
        return self._caps
    def set_debuglevel(self, level):
        """Set the debugging level.  Argument 'level' means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF"""
        self.debugging = level
    # Shorter alias for set_debuglevel().
    debug = set_debuglevel
    def _putline(self, line):
        """Internal: send one line to the server, appending CRLF.
        The `line` must be a bytes-like object."""
        line = line + _CRLF
        if self.debugging > 1: print('*put*', repr(line))
        self.file.write(line)
        # Flush so the command is actually on the wire before we read.
        self.file.flush()
    def _putcmd(self, line):
        """Internal: send one command to the server (through _putline()).
        The `line` must be an unicode string."""
        if self.debugging: print('*cmd*', repr(line))
        # Commands are encoded with the class-wide UTF-8/surrogateescape
        # policy before hitting the binary stream.
        line = line.encode(self.encoding, self.errors)
        self._putline(line)
    def _getline(self, strip_crlf=True):
        """Internal: return one line from the server, stripping _CRLF.
        Raise EOFError if the connection is closed.
        Returns a bytes object."""
        # Read one byte past _MAXLINE so over-long lines can be detected.
        line = self.file.readline(_MAXLINE +1)
        if len(line) > _MAXLINE:
            raise NNTPDataError('line too long')
        if self.debugging > 1:
            print('*get*', repr(line))
        if not line: raise EOFError
        if strip_crlf:
            # Accept CRLF, bare CR or bare LF as the line terminator.
            if line[-2:] == _CRLF:
                line = line[:-2]
            elif line[-1:] in _CRLF:
                line = line[:-1]
        return line
    def _getresp(self):
        """Internal: get a response from the server.
        Raise various errors if the response indicates an error.
        Returns an unicode string."""
        resp = self._getline()
        if self.debugging: print('*resp*', repr(resp))
        resp = resp.decode(self.encoding, self.errors)
        # The first digit of the status code determines the error class:
        # 4xx is temporary, 5xx permanent, anything outside 1-5 is malformed.
        c = resp[:1]
        if c == '4':
            raise NNTPTemporaryError(resp)
        if c == '5':
            raise NNTPPermanentError(resp)
        if c not in '123':
            raise NNTPProtocolError(resp)
        return resp
    def _getlongresp(self, file=None):
        """Internal: get a response plus following text from the server.
        Raise various errors if the response indicates an error.

        Returns a (response, lines) tuple where `response` is an unicode
        string and `lines` is a list of bytes objects.
        If `file` is a file-like object, it must be open in binary mode.
        Note that when a file is given, the returned `lines` list is empty.
        """
        openedFile = None
        try:
            # If a string was passed then open a file with that name
            if isinstance(file, (str, bytes)):
                openedFile = file = open(file, "wb")
            resp = self._getresp()
            # Only status codes listed in _LONGRESP are followed by a
            # multi-line data block.
            if resp[:3] not in _LONGRESP:
                raise NNTPReplyError(resp)
            lines = []
            if file is not None:
                # XXX lines = None instead?
                terminators = (b'.' + _CRLF, b'.\n')
                while 1:
                    line = self._getline(False)
                    if line in terminators:
                        break
                    # Undo dot-stuffing: a leading '..' stands for '.'.
                    if line.startswith(b'..'):
                        line = line[1:]
                    file.write(line)
            else:
                # A lone '.' terminates the data block.
                terminator = b'.'
                while 1:
                    line = self._getline()
                    if line == terminator:
                        break
                    if line.startswith(b'..'):
                        line = line[1:]
                    lines.append(line)
        finally:
            # If this method created the file, then it must close it
            if openedFile:
                openedFile.close()
        return resp, lines
    def _shortcmd(self, line):
        """Internal: send a command and get the response.
        Same return value as _getresp()."""
        self._putcmd(line)
        return self._getresp()
    def _longcmd(self, line, file=None):
        """Internal: send a command and get the response plus following text.
        Same return value as _getlongresp()."""
        self._putcmd(line)
        return self._getlongresp(file)
    def _longcmdstring(self, line, file=None):
        """Internal: send a command and get the response plus following text.
        Same as _longcmd() and _getlongresp(), except that the returned `lines`
        are unicode strings rather than bytes objects.
        """
        self._putcmd(line)
        resp, list = self._getlongresp(file)
        # Decode each data line with the class-wide UTF-8/surrogateescape
        # policy before handing it back.
        return resp, [line.decode(self.encoding, self.errors)
                      for line in list]
    def _getoverviewfmt(self):
        """Internal: get the overview format. Queries the server if not
        already done, else returns the cached value."""
        try:
            return self._cachedoverviewfmt
        except AttributeError:
            # Not queried yet -- fall through and ask the server once.
            pass
        try:
            resp, lines = self._longcmdstring("LIST OVERVIEW.FMT")
        except NNTPPermanentError:
            # Not supported by server?
            fmt = _DEFAULT_OVERVIEW_FMT[:]
        else:
            fmt = _parse_overview_fmt(lines)
        self._cachedoverviewfmt = fmt
        return fmt
    def _grouplist(self, lines):
        # Parse lines into "group last first flag"
        return [GroupInfo(*line.split()) for line in lines]
def capabilities(self):
"""Process a CAPABILITIES command. Not supported by all servers.
Return:
- resp: server response if successful
- caps: a dictionary mapping capability names to lists of tokens
(for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] })
"""
caps = {}
resp, lines = self._longcmdstring("CAPABILITIES")
for line in lines:
name, *tokens = line.split()
caps[name] = tokens
return resp, caps
def newgroups(self, date, *, file=None):
"""Process a NEWGROUPS command. Arguments:
- date: a date or datetime object
Return:
- resp: server response if successful
- list: list of newsgroup names
"""
if not isinstance(date, (datetime.date, datetime.date)):
raise TypeError(
"the date parameter must be a date or datetime object, "
"not '{:40}'".format(date.__class__.__name__))
date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
cmd = 'NEWGROUPS {0} {1}'.format(date_str, time_str)
resp, lines = self._longcmdstring(cmd, file)
return resp, self._grouplist(lines)
def newnews(self, group, date, *, file=None):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: a date or datetime object
Return:
- resp: server response if successful
- list: list of message ids
"""
if not isinstance(date, (datetime.date, datetime.date)):
raise TypeError(
"the date parameter must be a date or datetime object, "
"not '{:40}'".format(date.__class__.__name__))
date_str, time_str = _unparse_datetime(date, self.nntp_version < 2)
cmd = 'NEWNEWS {0} {1} {2}'.format(group, date_str, time_str)
return self._longcmdstring(cmd, file)
def list(self, group_pattern=None, *, file=None):
"""Process a LIST or LIST ACTIVE command. Arguments:
- group_pattern: a pattern indicating which groups to query
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)
"""
if group_pattern is not None:
command = 'LIST ACTIVE ' + group_pattern
else:
command = 'LIST'
resp, lines = self._longcmdstring(command, file)
return resp, self._grouplist(lines)
    def _getdescriptions(self, group_pattern, return_all):
        """Internal: fetch group descriptions matching `group_pattern`.

        With return_all true, returns (resp, {group: description}); otherwise
        returns only the first matching description string (or '' if none).
        """
        # "group<whitespace>description"
        line_pat = re.compile('^(?P<group>[^ \t]+)[ \t]+(.*)$')
        # Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
        resp, lines = self._longcmdstring('LIST NEWSGROUPS ' + group_pattern)
        if not resp.startswith('215'):
            # Now the deprecated XGTITLE.  This either raises an error
            # or succeeds with the same output structure as LIST
            # NEWSGROUPS.
            resp, lines = self._longcmdstring('XGTITLE ' + group_pattern)
        groups = {}
        for raw_line in lines:
            match = line_pat.search(raw_line.strip())
            if match:
                name, desc = match.group(1, 2)
                if not return_all:
                    # First match wins in single-result mode.
                    return desc
                groups[name] = desc
        if return_all:
            return resp, groups
        else:
            # Nothing found
            # NOTE(review): the two modes return different shapes (tuple vs
            # plain string) -- long-standing nntplib behavior, kept as is.
            return ''
    def description(self, group):
        """Get a description for a single group.  If more than one
        group matches ('group' is a pattern), return the first.  If no
        group matches, return an empty string.

        This elides the response code from the server, since it can
        only be '215' or '285' (for xgtitle) anyway.  If the response
        code is needed, use the 'descriptions' method.

        NOTE: This neither checks for a wildcard in 'group' nor does
        it check whether the group actually exists."""
        return self._getdescriptions(group, False)
    def descriptions(self, group_pattern):
        """Get descriptions for a range of groups.
        Returns (resp, {group_name: description})."""
        return self._getdescriptions(group_pattern, True)
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles
- first: first article number
- last: last article number
- name: the group name
"""
resp = self._shortcmd('GROUP ' + name)
if not resp.startswith('211'):
raise NNTPReplyError(resp)
words = resp.split()
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = words[4].lower()
return resp, int(count), int(first), int(last), name
    def help(self, *, file=None):
        """Process a HELP command.  Argument:
        - file: Filename string or file object to store the result in
        Returns:
        - resp: server response if successful
        - list: list of strings returned by the server in response to the
          HELP command
        """
        return self._longcmdstring('HELP', file)
def _statparse(self, resp):
"""Internal: parse the response line of a STAT, NEXT, LAST,
ARTICLE, HEAD or BODY command."""
if not resp.startswith('22'):
raise NNTPReplyError(resp)
words = resp.split()
art_num = int(words[1])
message_id = words[2]
return resp, art_num, message_id
def _statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self._shortcmd(line)
return self._statparse(resp)
def stat(self, message_spec=None):
"""Process a STAT command. Argument:
- message_spec: article number or message id (if not specified,
the current article is selected)
Returns:
- resp: server response if successful
- art_num: the article number
- message_id: the message id
"""
if message_spec:
return self._statcmd('STAT {0}'.format(message_spec))
else:
return self._statcmd('STAT')
    def next(self):
        """Process a NEXT command.  No arguments.  Return as for STAT."""
        return self._statcmd('NEXT')
    def last(self):
        """Process a LAST command.  No arguments.  Return as for STAT."""
        return self._statcmd('LAST')
    def _artcmd(self, line, file=None):
        """Internal: process a HEAD, BODY or ARTICLE command.
        Returns (resp, ArticleInfo(number, message_id, lines))."""
        resp, lines = self._longcmd(line, file)
        # The status line carries the article number and message id; the
        # multi-line payload goes into the ArticleInfo named tuple.
        resp, art_num, message_id = self._statparse(resp)
        return resp, ArticleInfo(art_num, message_id, lines)
def head(self, message_spec=None, *, file=None):
"""Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines)
"""
if message_spec is not None:
cmd = 'HEAD {0}'.format(message_spec)
else:
cmd = 'HEAD'
return self._artcmd(cmd, file)
def body(self, message_spec=None, *, file=None):
"""Process a BODY command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the body in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of body lines)
"""
if message_spec is not None:
cmd = 'BODY {0}'.format(message_spec)
else:
cmd = 'BODY'
return self._artcmd(cmd, file)
def article(self, message_spec=None, *, file=None):
"""Process an ARTICLE command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the article in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of article lines)
"""
if message_spec is not None:
cmd = 'ARTICLE {0}'.format(message_spec)
else:
cmd = 'ARTICLE'
return self._artcmd(cmd, file)
    def slave(self):
        """Process a SLAVE command. Returns:
        - resp: server response if successful
        """
        # Historical command; simply forwarded to the server.
        return self._shortcmd('SLAVE')
def xhdr(self, hdr, str, *, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of (nr, value) strings
"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self._longcmdstring('XHDR {0} {1}'.format(hdr, str), file)
def remove_number(line):
m = pat.match(line)
return m.group(1, 2) if m else line
return resp, [remove_number(line) for line in lines]
def xover(self, start, end, *, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
"""
resp, lines = self._longcmdstring('XOVER {0}-{1}'.format(start, end),
file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def over(self, message_spec, *, file=None):
"""Process an OVER command. If the command isn't supported, fall
back to XOVER. Arguments:
- message_spec:
- either a message id, indicating the article to fetch
information about
- or a (start, end) tuple, indicating a range of article numbers;
if end is None, information up to the newest message will be
retrieved
- or None, indicating the current article number must be used
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
NOTE: the "message id" form isn't supported by XOVER
"""
cmd = 'OVER' if 'OVER' in self._caps else 'XOVER'
if isinstance(message_spec, (tuple, list)):
start, end = message_spec
cmd += ' {0}-{1}'.format(start, end or '')
elif message_spec is not None:
cmd = cmd + ' ' + message_spec
resp, lines = self._longcmdstring(cmd, file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def xgtitle(self, group, *, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
warnings.warn("The XGTITLE extension is not actively used, "
"use descriptions() instead",
DeprecationWarning, 2)
line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
resp, raw_lines = self._longcmdstring('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self, id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article
"""
warnings.warn("The XPATH extension is not actively used",
DeprecationWarning, 2)
resp = self._shortcmd('XPATH {0}'.format(id))
if not resp.startswith('223'):
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date(self):
"""Process the DATE command.
Returns:
- resp: server response if successful
- date: datetime object
"""
resp = self._shortcmd("DATE")
if not resp.startswith('111'):
raise NNTPReplyError(resp)
elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1]
if len(date) != 14:
raise NNTPDataError(resp)
return resp, _parse_datetime(date, None)
    def _post(self, command, f):
        """Internal: send *command* (POST or IHAVE ...), then transmit the
        article in *f* as dot-stuffed, CRLF-terminated lines, and return
        the server's final response."""
        resp = self._shortcmd(command)
        # Raises a specific exception if posting is not allowed
        if not resp.startswith('3'):
            raise NNTPReplyError(resp)
        if isinstance(f, (bytes, bytearray)):
            # A raw bytes blob is treated as an iterable of its lines.
            f = f.splitlines()
        # We don't use _putline() because:
        # - we don't want additional CRLF if the file or iterable is already
        # in the right format
        # - we don't want a spurious flush() after each line is written
        for line in f:
            if not line.endswith(_CRLF):
                line = line.rstrip(b"\r\n") + _CRLF
            if line.startswith(b'.'):
                # Dot-stuffing: double a leading '.' so it isn't taken as
                # the end-of-article marker.
                line = b'.' + line
            self.file.write(line)
        # A line containing a single dot terminates the article.
        self.file.write(b".\r\n")
        self.file.flush()
        return self._getresp()
def post(self, data):
"""Process a POST command. Arguments:
- data: bytes object, iterable or file containing the article
Returns:
- resp: server response if successful"""
return self._post('POST', data)
def ihave(self, message_id, data):
"""Process an IHAVE command. Arguments:
- message_id: message-id of the article
- data: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
return self._post('IHAVE {0}'.format(message_id), data)
    def _close(self):
        # Close the buffered socket file and drop the reference so any
        # further use of this connection fails fast with AttributeError.
        self.file.close()
        del self.file
    def quit(self):
        """Process a QUIT command and close the socket. Returns:
        - resp: server response if successful"""
        try:
            resp = self._shortcmd('QUIT')
        finally:
            # Tear the connection down even if sending QUIT failed.
            self._close()
        return resp
    def login(self, user=None, password=None, usenetrc=True):
        """Authenticate with AUTHINFO USER/PASS, optionally taking the
        credentials from ~/.netrc, then refresh the capability cache and
        enter reader mode if that was deferred until after authentication.
        Raises ValueError if already logged in or if neither a user nor
        netrc lookup was requested."""
        if self.authenticated:
            raise ValueError("Already logged in.")
        if not user and not usenetrc:
            raise ValueError(
                "At least one of `user` and `usenetrc` must be specified")
        # If no login/password was specified but netrc was requested,
        # try to get them from ~/.netrc
        # Presume that if .netrc has an entry, NNRP authentication is required.
        try:
            if usenetrc and not user:
                import netrc
                credentials = netrc.netrc()
                auth = credentials.authenticators(self.host)
                if auth:
                    user = auth[0]
                    password = auth[2]
        except OSError:
            # A missing or unreadable .netrc is not an error here.
            pass
        # Perform NNTP authentication if needed.
        if not user:
            return
        resp = self._shortcmd('authinfo user ' + user)
        if resp.startswith('381'):
            # 381 = password required to complete authentication.
            if not password:
                raise NNTPReplyError(resp)
            else:
                resp = self._shortcmd('authinfo pass ' + password)
                if not resp.startswith('281'):
                    raise NNTPPermanentError(resp)
        # Capabilities might have changed after login
        self._caps = None
        self.getcapabilities()
        # Attempt to send mode reader if it was requested after login.
        # Only do so if we're not in reader mode already.
        if self.readermode_afterauth and 'READER' not in self._caps:
            self._setreadermode()
            # Capabilities might have changed after MODE READER
            self._caps = None
            self.getcapabilities()
    def _setreadermode(self):
        """Internal: issue MODE READER, tolerating servers that don't
        implement it and deferring it when authentication is required."""
        try:
            self.welcome = self._shortcmd('mode reader')
        except NNTPPermanentError:
            # Error 5xx, probably 'not implemented'
            pass
        except NNTPTemporaryError as e:
            if e.response.startswith('480'):
                # Need authorization before 'mode reader'
                self.readermode_afterauth = True
            else:
                raise
    if _have_ssl:
        def starttls(self, context=None):
            """Process a STARTTLS command. Arguments:
            - context: SSL context to use for the encrypted connection
            """
            # Per RFC 4642, STARTTLS MUST NOT be sent after authentication or if
            # a TLS session already exists.
            if self.tls_on:
                raise ValueError("TLS is already enabled.")
            if self.authenticated:
                raise ValueError("TLS cannot be started after authentication.")
            resp = self._shortcmd('STARTTLS')
            if resp.startswith('382'):
                # Rebuild the buffered file object on top of the now
                # encrypted socket.
                self.file.close()
                self.sock = _encrypt_on(self.sock, context, self.host)
                self.file = self.sock.makefile("rwb")
                self.tls_on = True
                # Capabilities may change after TLS starts up, so ask for them
                # again.
                self._caps = None
                self.getcapabilities()
            else:
                raise NNTPError("TLS failed to start.")
class NNTP(_NNTPBase):
    """Plain-socket NNTP client; connects on construction and optionally
    logs in."""
    def __init__(self, host, port=NNTP_PORT, user=None, password=None,
                 readermode=None, usenetrc=False,
                 timeout=_GLOBAL_DEFAULT_TIMEOUT):
        """Initialize an instance. Arguments:
        - host: hostname to connect to
        - port: port to connect to (default the standard NNTP port)
        - user: username to authenticate with
        - password: password to use with username
        - readermode: if true, send 'mode reader' command after
        connecting.
        - usenetrc: allow loading username and password from ~/.netrc file
        if not specified explicitly
        - timeout: timeout (in seconds) used for socket connections
        readermode is sometimes necessary if you are connecting to an
        NNTP server on the local machine and intend to call
        reader-specific commands, such as `group'. If you get
        unexpected NNTPPermanentErrors, you might need to set
        readermode.
        """
        self.host = host
        self.port = port
        self.sock = socket.create_connection((host, port), timeout)
        # _NNTPBase drives the protocol through this buffered file object.
        file = self.sock.makefile("rwb")
        _NNTPBase.__init__(self, file, host,
                           readermode, timeout)
        if user or usenetrc:
            self.login(user, password, usenetrc)
    def _close(self):
        try:
            _NNTPBase._close(self)
        finally:
            # The base class only closes the file; the socket is ours.
            self.sock.close()
if _have_ssl:
    class NNTP_SSL(_NNTPBase):
        """NNTP client over an implicit-TLS connection (default port 563)."""
        def __init__(self, host, port=NNTP_SSL_PORT,
                     user=None, password=None, ssl_context=None,
                     readermode=None, usenetrc=False,
                     timeout=_GLOBAL_DEFAULT_TIMEOUT):
            """This works identically to NNTP.__init__, except for the change
            in default port and the `ssl_context` argument for SSL connections.
            """
            self.sock = socket.create_connection((host, port), timeout)
            # Wrap the socket in TLS before any protocol traffic.
            self.sock = _encrypt_on(self.sock, ssl_context, host)
            file = self.sock.makefile("rwb")
            _NNTPBase.__init__(self, file, host,
                               readermode=readermode, timeout=timeout)
            if user or usenetrc:
                self.login(user, password, usenetrc)
        def _close(self):
            try:
                _NNTPBase._close(self)
            finally:
                # The base class only closes the file; the socket is ours.
                self.sock.close()
    # Only export the SSL variant when the ssl module is available.
    __all__.append("NNTP_SSL")
# Test retrieval when run as a script.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description="""\
        nntplib built-in demo - display the latest articles in a newsgroup""")
    parser.add_argument('-g', '--group', default='gmane.comp.python.general',
                        help='group to fetch messages from (default: %(default)s)')
    parser.add_argument('-s', '--server', default='news.gmane.org',
                        help='NNTP server hostname (default: %(default)s)')
    parser.add_argument('-p', '--port', default=-1, type=int,
                        help='NNTP port number (default: %s / %s)' % (NNTP_PORT, NNTP_SSL_PORT))
    parser.add_argument('-n', '--nb-articles', default=10, type=int,
                        help='number of articles to fetch (default: %(default)s)')
    parser.add_argument('-S', '--ssl', action='store_true', default=False,
                        help='use NNTP over SSL')
    args = parser.parse_args()
    port = args.port
    # -1 means "no port given": pick the default for the chosen transport.
    if not args.ssl:
        if port == -1:
            port = NNTP_PORT
        s = NNTP(host=args.server, port=port)
    else:
        if port == -1:
            port = NNTP_SSL_PORT
        s = NNTP_SSL(host=args.server, port=port)
    caps = s.getcapabilities()
    if 'STARTTLS' in caps:
        # Opportunistically upgrade a plain connection to TLS.
        s.starttls()
    resp, count, first, last, name = s.group(args.group)
    print('Group', name, 'has', count, 'articles, range', first, 'to', last)
    def cut(s, lim):
        # Truncate s to lim characters, marking the cut with '...'.
        if len(s) > lim:
            s = s[:lim - 4] + "..."
        return s
    # Show only the newest nb_articles entries of the group.
    first = str(int(last) - args.nb_articles + 1)
    resp, overviews = s.xover(first, last)
    for artnum, over in overviews:
        author = decode_header(over['from']).split('<', 1)[0]
        subject = decode_header(over['subject'])
        lines = int(over[':lines'])
        print("{:7} {:20} {:42} ({})".format(
            artnum, cut(author, 20), cut(subject, 42), lines)
        )
    s.quit()
|
BaesFr/Sick-Beard | refs/heads/development | lib/enzyme/asf.py | 180 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Thomas Schueppel <stain@acm.org>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
from exceptions import ParseError
import core
import logging
import string
import struct
# Public API of this module: only the Parser factory is exported.
__all__ = ['Parser']
# get logging object
log = logging.getLogger(__name__)
def _guid(input):
    """Convert a textual GUID ('XXXXXXXX-XXXX-...') into the unpacked
    tuple form used as values in GUIDS (Python 2: operates on byte
    strings)."""
    # Remove any '-'
    s = string.join(string.split(input, '-'), '')
    r = ''
    if len(s) != 32:
        # Malformed GUID text; return a value that matches nothing.
        return ''
    for i in range(0, 16):
        # Decode each hex byte pair into a raw byte.
        r += chr(int(s[2 * i:2 * i + 2], 16))
    guid = struct.unpack('>IHHBB6s', r)
    return guid
# Well-known ASF object/stream GUIDs (unpacked via _guid), used to
# dispatch while walking the ASF object tree in Asf._getnextheader.
GUIDS = {
'ASF_Header_Object' : _guid('75B22630-668E-11CF-A6D9-00AA0062CE6C'),
'ASF_Data_Object' : _guid('75B22636-668E-11CF-A6D9-00AA0062CE6C'),
'ASF_Simple_Index_Object' : _guid('33000890-E5B1-11CF-89F4-00A0C90349CB'),
'ASF_Index_Object' : _guid('D6E229D3-35DA-11D1-9034-00A0C90349BE'),
'ASF_Media_Object_Index_Object' : _guid('FEB103F8-12AD-4C64-840F-2A1D2F7AD48C'),
'ASF_Timecode_Index_Object' : _guid('3CB73FD0-0C4A-4803-953D-EDF7B6228F0C'),
'ASF_File_Properties_Object' : _guid('8CABDCA1-A947-11CF-8EE4-00C00C205365'),
'ASF_Stream_Properties_Object' : _guid('B7DC0791-A9B7-11CF-8EE6-00C00C205365'),
'ASF_Header_Extension_Object' : _guid('5FBF03B5-A92E-11CF-8EE3-00C00C205365'),
'ASF_Codec_List_Object' : _guid('86D15240-311D-11D0-A3A4-00A0C90348F6'),
'ASF_Script_Command_Object' : _guid('1EFB1A30-0B62-11D0-A39B-00A0C90348F6'),
'ASF_Marker_Object' : _guid('F487CD01-A951-11CF-8EE6-00C00C205365'),
'ASF_Bitrate_Mutual_Exclusion_Object' : _guid('D6E229DC-35DA-11D1-9034-00A0C90349BE'),
'ASF_Error_Correction_Object' : _guid('75B22635-668E-11CF-A6D9-00AA0062CE6C'),
'ASF_Content_Description_Object' : _guid('75B22633-668E-11CF-A6D9-00AA0062CE6C'),
'ASF_Extended_Content_Description_Object' : _guid('D2D0A440-E307-11D2-97F0-00A0C95EA850'),
'ASF_Content_Branding_Object' : _guid('2211B3FA-BD23-11D2-B4B7-00A0C955FC6E'),
'ASF_Stream_Bitrate_Properties_Object' : _guid('7BF875CE-468D-11D1-8D82-006097C9A2B2'),
'ASF_Content_Encryption_Object' : _guid('2211B3FB-BD23-11D2-B4B7-00A0C955FC6E'),
'ASF_Extended_Content_Encryption_Object' : _guid('298AE614-2622-4C17-B935-DAE07EE9289C'),
'ASF_Alt_Extended_Content_Encryption_Obj' : _guid('FF889EF1-ADEE-40DA-9E71-98704BB928CE'),
'ASF_Digital_Signature_Object' : _guid('2211B3FC-BD23-11D2-B4B7-00A0C955FC6E'),
'ASF_Padding_Object' : _guid('1806D474-CADF-4509-A4BA-9AABCB96AAE8'),
'ASF_Extended_Stream_Properties_Object' : _guid('14E6A5CB-C672-4332-8399-A96952065B5A'),
'ASF_Advanced_Mutual_Exclusion_Object' : _guid('A08649CF-4775-4670-8A16-6E35357566CD'),
'ASF_Group_Mutual_Exclusion_Object' : _guid('D1465A40-5A79-4338-B71B-E36B8FD6C249'),
'ASF_Stream_Prioritization_Object' : _guid('D4FED15B-88D3-454F-81F0-ED5C45999E24'),
'ASF_Bandwidth_Sharing_Object' : _guid('A69609E6-517B-11D2-B6AF-00C04FD908E9'),
'ASF_Language_List_Object' : _guid('7C4346A9-EFE0-4BFC-B229-393EDE415C85'),
'ASF_Metadata_Object' : _guid('C5F8CBEA-5BAF-4877-8467-AA8C44FA4CCA'),
'ASF_Metadata_Library_Object' : _guid('44231C94-9498-49D1-A141-1D134E457054'),
'ASF_Index_Parameters_Object' : _guid('D6E229DF-35DA-11D1-9034-00A0C90349BE'),
'ASF_Media_Object_Index_Parameters_Obj' : _guid('6B203BAD-3F11-4E84-ACA8-D7613DE2CFA7'),
'ASF_Timecode_Index_Parameters_Object' : _guid('F55E496D-9797-4B5D-8C8B-604DFE9BFB24'),
'ASF_Audio_Media' : _guid('F8699E40-5B4D-11CF-A8FD-00805F5C442B'),
'ASF_Video_Media' : _guid('BC19EFC0-5B4D-11CF-A8FD-00805F5C442B'),
'ASF_Command_Media' : _guid('59DACFC0-59E6-11D0-A3AC-00A0C90348F6'),
'ASF_JFIF_Media' : _guid('B61BE100-5B4E-11CF-A8FD-00805F5C442B'),
'ASF_Degradable_JPEG_Media' : _guid('35907DE0-E415-11CF-A917-00805F5C442B'),
'ASF_File_Transfer_Media' : _guid('91BD222C-F21C-497A-8B6D-5AA86BFC0185'),
'ASF_Binary_Media' : _guid('3AFB65E2-47EF-40F2-AC2C-70A90D71D343'),
'ASF_Web_Stream_Media_Subtype' : _guid('776257D4-C627-41CB-8F81-7AC7FF1C40CC'),
'ASF_Web_Stream_Format' : _guid('DA1E6B13-8359-4050-B398-388E965BF00C'),
'ASF_No_Error_Correction' : _guid('20FB5700-5B55-11CF-A8FD-00805F5C442B'),
'ASF_Audio_Spread' : _guid('BFC3CD50-618F-11CF-8BB2-00AA00B4E220')}
class Asf(core.AVContainer):
    """
    ASF video parser. The ASF format is also used for Microsoft Windows
    Media files like wmv.
    """
    def __init__(self, file):
        """Read the top-level ASF header object from *file* and walk all
        contained header objects, filling in the AVContainer attributes.
        Raises ParseError when *file* does not start with a valid
        ASF_Header_Object."""
        core.AVContainer.__init__(self)
        self.mime = 'video/x-ms-asf'
        self.type = 'asf format'
        # Scratch state used only while parsing (deleted at the end):
        # _languages: index -> language id string (from the language list)
        # _extinfo: stream id -> [bitrate, fps, langid, metadata dict]
        self._languages = []
        self._extinfo = {}
        h = file.read(30)
        if len(h) < 30:
            raise ParseError()
        (guidstr, objsize, objnum, reserved1, \
         reserved2) = struct.unpack('<16sQIBB', h)
        guid = self._parseguid(guidstr)
        if (guid != GUIDS['ASF_Header_Object']):
            raise ParseError()
        if reserved1 != 0x01 or reserved2 != 0x02:
            raise ParseError()
        log.debug(u'Header size: %d / %d objects' % (objsize, objnum))
        header = file.read(objsize - 30)
        for _ in range(0, objnum):
            # _getnextheader returns (guid, size); skip past each object.
            h = self._getnextheader(header)
            header = header[h[1]:]
        del self._languages
        del self._extinfo
    def _findstream(self, id):
        """Return the parsed video/audio stream with the given id
        (implicitly None when no stream matches)."""
        for stream in self.video + self.audio:
            if stream.id == id:
                return stream
    def _apply_extinfo(self, streamid):
        """Copy buffered extended info (bitrate, fps, language, metadata)
        onto the stream object; a no-op until both the stream and its
        extended info have been parsed."""
        stream = self._findstream(streamid)
        if not stream or streamid not in self._extinfo:
            return
        stream.bitrate, stream.fps, langid, metadata = self._extinfo[streamid]
        if langid is not None and langid >= 0 and langid < len(self._languages):
            stream.language = self._languages[langid]
        if metadata:
            stream._appendtable('ASFMETADATA', metadata)
    def _parseguid(self, string):
        """Unpack the first 16 bytes of *string* into the tuple form used
        by the GUIDS table."""
        return struct.unpack('<IHHBB6s', string[:16])
    def _parsekv(self, s):
        """Parse one Extended Content Description record at the start of
        *s*; returns (bytes consumed, descriptor name, decoded value)."""
        pos = 0
        (descriptorlen,) = struct.unpack('<H', s[pos:pos + 2])
        pos += 2
        descriptorname = s[pos:pos + descriptorlen]
        pos += descriptorlen
        descriptortype, valuelen = struct.unpack('<HH', s[pos:pos + 4])
        pos += 4
        descriptorvalue = s[pos:pos + valuelen]
        pos += valuelen
        # value stays None for unknown descriptor types.
        value = None
        if descriptortype == 0x0000:
            # Unicode string
            value = descriptorvalue
        elif descriptortype == 0x0001:
            # Byte Array
            value = descriptorvalue
        elif descriptortype == 0x0002:
            # Bool (?)
            value = struct.unpack('<I', descriptorvalue)[0] != 0
        elif descriptortype == 0x0003:
            # DWORD
            value = struct.unpack('<I', descriptorvalue)[0]
        elif descriptortype == 0x0004:
            # QWORD
            value = struct.unpack('<Q', descriptorvalue)[0]
        elif descriptortype == 0x0005:
            # WORD
            value = struct.unpack('<H', descriptorvalue)[0]
        else:
            log.debug(u'Unknown Descriptor Type %d' % descriptortype)
        return (pos, descriptorname, value)
    def _parsekv2(self, s):
        """Parse one Metadata Object record at the start of *s*; returns
        (bytes consumed, name, decoded value, stream number). Differs from
        _parsekv in record layout and in Bool being 16-bit."""
        pos = 0
        strno, descriptorlen, descriptortype, valuelen = struct.unpack('<2xHHHI', s[pos:pos + 12])
        pos += 12
        descriptorname = s[pos:pos + descriptorlen]
        pos += descriptorlen
        descriptorvalue = s[pos:pos + valuelen]
        pos += valuelen
        # value stays None for unknown descriptor types.
        value = None
        if descriptortype == 0x0000:
            # Unicode string
            value = descriptorvalue
        elif descriptortype == 0x0001:
            # Byte Array
            value = descriptorvalue
        elif descriptortype == 0x0002:
            # Bool
            value = struct.unpack('<H', descriptorvalue)[0] != 0
            pass
        elif descriptortype == 0x0003:
            # DWORD
            value = struct.unpack('<I', descriptorvalue)[0]
        elif descriptortype == 0x0004:
            # QWORD
            value = struct.unpack('<Q', descriptorvalue)[0]
        elif descriptortype == 0x0005:
            # WORD
            value = struct.unpack('<H', descriptorvalue)[0]
        else:
            log.debug(u'Unknown Descriptor Type %d' % descriptortype)
        return (pos, descriptorname, value, strno)
    def _getnextheader(self, s):
        """Parse one ASF object at the start of *s* and merge its
        information into this container. Returns the (guid bytes, object
        size) pair so the caller can skip to the next object."""
        r = struct.unpack('<16sQ', s[:24])
        (guidstr, objsize) = r
        guid = self._parseguid(guidstr)
        if guid == GUIDS['ASF_File_Properties_Object']:
            log.debug(u'File Properties Object')
            val = struct.unpack('<16s6Q4I', s[24:24 + 80])
            (fileid, size, date, packetcount, duration, \
             senddur, preroll, flags, minpack, maxpack, maxbr) = \
             val
            # FIXME: parse date to timestamp
            # duration is in 100-nanosecond units.
            self.length = duration / 10000000.0
        elif guid == GUIDS['ASF_Stream_Properties_Object']:
            log.debug(u'Stream Properties Object [%d]' % objsize)
            streamtype = self._parseguid(s[24:40])
            errortype = self._parseguid(s[40:56])
            offset, typelen, errorlen, flags = struct.unpack('<QIIH', s[56:74])
            # Low 7 bits carry the stream number, the top bit encryption.
            strno = flags & 0x7f
            encrypted = flags >> 15
            if encrypted:
                self._set('encrypted', True)
            if streamtype == GUIDS['ASF_Video_Media']:
                vi = core.VideoStream()
                vi.width, vi.height, depth, codec, = struct.unpack('<4xII2xH4s', s[89:89 + 20])
                vi.codec = codec
                vi.id = strno
                self.video.append(vi)
            elif streamtype == GUIDS['ASF_Audio_Media']:
                ai = core.AudioStream()
                twocc, ai.channels, ai.samplerate, bitrate, block, \
                ai.samplebits, = struct.unpack('<HHIIHH', s[78:78 + 16])
                ai.bitrate = 8 * bitrate
                ai.codec = twocc
                ai.id = strno
                self.audio.append(ai)
            # Merge any extended info that arrived before this object.
            self._apply_extinfo(strno)
        elif guid == GUIDS['ASF_Extended_Stream_Properties_Object']:
            streamid, langid, frametime = struct.unpack('<HHQ', s[72:84])
            (bitrate,) = struct.unpack('<I', s[40:40 + 4])
            if streamid not in self._extinfo:
                self._extinfo[streamid] = [None, None, None, {}]
            if frametime == 0:
                # Problaby VFR, report as 1000fps (which is what MPlayer does)
                frametime = 10000.0
            self._extinfo[streamid][:3] = [bitrate, 10000000.0 / frametime, langid]
            self._apply_extinfo(streamid)
        elif guid == GUIDS['ASF_Header_Extension_Object']:
            log.debug(u'ASF_Header_Extension_Object %d' % objsize)
            size = struct.unpack('<I', s[42:46])[0]
            data = s[46:46 + size]
            while len(data):
                # Recurse over the nested objects of the extension.
                log.debug(u'Sub:')
                h = self._getnextheader(data)
                data = data[h[1]:]
        elif guid == GUIDS['ASF_Codec_List_Object']:
            log.debug(u'List Object')
            pass
        elif guid == GUIDS['ASF_Error_Correction_Object']:
            log.debug(u'Error Correction')
            pass
        elif guid == GUIDS['ASF_Content_Description_Object']:
            log.debug(u'Content Description Object')
            # Five length-prefixed UTF-16 strings follow the lengths block.
            val = struct.unpack('<5H', s[24:24 + 10])
            pos = 34
            strings = []
            for i in val:
                ss = s[pos:pos + i].replace('\0', '').lstrip().rstrip()
                strings.append(ss)
                pos += i
            # Set empty strings to None
            strings = [x or None for x in strings]
            self.title, self.artist, self.copyright, self.caption, rating = strings
        elif guid == GUIDS['ASF_Extended_Content_Description_Object']:
            (count,) = struct.unpack('<H', s[24:26])
            pos = 26
            descriptor = {}
            for i in range(0, count):
                # Read additional content descriptors
                d = self._parsekv(s[pos:])
                pos += d[0]
                descriptor[d[1]] = d[2]
            self._appendtable('ASFDESCRIPTOR', descriptor)
        elif guid == GUIDS['ASF_Metadata_Object']:
            (count,) = struct.unpack('<H', s[24:26])
            pos = 26
            streams = {}
            for i in range(0, count):
                # Read additional content descriptors
                size, key, value, strno = self._parsekv2(s[pos:])
                if strno not in streams:
                    streams[strno] = {}
                streams[strno][key] = value
                pos += size
            for strno, metadata in streams.items():
                if strno not in self._extinfo:
                    self._extinfo[strno] = [None, None, None, {}]
                self._extinfo[strno][3].update(metadata)
                self._apply_extinfo(strno)
        elif guid == GUIDS['ASF_Language_List_Object']:
            count = struct.unpack('<H', s[24:26])[0]
            pos = 26
            for i in range(0, count):
                # Each entry: 1-byte length + UTF-16 language id string.
                idlen = struct.unpack('<B', s[pos:pos + 1])[0]
                idstring = s[pos + 1:pos + 1 + idlen]
                idstring = unicode(idstring, 'utf-16').replace('\0', '')
                log.debug(u'Language: %d/%d: %r' % (i + 1, count, idstring))
                self._languages.append(idstring)
                pos += 1 + idlen
        elif guid == GUIDS['ASF_Stream_Bitrate_Properties_Object']:
            # This record contains stream bitrate with payload overhead. For
            # audio streams, we should have the average bitrate from
            # ASF_Stream_Properties_Object. For video streams, we get it from
            # ASF_Extended_Stream_Properties_Object. So this record is not
            # used.
            pass
        elif guid == GUIDS['ASF_Content_Encryption_Object'] or \
             guid == GUIDS['ASF_Extended_Content_Encryption_Object']:
            self._set('encrypted', True)
        else:
            # Just print the type:
            for h in GUIDS.keys():
                if GUIDS[h] == guid:
                    log.debug(u'Unparsed %r [%d]' % (h, objsize))
                    break
            else:
                u = "%.8X-%.4X-%.4X-%.2X%.2X-%s" % guid
                log.debug(u'unknown: len=%d [%d]' % (len(u), objsize))
        return r
class AsfAudio(core.AudioStream):
    """
    ASF audio parser for wma files.
    """
    def __init__(self):
        core.AudioStream.__init__(self)
        # Audio-only ASF shares the container mime/type with the AV variant.
        self.mime = 'audio/x-ms-asf'
        self.type = 'asf format'
def Parser(file):
    """
    Wrapper around audio and av content.
    Returns the parsed Asf container when it has video streams or no audio
    at all; otherwise copies the parsed attributes onto an AsfAudio object
    so pure-audio (wma) files are exposed as audio content.
    """
    asf = Asf(file)
    if not len(asf.audio) or len(asf.video):
        # AV container
        return asf
    # No video but audio streams: re-expose the result as audio content.
    audio = AsfAudio()
    for key in audio._keys:
        if key in asf._keys:
            # Only fill attributes the audio object doesn't already have.
            if not getattr(audio, key, None):
                setattr(audio, key, getattr(asf, key))
    return audio
|
VinceZK/phantomjs | refs/heads/decktape | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/urlfetcher.py | 165 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Wrapper module for fetching URLs."""
import urllib
class UrlFetcher(object):
    """Class with restricted interface to fetch URLs (makes testing easier)"""
    def __init__(self, filesystem):
        # filesystem: an object providing open_binary_tempfile(), used so
        # tests can substitute an in-memory filesystem.
        self._filesystem = filesystem
    def fetch(self, url):
        """Fetches the contents of the URL as a string."""
        file_object = urllib.urlopen(url)
        try:
            return file_object.read()
        finally:
            # Close the connection even when read() raises (the original
            # code leaked it on failure).
            file_object.close()
    def fetch_into_file(self, url):
        """Fetches the contents of the URL into a temporary file and return the filename.
        This is the equivalent of urllib.retrieve() except that we don't return any headers.
        """
        # Fetch before opening the temp file so a download failure doesn't
        # leave a stray, half-written temporary file behind.
        contents = self.fetch(url)
        file_object, filename = self._filesystem.open_binary_tempfile('-fetched')
        try:
            file_object.write(contents)
        finally:
            file_object.close()
        return filename
|
LinuxChristian/home-assistant | refs/heads/dev | homeassistant/components/alarm_control_panel/alarmdotcom.py | 8 | """
Interfaces with Alarm.com alarm control panels.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.alarmdotcom/
"""
import logging
import asyncio
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_UNKNOWN, CONF_CODE,
CONF_NAME)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
# Third-party library Home Assistant installs for this platform.
REQUIREMENTS = ['pyalarmdotcom==0.3.0']
_LOGGER = logging.getLogger(__name__)
# Entity name used when the user doesn't configure one.
DEFAULT_NAME = 'Alarm.com'
# Extend the shared alarm platform schema with this platform's options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_PASSWORD): cv.string,
    vol.Required(CONF_USERNAME): cv.string,
    vol.Optional(CONF_CODE): cv.positive_int,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up a Alarm.com control panel."""
    name = config.get(CONF_NAME)
    code = config.get(CONF_CODE)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    alarmdotcom = AlarmDotCom(hass, name, code, username, password)
    # Log in before registering the entity so the first state poll works.
    yield from alarmdotcom.async_login()
    async_add_devices([alarmdotcom])
class AlarmDotCom(alarm.AlarmControlPanel):
    """Represent an Alarm.com status."""
    def __init__(self, hass, name, code, username, password):
        """Initialize the Alarm.com status."""
        from pyalarmdotcom import Alarmdotcom
        _LOGGER.debug('Setting up Alarm.com...')
        self._hass = hass
        self._name = name
        # Normalize the (numeric) configured code to a string, or None
        # when no code is required.
        self._code = str(code) if code else None
        self._username = username
        self._password = password
        self._websession = async_get_clientsession(self._hass)
        self._state = STATE_UNKNOWN
        self._alarm = Alarmdotcom(username,
                                  password,
                                  self._websession,
                                  hass.loop)
    @asyncio.coroutine
    def async_login(self):
        """Login to Alarm.com."""
        yield from self._alarm.async_login()
    @asyncio.coroutine
    def async_update(self):
        """Fetch the latest state."""
        yield from self._alarm.async_update()
        # NOTE(review): entity update hooks normally return None; the
        # returned value appears unused — confirm before relying on it.
        return self._alarm.state
    @property
    def name(self):
        """Return the name of the alarm."""
        return self._name
    @property
    def code_format(self):
        """One or more characters if code is defined."""
        # Regex the frontend uses to validate code input.
        return None if self._code is None else '.+'
    @property
    def state(self):
        """Return the state of the device."""
        # Map Alarm.com's textual states onto Home Assistant constants.
        if self._alarm.state.lower() == 'disarmed':
            return STATE_ALARM_DISARMED
        elif self._alarm.state.lower() == 'armed stay':
            return STATE_ALARM_ARMED_HOME
        elif self._alarm.state.lower() == 'armed away':
            return STATE_ALARM_ARMED_AWAY
        return STATE_UNKNOWN
    @asyncio.coroutine
    def async_alarm_disarm(self, code=None):
        """Send disarm command."""
        if self._validate_code(code):
            yield from self._alarm.async_alarm_disarm()
    @asyncio.coroutine
    def async_alarm_arm_home(self, code=None):
        """Send arm home command."""
        if self._validate_code(code):
            yield from self._alarm.async_alarm_arm_home()
    @asyncio.coroutine
    def async_alarm_arm_away(self, code=None):
        """Send arm away command."""
        if self._validate_code(code):
            yield from self._alarm.async_alarm_arm_away()
    def _validate_code(self, code):
        """Validate given code.

        True when no code is configured or the given code matches.
        """
        check = self._code is None or code == self._code
        if not check:
            _LOGGER.warning('Wrong code entered.')
        return check
|
multikatt/CouchPotatoServer | refs/heads/master | couchpotato/core/downloaders/putio/__init__.py | 34 | from .main import PutIO
def autoload():
    """Plugin entry point: instantiate the put.io downloader."""
    return PutIO()
# Downloader metadata consumed by CouchPotato's settings UI: one entry
# on the downloaders tab describing put.io and its configurable options.
# (Fixes user-facing typos: "first first", "sucessful", "it's".)
config = [{
    'name': 'putio',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'putio',
            'label': 'put.io',
            'description': 'This will start a torrent download on <a href="http://put.io">Put.io</a>.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'oauth_token',
                    'label': 'oauth_token',
                    'description': 'This is the OAUTH_TOKEN from your putio API',
                    'advanced': True,
                },
                {
                    'name': 'folder',
                    'description': ('The folder on putio where you want the upload to go', 'Will find the first folder that matches this name'),
                    'default': 0,
                },
                {
                    'name': 'callback_host',
                    'description': 'External reachable url to CP so put.io can do its thing',
                },
                {
                    'name': 'download',
                    'description': 'Set this to have CouchPotato download the file from Put.io',
                    'type': 'bool',
                    'default': 0,
                },
                {
                    'name': 'delete_file',
                    'description': ('Set this to remove the file from putio after successful download', 'Does nothing if you don\'t select download'),
                    'type': 'bool',
                    'default': 0,
                },
                {
                    'name': 'download_dir',
                    'type': 'directory',
                    'label': 'Download Directory',
                    'description': 'The Directory to download files to, does nothing if you don\'t select download',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
PaulMcMillan/shmoocon_2014_talk | refs/heads/master | caravan/caravan/dashboards/tasks/rdp/urls.py | 40 | from django.conf.urls.defaults import patterns # noqa
from django.conf.urls.defaults import url # noqa
from .views import IndexView
# Route the dashboard panel's root URL to the RDP tasks index view.
urlpatterns = patterns('',
    url(r'^$', IndexView.as_view(), name='index'),
)
|
levilucio/SyVOLT | refs/heads/master | UMLRT2Kiltera_MM/MT_post__Capsule.py | 1 | """
__MT_post__Capsule.py_____________________________________________________
Automatically generated AToM3 syntactic object (DO NOT MODIFY DIRECTLY)
Author: gehan
Modified: Sun Feb 15 10:31:26 2015
__________________________________________________________________________
"""
from ASGNode import *
from ATOM3Type import *
from ATOM3Text import *
from ATOM3String import *
from graph_MT_post__Capsule import *
class MT_post__Capsule(ASGNode, ATOM3Type):
    # AToM3-generated ASG node for the 'Capsule' element of the
    # post-condition (RHS) metamodel.  This is generated code (see the file
    # header: DO NOT MODIFY DIRECTLY) -- manual edits are lost when the
    # metamodel is regenerated.

    def __init__(self, parent = None):
        # Initialize base classes, generated attributes, and attribute
        # display order for the AToM3 editor.
        ASGNode.__init__(self)
        ATOM3Type.__init__(self)
        # Post-condition counterparts of this element's supertypes.
        self.superTypes = ['MT_post__NamedElement', 'MT_post__MetaModelElement_S']
        self.graphClass_ = graph_MT_post__Capsule
        self.isGraphObjectVisual = True

        if(hasattr(self, '_setHierarchicalLink')):
            self._setHierarchicalLink(False)

        if(hasattr(self, '_setHierarchicalNode')):
            self._setHierarchicalNode(False)

        self.parent = parent
        # NOTE(review): the generator emits each attribute assignment three
        # times (presumably once per inherited metamodel level); only the
        # last assignment to each name takes effect.
        self.MT_post__cardinality=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__cardinality=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__cardinality=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__classtype=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__classtype=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__classtype=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__name=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__name=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_post__name=ATOM3Text('\n#===============================================================================\n# You can access the value of the current node\'s attribute value by: attr_value.\n# If the current node shall be created you MUST initialize it here!\n# You can access a node labelled n by: PreNode(\'n\').\n# To access attribute x of node n, use: PreNode(\'n\')[\'x\'].\n# Note that the attribute values are those before the match is rewritten.\n# The order in which this code is executed depends on the label value\n# of the encapsulating node.\n# The given action must return the new value of the attribute.\n#===============================================================================\n\nreturn attr_value\n', 80,15 )
        self.MT_label__=ATOM3String('', 20)
        self.MT_pivotOut__=ATOM3String('', 20)
        # Duplicate keys collapse in the dict literal; kept as generated.
        self.generatedAttributes = {'MT_post__cardinality': ('ATOM3Text', ),
                                    'MT_post__cardinality': ('ATOM3Text', ),
                                    'MT_post__cardinality': ('ATOM3Text', ),
                                    'MT_post__classtype': ('ATOM3Text', ),
                                    'MT_post__classtype': ('ATOM3Text', ),
                                    'MT_post__classtype': ('ATOM3Text', ),
                                    'MT_post__name': ('ATOM3Text', ),
                                    'MT_post__name': ('ATOM3Text', ),
                                    'MT_post__name': ('ATOM3Text', ),
                                    'MT_label__': ('ATOM3String', ),
                                    'MT_pivotOut__': ('ATOM3String', ) }
        # Display order of attributes in the editor; duplicates as generated.
        self.realOrder = ['MT_post__cardinality','MT_post__cardinality','MT_post__cardinality','MT_post__classtype','MT_post__classtype','MT_post__classtype','MT_post__name','MT_post__name','MT_post__name','MT_label__','MT_pivotOut__']
        # Per-attribute flag: 1 = directly editable in the editor.
        self.directEditing = [0,0,0,0,0,0,0,0,0,1,1]

    def clone(self):
        # Deep-copy this node: clone every attribute plus the ASG actions.
        cloneObject = MT_post__Capsule( self.parent )
        for atr in self.realOrder:
            cloneObject.setAttrValue(atr, self.getAttrValue(atr).clone() )
        ASGNode.cloneActions(self, cloneObject)
        return cloneObject

    def copy(self, other):
        # Shallow-copy attribute values and ASG state from *other*.
        ATOM3Type.copy(self, other)
        for atr in self.realOrder:
            self.setAttrValue(atr, other.getAttrValue(atr) )
        ASGNode.copy(self, other)

    def preCondition (self, actionID, * params):
        # Delegate the pre-condition check to the graphical object, if any.
        if self.graphObject_:
            return self.graphObject_.preCondition(actionID, params)
        else: return None

    def postCondition (self, actionID, * params):
        # Delegate the post-condition check to the graphical object, if any.
        if self.graphObject_:
            return self.graphObject_.postCondition(actionID, params)
        else: return None

    def preAction (self, actionID, * params):
        # On CREATE, auto-number the node label before delegating.
        if actionID == self.CREATE:
            self.autoIncrLabel(params)
        if self.graphObject_:
            return self.graphObject_.preAction(actionID, params)
        else: return None

    def postAction (self, actionID, * params):
        # Delegate post-action hooks to the graphical object, if any.
        if self.graphObject_:
            return self.graphObject_.postAction(actionID, params)
        else: return None

    def QOCA(self, params):
        """
        QOCA Constraint Template
        NOTE: DO NOT select a POST/PRE action trigger
        Constraints will be added/removed in a logical manner by other mechanisms.
        """
        # Everything below the early return is template code that is
        # intentionally unreachable until the return is removed.
        return # <---- Remove this to use QOCA

        """ Get the high level constraint helper and solver """
        from Qoca.atom3constraints.OffsetConstraints import OffsetConstraints
        oc = OffsetConstraints(self.parent.qocaSolver)

        """
        Example constraint, see Kernel/QOCA/atom3constraints/OffsetConstraints.py
        For more types of constraints
        """
        oc.fixedWidth(self.graphObject_, self.graphObject_.sizeX)
        oc.fixedHeight(self.graphObject_, self.graphObject_.sizeY)

    def autoIncrLabel(self, params):
        #===============================================================================
        # Auto increment the label
        #===============================================================================
        # If there is already one, ignore
        if not self.MT_label__.isNone(): return

        # Get the maximum label of all MT_pre__ elements
        label = 0
        for nt in self.parent.ASGroot.listNodes:
            if nt.startswith('MT_post__'):
                for node in self.parent.ASGroot.listNodes[nt]:
                    currLabel = 0
                    try:
                        currLabel = int(node.MT_label__.getValue())
                    except:
                        # Non-numeric labels are simply ignored.
                        pass
                    if currLabel > label:
                        label = currLabel

        # The label of this instance will be the max label + 1
        self.MT_label__.setValue(str(label + 1))
|
cetic/ansible | refs/heads/devel | lib/ansible/plugins/lookup/consul_kv.py | 63 | # (c) 2015, Steve Gargan <steve.gargan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Lookup plugin to grab metadata from a consul key value store.
============================================================
Plugin will lookup metadata for a playbook from the key value store in a
consul cluster. Values can be easily set in the kv store with simple rest
commands e.g.
curl -X PUT -d 'some-value' http://localhost:8500/v1/kv/ansible/somedata
this can then be looked up in a playbook as follows
- debug: msg='key contains {{item}}'
with_consul_kv:
- 'key/to/retrieve'
Parameters can be provided after the key be more specific about what to retrieve e.g.
- debug: msg='key contains {{item}}'
with_consul_kv:
- 'key/to recurse=true token=E6C060A9-26FB-407A-B83E-12DDAFCB4D98')}}'
recurse: if true, will retrieve all the values that have the given key as prefix
index: if the key has a value with the specified index then this is returned
allowing access to historical values.
token: acl token to allow access to restricted values.
By default this will lookup keys via the consul agent running on http://localhost:8500
this can be changed by setting the env variable 'ANSIBLE_CONSUL_URL' to point to the url
of the kv store you'd like to use.
'''
######################################################################
import os
import sys
from urlparse import urlparse
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
try:
import json
except ImportError:
import simplejson as json
try:
import consul
HAS_CONSUL = True
except ImportError as e:
HAS_CONSUL = False
class LookupModule(LookupBase):

    def __init__(self, loader=None, templar=None, **kwargs):
        """Initialise the lookup and resolve the consul agent url.

        Defaults to the local agent; overridden by the ANSIBLE_CONSUL_URL
        environment variable when set.
        """
        super(LookupModule, self).__init__(loader, templar, **kwargs)

        self.agent_url = 'http://localhost:8500'
        if os.getenv('ANSIBLE_CONSUL_URL') is not None:
            self.agent_url = os.environ['ANSIBLE_CONSUL_URL']

    def run(self, terms, variables=None, **kwargs):
        """Look up each term in the consul kv store and return the values."""
        if not HAS_CONSUL:
            raise AnsibleError('python-consul is required for consul_kv lookup. see http://python-consul.readthedocs.org/en/latest/#installation')

        u = urlparse(self.agent_url)
        consul_api = consul.Consul(host=u.hostname, port=u.port)

        values = []
        try:
            for term in terms:
                params = self.parse_params(term)
                results = consul_api.kv.get(params['key'],
                                            token=params['token'],
                                            index=params['index'],
                                            recurse=params['recurse'])
                if results[1]:
                    # Consul responds with a single result map, or a list of
                    # them when recurse=true.
                    if isinstance(results[1], list):
                        for r in results[1]:
                            values.append(r['Value'])
                    else:
                        values.append(results[1]['Value'])
        except Exception as e:
            raise AnsibleError(
                "Error locating '%s' in kv store. Error was %s" % (term, e))

        return values

    def parse_params(self, term):
        """Split a lookup term into the key and its optional parameters.

        Returns a dict with 'key', 'token', 'recurse' and 'index' entries;
        raises AnsibleError for malformed or unknown parameters.
        """
        params = term.split(' ')
        paramvals = {
            'key': params[0],
            'token': None,
            'recurse': False,
            'index': None
        }

        # Remaining whitespace-separated tokens are name=value parameters.
        for param in params[1:]:
            if not param:
                continue
            try:
                # split on the first '=' only, so values may contain '='.
                name, value = param.split('=', 1)
            except ValueError as e:
                raise AnsibleError(e)
            # The original used 'assert' here, which is stripped under
            # 'python -O' and would silently accept bad parameters.
            if name not in paramvals:
                raise AnsibleError("%s not a valid consul lookup parameter" % name)
            paramvals[name] = value

        return paramvals
|
ping/instagram_private_api | refs/heads/master | tests/web/upload.py | 1 | import unittest
import time
try:
# python 2.x
from urllib2 import urlopen
except ImportError:
# python 3.x
from urllib.request import urlopen
import json
from ..common import WebApiTestBase, MockResponse, compat_mock
class UploadTests(WebApiTestBase):
    """Tests for ClientCompatPatch."""

    @staticmethod
    def init_all(api):
        # Build the list of test-case descriptors consumed by the suite runner.
        return [
            {
                'name': 'test_post_photo',
                'test': UploadTests('test_post_photo', api),
            },
            {
                'name': 'test_post_photo_mock',
                'test': UploadTests('test_post_photo_mock', api),
            },
        ]

    @unittest.skip('Modifies data')
    def test_post_photo(self):
        # Live test (skipped by default): uploads a real photo through the API.
        sample_url = 'https://c1.staticflickr.com/5/4103/5059663679_85a7ec3f63_b.jpg'
        res = urlopen(sample_url)
        photo_data = res.read()
        results = self.api.post_photo(photo_data, caption='Feathers #feathers')
        self.assertEqual(results.get('status'), 'ok')
        self.assertIsNotNone(results.get('media'))

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_post_photo_mock(self, make_request):
        # Fully mocked upload: patches the HTTP opener, time, the multipart
        # boundary randomiser and the response reader, then verifies the exact
        # request URL, body and headers that post_photo() constructs.
        ts_now = time.time()
        make_request.return_value = {'status': 'ok', 'upload_id': '123456789'}
        with compat_mock.patch(
                'instagram_web_api.client.compat_urllib_request.OpenerDirector.open') as opener, \
                compat_mock.patch('instagram_web_api.client.time.time') as time_mock, \
                compat_mock.patch('instagram_web_api.client.random.choice') as rand_choice, \
                compat_mock.patch('instagram_web_api.Client._read_response') as read_response, \
                compat_mock.patch(
                    'instagram_web_api.client.compat_urllib_request.Request') as request:
            opener.return_value = MockResponse()
            # Freeze time so the generated upload_id is predictable.
            time_mock.return_value = ts_now
            # Fix the multipart boundary to 'x' * 16.
            rand_choice.return_value = 'x'
            # add rhx_gis so that we can reuse the same response for init and uploading
            read_response.return_value = json.dumps(
                {'status': 'ok', 'upload_id': '123456789', 'rhx_gis': '22aea71b163e335a0ad4479549b530d7'},
                separators=(',', ':')
            )
            self.api.post_photo('...'.encode('ascii'), caption='Test')

            # Expected request headers for the upload endpoint.
            headers = {
                'Accept-Language': 'en-US',
                'Accept-Encoding': 'gzip, deflate',
                'Origin': 'https://www.instagram.com',
                'x-csrftoken': self.api.csrftoken,
                'x-instagram-ajax': '1',
                'Accept': '*/*',
                'User-Agent': self.api.mobile_user_agent,
                'Referer': 'https://www.instagram.com/create/details/',
                'x-requested-with': 'XMLHttpRequest',
                'Connection': 'close',
                'Content-Type': 'application/x-www-form-urlencoded'}
            # Expected multipart/form-data body, with the frozen boundary and
            # the upload_id derived from the frozen timestamp.
            body = '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="upload_id"\r\n\r\n' \
                   '{upload_id}\r\n' \
                   '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="media_type"\r\n\r\n1\r\n' \
                   '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="photo"; filename="photo.jpg"\r\n' \
                   'Content-Type: application/octet-stream\r\n' \
                   'Content-Transfer-Encoding: binary\r\n\r\n...\r\n' \
                   '--{boundary}--\r\n'.format(
                       boundary='----WebKitFormBoundary{}'.format('x' * 16),
                       upload_id=int(ts_now * 1000))
            request.assert_called_with(
                'https://www.instagram.com/create/upload/photo/',
                body.encode('utf-8'), headers=headers)
|
hackersql/sq1map | refs/heads/master | plugins/dbms/sqlite/__init__.py | 3 | #!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import DBMS
from lib.core.settings import SQLITE_SYSTEM_DBS
from lib.core.unescaper import unescaper
from plugins.dbms.sqlite.enumeration import Enumeration
from plugins.dbms.sqlite.filesystem import Filesystem
from plugins.dbms.sqlite.fingerprint import Fingerprint
from plugins.dbms.sqlite.syntax import Syntax
from plugins.dbms.sqlite.takeover import Takeover
from plugins.generic.misc import Miscellaneous
class SQLiteMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
    """
    This class defines SQLite methods
    """

    def __init__(self):
        """Set up SQLite defaults and initialise every capability mixin."""
        # System databases are excluded from enumeration by default.
        self.excludeDbsList = SQLITE_SYSTEM_DBS
        # Initialise each mixin explicitly, in base-class order.
        for mixin in (Syntax, Fingerprint, Enumeration,
                      Filesystem, Miscellaneous, Takeover):
            mixin.__init__(self)
unescaper[DBMS.SQLITE] = Syntax.escape
|
trankmichael/numpy | refs/heads/master | numpy/distutils/extension.py | 162 | """distutils.extension
Provides the Extension class, used to describe C/C++ extension
modules in setup scripts.
Overridden to support f2py.
"""
from __future__ import division, absolute_import, print_function
import sys
import re
from distutils.extension import Extension as old_Extension
if sys.version_info[0] >= 3:
basestring = str
cxx_ext_re = re.compile(r'.*[.](cpp|cxx|cc)\Z', re.I).match
fortran_pyf_ext_re = re.compile(r'.*[.](f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match
class Extension(old_Extension):
    """distutils Extension subclass with f2py / Fortran support."""

    def __init__(self, name, sources,
                 include_dirs=None,
                 define_macros=None,
                 undef_macros=None,
                 library_dirs=None,
                 libraries=None,
                 runtime_library_dirs=None,
                 extra_objects=None,
                 extra_compile_args=None,
                 extra_link_args=None,
                 export_symbols=None,
                 swig_opts=None,
                 depends=None,
                 language=None,
                 f2py_options=None,
                 module_dirs=None,
                 extra_f77_compile_args=None,
                 extra_f90_compile_args=None,
                 ):
        # Pass an empty source list to the base class, then assign the real
        # sources directly: this bypasses the base-class assertion that all
        # sources are strings.
        old_Extension.__init__(self, name, [],
                               include_dirs,
                               define_macros,
                               undef_macros,
                               library_dirs,
                               libraries,
                               runtime_library_dirs,
                               extra_objects,
                               extra_compile_args,
                               extra_link_args,
                               export_symbols)
        self.sources = sources

        # Python 2.4 distutils feature: swig options.  Accept (with a
        # warning) a single string where a list is expected.
        self.swig_opts = swig_opts or []
        if isinstance(self.swig_opts, basestring):
            import warnings
            warnings.warn("swig_opts is specified as a string instead of a list",
                          SyntaxWarning)
            self.swig_opts = self.swig_opts.split()

        # Python 2.3 distutils features.
        self.depends = depends or []
        self.language = language

        # numpy_distutils-specific features.
        self.f2py_options = f2py_options or []
        self.module_dirs = module_dirs or []
        self.extra_f77_compile_args = extra_f77_compile_args or []
        self.extra_f90_compile_args = extra_f90_compile_args or []

    def has_cxx_sources(self):
        """Return True if any source file looks like C++."""
        return any(cxx_ext_re(str(source)) for source in self.sources)

    def has_f2py_sources(self):
        """Return True if any source file requires f2py processing."""
        return any(fortran_pyf_ext_re(source) for source in self.sources)
# class Extension
|
BigBrother1984/android_external_chromium_org | refs/heads/kitkat | chrome/tools/build/win/scan_server_dlls.py | 79 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script used to scan for server DLLs at build time and build a header
included by setup.exe. This header contains an array of the names of
the DLLs that need registering at install time.
"""
import ConfigParser
import glob
import optparse
import os
import sys
CHROME_DIR = "Chrome-bin"
SERVERS_DIR = "servers"
GENERATED_DLL_INCLUDE_FILE_NAME = "registered_dlls.h"
GENERATED_DLL_INCLUDE_FILE_CONTENTS = """
// This file is automatically generated by scan_server_dlls.py.
// It contains the list of COM server dlls that need registering at
// install time.
#include "base/basictypes.h"
namespace {
const wchar_t* kDllsToRegister[] = { %s };
const int kNumDllsToRegister = %d;
}
"""
def Readconfig(output_dir, input_file):
  """Reads config information from input file after setting default values of
  global variables.

  Args:
    output_dir: build output directory.  NOTE(review): currently unused in
        this function; kept for the call-site signature -- confirm before
        removing.
    input_file: path to the chrome.release-style config file to parse.

  Returns:
    A ConfigParser.SafeConfigParser loaded with input_file.
  """
  variables = {}
  variables['ChromeDir'] = CHROME_DIR
  # Use a bogus version number, we don't really care what it is, we just
  # want to find the files that would get picked up from chrome.release,
  # and don't care where the installer archive task ends up putting them.
  variables['VersionDir'] = os.path.join(variables['ChromeDir'],
                                         '0.0.0.0')
  config = ConfigParser.SafeConfigParser(variables)
  print "Reading input_file: " + input_file
  config.read(input_file)
  return config
def CreateRegisteredDllIncludeFile(registered_dll_list, header_output_dir):
""" Outputs the header file included by the setup project that
contains the names of the DLLs to be registered at installation
time.
"""
output_file = os.path.join(header_output_dir, GENERATED_DLL_INCLUDE_FILE_NAME)
dll_array_string = ""
for dll in registered_dll_list:
dll.replace("\\", "\\\\")
if dll_array_string:
dll_array_string += ', '
dll_array_string += "L\"%s\"" % dll
if len(registered_dll_list) == 0:
contents = GENERATED_DLL_INCLUDE_FILE_CONTENTS % ("L\"\"", 0)
else:
contents = GENERATED_DLL_INCLUDE_FILE_CONTENTS % (dll_array_string,
len(registered_dll_list))
# Don't rewrite the header file if we don't need to.
try:
old_file = open(output_file, 'r')
except EnvironmentError:
old_contents = None
else:
old_contents = old_file.read()
old_file.close()
if contents != old_contents:
print 'Updating server dll header: ' + str(output_file)
open(output_file, 'w').write(contents)
def ScanServerDlls(config, distribution, output_dir):
  """Scans for DLLs in the specified section of config that are in the
  subdirectory of output_dir named SERVERS_DIR. Returns a list of only the
  filename components of the paths to all matching DLLs.
  """
  print "Scanning for server DLLs in " + output_dir

  registered_dll_list = []
  ScanDllsInSection(config, 'GENERAL', output_dir, registered_dll_list)
  if distribution:
    # Distribution names may carry a leading underscore (e.g.
    # '_google_chrome'); strip it before deriving the section name.
    if len(distribution) > 1 and distribution[0] == '_':
      distribution = distribution[1:]
    ScanDllsInSection(config, distribution.upper(), output_dir,
                      registered_dll_list)

  return registered_dll_list
def ScanDllsInSection(config, section, output_dir, registered_dll_list):
"""Scans for DLLs in the specified section of config that are in the
subdirectory of output_dir named SERVERS_DIR. Appends the file name of all
matching dlls to registered_dll_list.
"""
for option in config.options(section):
if option.endswith('dir'):
continue
dst = config.get(section, option)
(x, src_folder) = os.path.split(dst)
for file in glob.glob(os.path.join(output_dir, option)):
if option.startswith(SERVERS_DIR):
(x, file_name) = os.path.split(file)
if file_name.lower().endswith('.dll'):
print "Found server DLL file: " + file_name
registered_dll_list.append(file_name)
def RunSystemCommand(cmd):
  """Runs |cmd| through the shell; raises RuntimeError if it exits non-zero.

  BUG FIX: the original used 'raise "..."'; string exceptions were removed
  in Python 2.6, so the raise itself fails with a TypeError.
  """
  if os.system(cmd) != 0:
    raise RuntimeError("Error while running cmd: %s" % cmd)
def main():
  """Main method that reads input file, scans <build_output>\servers for
  matches to files described in the input file. A header file for the
  setup project is then generated.
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('-o', '--output_dir', help='Build Output directory')
  option_parser.add_option('-x', '--header_output_dir',
      help='Location where the generated header file will be placed.')
  option_parser.add_option('-i', '--input_file', help='Input file')
  option_parser.add_option('-d', '--distribution',
      help='Name of Chromium Distribution. Optional.')

  options, args = option_parser.parse_args()
  # Parse the release config, find server DLLs in the build output, and
  # write the generated registration header.
  config = Readconfig(options.output_dir, options.input_file)
  registered_dll_list = ScanServerDlls(config, options.distribution,
                                       options.output_dir)
  CreateRegisteredDllIncludeFile(registered_dll_list,
                                 options.header_output_dir)
  return 0

if '__main__' == __name__:
  sys.exit(main())
|
lgarren/spack | refs/heads/develop | var/spack/repos/builtin/packages/ghostscript-fonts/package.py | 3 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
class GhostscriptFonts(Package):
    """Ghostscript Fonts"""

    homepage = "http://ghostscript.com/"
    url = "https://www.imagemagick.org/download/delegates/ghostscript-fonts-std-8.11.tar.gz"

    version('8.11', '6865682b095f8c4500c54b285ff05ef6')

    def install(self, spec, prefix):
        """Copy every font file from the source tree into <prefix>/share/font."""
        font_dir = join_path(prefix.share, 'font')
        mkdirp(font_dir)
        for entry in glob.glob('*'):
            # Skip Spack's own build artifacts.
            if entry.startswith('spack-build'):
                continue
            install(entry, font_dir)
|
makaaso/aws-lambda-tools | refs/heads/master | autoscaling-adjust-size/docutils/parsers/rst/languages/gl.py | 130 | # -*- coding: utf-8 -*-
# Author: David Goodger
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 4229 $
# Date: $Date: 2005-12-23 00:46:16 +0100 (Fri, 23 Dec 2005) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Galician-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
# Maps Galician directive names (keys, with unicode escapes for accented
# characters) to the canonical reStructuredText directive names (values).
directives = {
    # language-dependent: fixed
    u'atenci\u00f3n': 'attention',
    u'advertencia': 'caution',
    u'code (translation required)': 'code',
    u'perigo': 'danger',
    u'erro': 'error',
    u'pista': 'hint',
    u'importante': 'important',
    u'nota': 'note',
    u'consello': 'tip',
    u'aviso': 'warning',
    u'admonici\u00f3n': 'admonition',
    u'barra lateral': 'sidebar',
    u't\u00f3pico': 'topic',
    u'bloque-li\u00f1a': 'line-block',
    u'literal-analizado': 'parsed-literal',
    u'r\u00fabrica': 'rubric',
    u'ep\u00edgrafe': 'epigraph',
    u'realzados': 'highlights',
    u'coller-citaci\u00f3n': 'pull-quote',
    u'compor': 'compound',
    u'recipiente': 'container',
    #'questions': 'questions',
    u't\u00e1boa': 'table',
    u't\u00e1boa-csv': 'csv-table',
    u't\u00e1boa-listaxe': 'list-table',
    #'qa': 'questions',
    #'faq': 'questions',
    u'meta': 'meta',
    'math (translation required)': 'math',
    #'imagemap': 'imagemap',
    u'imaxe': 'image',
    u'figura': 'figure',
    u'inclu\u00edr': 'include',
    u'cru': 'raw',
    u'substitu\u00edr': 'replace',
    u'unicode': 'unicode',
    u'data': 'date',
    u'clase': 'class',
    u'regra': 'role',
    u'regra-predeterminada': 'default-role',
    u't\u00edtulo': 'title',
    u'contido': 'contents',
    u'seccnum': 'sectnum',
    u'secci\u00f3n-numerar': 'sectnum',
    u'cabeceira': 'header',
    u'p\u00e9 de p\u00e1xina': 'footer',
    #'footnotes': 'footnotes',
    #'citations': 'citations',
    u'notas-destino': 'target-notes',
    u'texto restruturado-proba-directiva': 'restructuredtext-test-directive'}
"""Galician name to registered (in directives/__init__.py) directive name
mapping."""
# Maps Galician interpreted-text role names (including short aliases such
# as 'ab'/'ac') to the canonical reStructuredText role names.
roles = {
    # language-dependent: fixed
    u'abreviatura': 'abbreviation',
    u'ab': 'abbreviation',
    u'acr\u00f3nimo': 'acronym',
    u'ac': 'acronym',
    u'code (translation required)': 'code',
    u'\u00edndice': 'index',
    u'i': 'index',
    u'sub\u00edndice': 'subscript',
    u'sub': 'subscript',
    u'super\u00edndice': 'superscript',
    u'sup': 'superscript',
    u'referencia t\u00edtulo': 'title-reference',
    u't\u00edtulo': 'title-reference',
    u't': 'title-reference',
    u'referencia-pep': 'pep-reference',
    u'pep': 'pep-reference',
    u'referencia-rfc': 'rfc-reference',
    u'rfc': 'rfc-reference',
    u'\u00e9nfase': 'emphasis',
    u'forte': 'strong',
    u'literal': 'literal',
    'math (translation required)': 'math',
    u'referencia-nome': 'named-reference',
    u'referencia-an\u00f3nimo': 'anonymous-reference',
    u'referencia-nota ao p\u00e9': 'footnote-reference',
    u'referencia-citaci\u00f3n': 'citation-reference',
    u'referencia-substituci\u00f3n': 'substitution-reference',
    u'destino': 'target',
    u'referencia-uri': 'uri-reference',
    u'uri': 'uri-reference',
    u'url': 'uri-reference',
    u'cru': 'raw',}
"""Mapping of Galician role names to canonical role names for interpreted text.
"""
|
arthru/OpenUpgrade | refs/heads/master | openerp/addons/base/tests/test_ir_sequence.py | 136 | # -*- coding: utf-8 -*-
# Run with one of these commands:
# > OPENERP_ADDONS_PATH='../../addons/trunk' OPENERP_PORT=8069 \
# OPENERP_DATABASE=yy PYTHONPATH=. python tests/test_ir_sequence.py
# > OPENERP_ADDONS_PATH='../../addons/trunk' OPENERP_PORT=8069 \
# OPENERP_DATABASE=yy nosetests tests/test_ir_sequence.py
# > OPENERP_ADDONS_PATH='../../../addons/trunk' OPENERP_PORT=8069 \
# OPENERP_DATABASE=yy PYTHONPATH=../:. unit2 test_ir_sequence
# This assume an existing database.
import psycopg2
import psycopg2.errorcodes
import unittest2
import openerp
from openerp.tests import common
DB = common.DB
ADMIN_USER_ID = common.ADMIN_USER_ID
def registry(model):
    """Return the registry entry for *model* in the test database."""
    reg = openerp.modules.registry.RegistryManager.get(DB)
    return reg[model]
def cursor():
    """Open and return a fresh database cursor on the test database."""
    reg = openerp.modules.registry.RegistryManager.get(DB)
    return reg.cursor()
def drop_sequence(code):
    """Delete every ir.sequence / ir.sequence.type record with the given code."""
    cr = cursor()
    for model in ('ir.sequence', 'ir.sequence.type'):
        model_obj = registry(model)
        matching_ids = model_obj.search(cr, ADMIN_USER_ID, [('code', '=', code)])
        model_obj.unlink(cr, ADMIN_USER_ID, matching_ids)
    cr.commit()
    cr.close()
class test_ir_sequence_standard(unittest2.TestCase):
    """ A few tests for a 'Standard' (i.e. PostgreSQL) sequence. """

    def test_ir_sequence_create(self):
        """ Try to create a sequence object. """
        cr = cursor()
        # The sequence type must exist before a sequence referencing its code.
        d = dict(code='test_sequence_type', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type', name='Test sequence')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        cr.commit()
        cr.close()

    def test_ir_sequence_search(self):
        """ Try a search. """
        cr = cursor()
        ids = registry('ir.sequence').search(cr, ADMIN_USER_ID, [], {})
        assert ids
        cr.commit()
        cr.close()

    def test_ir_sequence_draw(self):
        """ Try to draw a number. """
        cr = cursor()
        n = registry('ir.sequence').next_by_code(cr, ADMIN_USER_ID, 'test_sequence_type', {})
        assert n
        cr.commit()
        cr.close()

    def test_ir_sequence_draw_twice(self):
        """ Try to draw a number from two transactions. """
        # A 'standard' (PostgreSQL) sequence must allow concurrent draws
        # from two open transactions without blocking.
        cr0 = cursor()
        cr1 = cursor()
        n0 = registry('ir.sequence').next_by_code(cr0, ADMIN_USER_ID, 'test_sequence_type', {})
        assert n0
        n1 = registry('ir.sequence').next_by_code(cr1, ADMIN_USER_ID, 'test_sequence_type', {})
        assert n1
        cr0.commit()
        cr1.commit()
        cr0.close()
        cr1.close()

    @classmethod
    def tearDownClass(cls):
        # Remove the records created by the tests above.
        drop_sequence('test_sequence_type')
class test_ir_sequence_no_gap(unittest2.TestCase):
    """ Copy of the previous tests for a 'No gap' sequence. """

    def test_ir_sequence_create_no_gap(self):
        """ Try to create a sequence object. """
        cr = cursor()
        d = dict(code='test_sequence_type_2', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        # implementation='no_gap' uses a row lock instead of a PG sequence.
        d = dict(code='test_sequence_type_2', name='Test sequence',
            implementation='no_gap')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        cr.commit()
        cr.close()

    def test_ir_sequence_draw_no_gap(self):
        """ Try to draw a number. """
        cr = cursor()
        n = registry('ir.sequence').next_by_code(cr, ADMIN_USER_ID, 'test_sequence_type_2', {})
        assert n
        cr.commit()
        cr.close()

    def test_ir_sequence_draw_twice_no_gap(self):
        """ Try to draw a number from two transactions.
        This is expected to not work.
        """
        cr0 = cursor()
        cr1 = cursor()
        cr1._default_log_exceptions = False # Prevent logging a traceback
        # The second draw must fail with a lock error while the first
        # transaction still holds the row lock.
        with self.assertRaises(psycopg2.OperationalError) as e:
            n0 = registry('ir.sequence').next_by_code(cr0, ADMIN_USER_ID, 'test_sequence_type_2', {})
            assert n0
            n1 = registry('ir.sequence').next_by_code(cr1, ADMIN_USER_ID, 'test_sequence_type_2', {})
        self.assertEqual(e.exception.pgcode, psycopg2.errorcodes.LOCK_NOT_AVAILABLE, msg="postgresql returned an incorrect errcode")
        cr0.close()
        cr1.close()

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_2')
class test_ir_sequence_change_implementation(unittest2.TestCase):
    """ Create sequence objects and change their ``implementation`` field. """
    # NOTE: the numeric prefixes (1_create, 2_write, 3_unlink) rely on
    # unittest's alphabetical test ordering to enforce execution order.

    def test_ir_sequence_1_create(self):
        """ Try to create a sequence object. """
        cr = cursor()
        d = dict(code='test_sequence_type_3', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type_3', name='Test sequence')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type_4', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type_4', name='Test sequence',
            implementation='no_gap')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        cr.commit()
        cr.close()

    def test_ir_sequence_2_write(self):
        # Flip both sequences between implementations; each write must
        # convert the underlying storage without error.
        cr = cursor()
        ids = registry('ir.sequence').search(cr, ADMIN_USER_ID,
            [('code', 'in', ['test_sequence_type_3', 'test_sequence_type_4'])], {})
        registry('ir.sequence').write(cr, ADMIN_USER_ID, ids,
            {'implementation': 'standard'}, {})
        registry('ir.sequence').write(cr, ADMIN_USER_ID, ids,
            {'implementation': 'no_gap'}, {})
        cr.commit()
        cr.close()

    def test_ir_sequence_3_unlink(self):
        cr = cursor()
        ids = registry('ir.sequence').search(cr, ADMIN_USER_ID,
            [('code', 'in', ['test_sequence_type_3', 'test_sequence_type_4'])], {})
        registry('ir.sequence').unlink(cr, ADMIN_USER_ID, ids, {})
        cr.commit()
        cr.close()

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_3')
        drop_sequence('test_sequence_type_4')
class test_ir_sequence_generate(unittest2.TestCase):
    """ Create sequence objects and generate some values. """

    def test_ir_sequence_create(self):
        """ Try to create a sequence object. """
        cr = cursor()
        d = dict(code='test_sequence_type_5', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type_5', name='Test sequence')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        cr.commit()
        cr.close()
        # Draw nine values and check they are exactly '1'..'9'.
        # (xrange: this file targets Python 2.)
        cr = cursor()
        f = lambda *a: registry('ir.sequence').next_by_code(cr, ADMIN_USER_ID, 'test_sequence_type_5', {})
        assert all(str(x) == f() for x in xrange(1,10))
        cr.commit()
        cr.close()

    def test_ir_sequence_create_no_gap(self):
        """ Try to create a sequence object. """
        cr = cursor()
        d = dict(code='test_sequence_type_6', name='Test sequence type')
        c = registry('ir.sequence.type').create(cr, ADMIN_USER_ID, d, {})
        assert c
        d = dict(code='test_sequence_type_6', name='Test sequence')
        c = registry('ir.sequence').create(cr, ADMIN_USER_ID, d, {})
        assert c
        cr.commit()
        cr.close()
        # Same generation check for the second sequence.
        cr = cursor()
        f = lambda *a: registry('ir.sequence').next_by_code(cr, ADMIN_USER_ID, 'test_sequence_type_6', {})
        assert all(str(x) == f() for x in xrange(1,10))
        cr.commit()
        cr.close()

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_5')
        drop_sequence('test_sequence_type_6')
# Allow running this module directly against an existing database
# (see the usage comments at the top of the file).
if __name__ == '__main__':
    unittest2.main()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
AstroTech/atlassian-python-api | refs/heads/master | examples/jira/jira_jql_query.py | 2 | # coding=utf-8
from atlassian import Jira
# JQL query: all open DEMO issues, ordered by key.
JQL = "project = DEMO AND status NOT IN (Closed, Resolved) ORDER BY issuekey"
# Example only: connects to a local Jira with default admin credentials.
jira = Jira(url="http://localhost:8080", username="admin", password="admin")
# Run the query over the REST API and dump the raw response.
data = jira.jql(JQL)
print(data)
|
arnavgosain/msm8x27 | refs/heads/cm-11.0-next | tools/perf/scripts/python/sctop.py | 11180 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";

# Optional positional args: [comm] restricts counting to one command,
# [interval] sets the refresh period in seconds (default 3).
for_comm = None
default_interval = 3
interval = default_interval

if len(sys.argv) > 3:
    sys.exit(usage)

if len(sys.argv) > 2:
    # Both args given: comm first, interval second.
    for_comm = sys.argv[1]
    interval = int(sys.argv[2])
elif len(sys.argv) > 1:
    # Single arg: an integer is an interval, anything else is a comm.
    try:
        interval = int(sys.argv[1])
    except ValueError:
        for_comm = sys.argv[1]
        interval = default_interval

# Per-syscall-id counters (autodict comes from the perf Core helpers).
syscalls = autodict()
def trace_begin():
    # perf calls this once at trace start: kick off the background thread
    # that refreshes the display every `interval` seconds.
    # (Python 2 `thread` module - this is a py2 perf script.)
    thread.start_new_thread(print_syscall_totals, (interval,))
    pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Per-event hook named after the tracepoint; the parameter list
    # (including `id`, which shadows the builtin) is fixed by perf.
    if for_comm is not None:
        # Filtering by command name: skip events from other processes.
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: autodict returned a nested node, not an
        # int, so seed the counter instead.
        syscalls[id] = 1
def print_syscall_totals(interval):
    # Background-thread loop: every `interval` seconds clear the terminal,
    # print per-syscall counts sorted descending by count, then reset.
    # (Python 2 print statements; trailing commas suppress extra newlines.)
    while 1:
        clear_term()
        if for_comm is not None:
            print "\nsyscall events for %s:\n\n" % (for_comm),
        else:
            print "\nsyscall events:\n\n",
        print "%-40s %10s\n" % ("event", "count"),
        print "%-40s %10s\n" % ("----------------------------------------", \
            "----------"),
        # Sort by (count, id) so the busiest syscalls appear first.
        for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
            reverse = True):
            try:
                print "%-40s %10d\n" % (syscall_name(id), val),
            except TypeError:
                # syscall_name() can fail for unknown ids; skip those rows.
                pass
        syscalls.clear()
        time.sleep(interval)
|
charleswhchan/ansible | refs/heads/devel | lib/ansible/plugins/action/assert.py | 63 | # Copyright 2012, Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.playbook.conditional import Conditional
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    ''' Fail with custom message '''

    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        """Evaluate each 'that' conditional and fail on the first false one.

        Returns the result dict from the base action, with 'failed',
        'assertion' and 'evaluated_to' set when an assertion does not hold.
        """
        task_vars = task_vars if task_vars is not None else dict()
        result = super(ActionModule, self).run(tmp, task_vars)

        args = self._task.args
        if 'that' not in args:
            raise AnsibleError('conditional required in "that" string')

        # Optional custom failure message.
        msg = args.get('msg')

        # Normalise 'that' into a list of conditionals.
        conditions = args['that']
        if not isinstance(conditions, list):
            conditions = [conditions]

        # Reuse the playbook Conditional machinery by temporarily binding
        # each assertion to its `when`; the task's own `when` has already
        # been evaluated and is not used again, so this is safe.
        cond = Conditional(loader=self._loader)
        for condition in conditions:
            cond.when = [condition]
            passed = cond.evaluate_conditional(templar=self._templar, all_vars=task_vars)
            if passed:
                continue
            result['failed'] = True
            result['evaluated_to'] = passed
            result['assertion'] = condition
            if msg:
                result['msg'] = msg
            return result

        result['changed'] = False
        result['msg'] = 'all assertions passed'
        return result
|
amaozhao/basecms | refs/heads/master | sekizai/tests.py | 6 | from __future__ import with_statement
from difflib import SequenceMatcher
from django import template
from django.conf import settings
from django.template.loader import render_to_string
from sekizai.context import SekizaiContext
from sekizai.helpers import validate_template, get_namespaces, Watcher, get_varname
from sekizai.templatetags.sekizai_tags import (validate_context,
import_processor)
from unittest import TestCase
# Python 2/3 compatibility: `unicode` only exists on py2; on py3 every
# str is already unicode.
try:
    unicode_compat = unicode
except NameError:
    unicode_compat = str
def null_processor(context, data, namespace):
    """Sekizai postprocessor that discards the collected data entirely."""
    return ''
def namespace_processor(context, data, namespace):
    """Sekizai postprocessor that renders only the namespace name."""
    return namespace
class SettingsOverride(object):
    """
    Context manager that temporarily overrides Django settings and restores
    them (or removes settings that did not previously exist) on exit.

    Example:
        with SettingsOverride(DEBUG=True):
            # do something
    """
    # Sentinel marking "this setting did not exist before entry".
    class NULL:
        pass

    def __init__(self, **overrides):
        self.overrides = overrides

    def __enter__(self):
        self.old = {}
        for name in self.overrides:
            # Remember the previous value (or NULL if absent), then override.
            self.old[name] = getattr(settings, name, self.NULL)
            setattr(settings, name, self.overrides[name])

    def __exit__(self, type, value, traceback):
        for name, previous in self.old.items():
            if previous is self.NULL:
                # The setting was introduced by us: remove it again.
                delattr(settings, name)
            else:
                setattr(settings, name, previous)
class Match(tuple): # pragma: no cover
    """Tuple subclass exposing difflib match fields (a, b, size) by name.

    Newer difflib versions already return named tuples; this mirrors that
    interface for plain 3-tuples.
    """
    @property
    def a(self):
        # Start index of the match in the first sequence.
        return self[0]
    @property
    def b(self):
        # Start index of the match in the second sequence.
        return self[1]
    @property
    def size(self):
        # Length of the matching run.
        return self[2]
def _backwards_compat_match(thing): # pragma: no cover
if isinstance(thing, tuple):
return Match(thing)
return thing
class BitDiffResult(object):
    """Value object pairing a boolean *status* with a diagnostic *message*."""
    def __init__(self, status, message):
        self.status, self.message = status, message
class BitDiff(object):
    """
    Visual aid for failing tests: renders a side-by-side table of expected
    vs. actual rendered "bits" (lines), marking mismatched rows with '!'.
    """
    def __init__(self, expected):
        # Normalise every expected bit to repr(unicode) for stable comparison.
        self.expected = [repr(unicode_compat(bit)) for bit in expected]

    def test(self, result):
        # Returns a BitDiffResult; on mismatch the message is a formatted
        # two-column diff suitable for assertTrue's failure output.
        result = [repr(unicode_compat(bit)) for bit in result]
        if self.expected == result:
            return BitDiffResult(True, "success")
        else: # pragma: no cover
            # Column width: widest bit on either side, at least len('Expected').
            longest = max([len(x) for x in self.expected] + [len(x) for x in result] + [len('Expected')])
            sm = SequenceMatcher()
            sm.set_seqs(self.expected, result)
            matches = sm.get_matching_blocks()
            lasta = 0
            lastb = 0
            data = []
            # Build (matched?, expected_bit, result_bit) rows: first the
            # unmatched gap before each matching block, then the block itself.
            for match in [_backwards_compat_match(match) for match in matches]:
                unmatcheda = self.expected[lasta:match.a]
                unmatchedb = result[lastb:match.b]
                # Pad the shorter side with empty strings so rows line up.
                unmatchedlen = max([len(unmatcheda), len(unmatchedb)])
                unmatcheda += ['' for x in range(unmatchedlen)]
                unmatchedb += ['' for x in range(unmatchedlen)]
                for i in range(unmatchedlen):
                    data.append((False, unmatcheda[i], unmatchedb[i]))
                for i in range(match.size):
                    data.append((True, self.expected[match.a + i], result[match.b + i]))
                lasta = match.a + match.size
                lastb = match.b + match.size
            # Render the header and separator, then one row per bit.
            padlen = (longest - len('Expected'))
            padding = ' ' * padlen
            line1 = '-' * padlen
            line2 = '-' * (longest - len('Result'))
            msg = '\nExpected%s | | Result' % padding
            msg += '\n--------%s-|---|-------%s' % (line1, line2)
            for success, a, b in data:
                pad = ' ' * (longest - len(a))
                if success:
                    msg += '\n%s%s | | %s' % (a, pad, b)
                else:
                    msg += '\n%s%s | ! | %s' % (a, pad, b)
            return BitDiffResult(False, msg)
class SekizaiTestCase(TestCase):
    """Render sekizai test templates and compare their output line by line."""

    def _render(self, tpl, ctx={}, ctxclass=SekizaiContext):
        # NOTE(review): mutable default `ctx={}` is shared between calls;
        # it is never mutated here, but beware when extending.
        return render_to_string(tpl, ctxclass(ctx))

    def _get_bits(self, tpl, ctx={}, ctxclass=SekizaiContext):
        # Render and split the output into non-empty lines ("bits").
        rendered = self._render(tpl, ctx, ctxclass)
        bits = [bit for bit in [bit.strip('\n') for bit in rendered.split('\n')] if bit]
        return bits, rendered

    def _test(self, tpl, res, ctx={}, ctxclass=SekizaiContext):
        """
        Helper method to render template and compare it's bits
        """
        bits, rendered = self._get_bits(tpl, ctx, ctxclass)
        differ = BitDiff(res)
        result = differ.test(bits)
        self.assertTrue(result.status, result.message)
        return rendered

    def test_basic_dual_block(self):
        """
        Basic dual block testing
        """
        bits = ['my css file', 'some content', 'more content',
            'final content', 'my js file']
        self._test('basic.html', bits)

    def test_named_endaddtoblock(self):
        """
        Testing with named endaddblock
        """
        bits = ["mycontent"]
        self._test('named_end.html', bits)

    def test_eat_content_before_render_block(self):
        """
        Testing that content get's eaten if no render_blocks is available
        """
        bits = ["mycontent"]
        self._test("eat.html", bits)

    def test_sekizai_context_required(self):
        """
        Test that the template tags properly fail if not used with either
        SekizaiContext or the context processor.
        """
        self.assertRaises(template.TemplateSyntaxError, self._render, 'basic.html', {}, template.Context)

    def test_complex_template_inheritance(self):
        """
        Test that (complex) template inheritances work properly
        """
        bits = [
            "head start",
            "some css file",
            "head end",
            "include start",
            "inc add js",
            "include end",
            "block main start",
            "extinc",
            "block main end",
            "body pre-end",
            "inc js file",
            "body end"
        ]
        self._test("inherit/extend.html", bits)
        # NOTE(review): the bare string below is a no-op expression; it looks
        # like a second test's docstring that got merged into this method.
        """
        Test that blocks (and block.super) work properly with sekizai
        """
        bits = [
            "head start",
            "visible css file",
            "some css file",
            "head end",
            "include start",
            "inc add js",
            "include end",
            "block main start",
            "block main base contents",
            "more contents",
            "block main end",
            "body pre-end",
            "inc js file",
            "body end"
        ]
        self._test("inherit/super_blocks.html", bits)

    def test_namespace_isolation(self):
        """
        Tests that namespace isolation works
        """
        bits = ["the same file", "the same file"]
        self._test('namespaces.html', bits)

    def test_variable_namespaces(self):
        """
        Tests variables and filtered variables as block names.
        """
        bits = ["file one", "file two"]
        self._test('variables.html', bits, {'blockname': 'one'})

    def test_invalid_addtoblock(self):
        """
        Tests that template syntax errors are raised properly in templates
        rendered by sekizai tags
        """
        self.assertRaises(template.TemplateSyntaxError, self._render, 'errors/failadd.html')

    def test_invalid_renderblock(self):
        self.assertRaises(template.TemplateSyntaxError, self._render, 'errors/failrender.html')

    def test_invalid_include(self):
        self.assertRaises(template.TemplateSyntaxError, self._render, 'errors/failinc.html')

    def test_invalid_basetemplate(self):
        self.assertRaises(template.TemplateSyntaxError, self._render, 'errors/failbase.html')

    def test_invalid_basetemplate_two(self):
        self.assertRaises(template.TemplateSyntaxError, self._render, 'errors/failbase2.html')

    def test_with_data(self):
        """
        Tests the with_data/add_data tags.
        """
        bits = ["1", "2"]
        self._test('with_data.html', bits)

    def test_easy_inheritance(self):
        self.assertEqual('content', self._render("easy_inherit.html").strip())

    def test_validate_context(self):
        # validate_context must raise with a plain Context while
        # TEMPLATE_DEBUG is on, and merely return False when it is off.
        sekizai_ctx = SekizaiContext()
        django_ctx = template.Context()
        self.assertRaises(template.TemplateSyntaxError, validate_context, django_ctx)
        self.assertEqual(validate_context(sekizai_ctx), True)
        with SettingsOverride(TEMPLATE_DEBUG=False):
            self.assertEqual(validate_context(django_ctx), False)
            self.assertEqual(validate_context(sekizai_ctx), True)
            bits = ['some content', 'more content', 'final content']
            self._test('basic.html', bits, ctxclass=template.Context)

    def test_post_processor_null(self):
        bits = ['header', 'footer']
        self._test('processors/null.html', bits)

    def test_post_processor_namespace(self):
        bits = ['header', 'footer', 'js']
        self._test('processors/namespace.html', bits)

    def test_import_processor_failfast(self):
        self.assertRaises(TypeError, import_processor, 'invalidpath')

    def test_unique(self):
        bits = ['unique data']
        self._test('unique.html', bits)

    def test_strip(self):
        # With `strip`, identical trimmed content must be deduplicated.
        tpl = template.Template("""
            {% load sekizai_tags %}
            {% addtoblock 'a' strip %} test{% endaddtoblock %}
            {% addtoblock 'a' strip %}test {% endaddtoblock %}
            {% render_block 'a' %}""")
        context = SekizaiContext()
        output = tpl.render(context)
        self.assertEqual(output.count('test'), 1, output)
class HelperTests(TestCase):
    """Tests for sekizai.helpers (validate_template, get_namespaces, Watcher)."""

    def test_validate_template_js_css(self):
        self.assertTrue(validate_template('basic.html', ['js', 'css']))

    def test_validate_template_js(self):
        self.assertTrue(validate_template('basic.html', ['js']))

    def test_validate_template_css(self):
        self.assertTrue(validate_template('basic.html', ['css']))

    def test_validate_template_empty(self):
        self.assertTrue(validate_template('basic.html', []))

    def test_validate_template_notfound(self):
        # A namespace not present in the template must fail validation.
        self.assertFalse(validate_template('basic.html', ['notfound']))

    def test_get_namespaces_easy_inherit(self):
        self.assertEqual(get_namespaces('easy_inherit.html'), ['css'])

    def test_get_namespaces_chain_inherit(self):
        self.assertEqual(get_namespaces('inherit/chain.html'), ['css', 'js'])

    def test_get_namespaces_space_chain_inherit(self):
        self.assertEqual(get_namespaces('inherit/spacechain.html'), ['css', 'js'])

    def test_get_namespaces_var_inherit(self):
        # Variable {% extends %} targets cannot be resolved statically.
        self.assertEqual(get_namespaces('inherit/varchain.html'), [])

    def test_get_namespaces_sub_var_inherit(self):
        self.assertEqual(get_namespaces('inherit/subvarchain.html'), [])

    def test_get_namespaces_null_ext(self):
        self.assertEqual(get_namespaces('inherit/nullext.html'), [])

    def test_deactivate_validate_template(self):
        # SEKIZAI_IGNORE_VALIDATION makes validate_template always succeed.
        with SettingsOverride(SEKIZAI_IGNORE_VALIDATION=True):
            self.assertTrue(validate_template('basic.html', ['js', 'css']))
            self.assertTrue(validate_template('basic.html', ['js']))
            self.assertTrue(validate_template('basic.html', ['css']))
            self.assertTrue(validate_template('basic.html', []))
            self.assertTrue(validate_template('basic.html', ['notfound']))

    def test_watcher_add_namespace(self):
        context = SekizaiContext()
        watcher = Watcher(context)
        varname = get_varname()
        context[varname]['key'].append('value')
        changes = watcher.get_changes()
        self.assertEqual(changes, {'key': ['value']})

    def test_watcher_add_data(self):
        # Only data added after the Watcher snapshot counts as a change.
        context = SekizaiContext()
        varname = get_varname()
        context[varname]['key'].append('value')
        watcher = Watcher(context)
        context[varname]['key'].append('value2')
        changes = watcher.get_changes()
        self.assertEqual(changes, {'key': ['value2']})
|
maheshraju-Huawei/actn | refs/heads/master | tools/test/topos/opticalUtils.py | 13 | #!/usr/bin/python
'''
Notes:
This file contains classes and methods useful for integrating LincOE with Mininet,
such as startOE, stopOE, LINCLink, and OpticalSwitch
- $ONOS_ROOT ust be set
- Need to run with sudo -E to preserve ONOS_ROOT env var
- We assume LINC-Config-Generator is named LINC-Config-Generator
- We also assume linc-oe is named linc-oe
- LINC-config-generator and linc-oe must be subdirectories of the user's
home directory
TODO
-----------
- clean up files after runtime
- maybe save the old files in a separate directory?
- modify script to allow startOE to run before net.start()
- add ONOS as a controller in script
Usage:
------------
- import LINCLink and OpticalSwitch from this module
- import startOE and stopOE from this module
- create topology as you would a normal topology. when
to an optical switch with topo.addLink, always specify cls=LINCLink
- when creating an optical switch, use cls=OpticalSwitch in topo.addSwitch
- for annotations on links and switches, a dictionary must be passed in as
the annotations argument
- startOE must be run AFTER net.start() with net as an argument.
- stopOE can be run at any time
I created a separate function to start lincOE to avoid subclassing Mininet.
In case anyone wants to write something that DOES subclass Mininet, I
thought I would outline how:
If we want an object that starts lincOE within the mininet class itself,
we need to add another object to Mininet that contains all of the json object
information for each switch. We would still subclass switch and link, but these
classes would basically be dummy classes that store their own json information
in the Mininet class object. We may also change the default switch class to add
it's tap interfaces from lincOE during startup. The start() method for mininet would
grab all of the information from these switches and links, write configuration files
for lincOE using the json module, start lincOE, then run the start methodfor each
switch. The new start() method for each switch would parse through the sys.config
file that was created and find the tap interface it needs to connect to, similar
to the findTap function that I currently use. After all of the controllers and
switches have been started, the new Mininet start() method should also push the
Topology configuration file to ONOS.
'''
import sys
import re
import json
import os
from time import sleep
import urllib2
from mininet.node import Switch, OVSSwitch, RemoteController
from mininet.topo import Topo
from mininet.util import quietRun
from mininet.net import Mininet
from mininet.log import setLogLevel, info, error, warn
from mininet.link import Link, Intf
from mininet.cli import CLI
# Sleep time and timeout values in seconds
SLEEP_TIME = 2
TIMEOUT = 60
class OpticalSwitch(Switch):
    """
    Marker base class for optical switches; for now, same as Switch.
    """
    pass
class OpticalIntf(Intf):
    """
    Marker class for optical interfaces; for now, same as Intf.
    """
    pass
class OpticalLink(Link):
    """
    Marker class for optical links; for now, same as Link.
    """
    pass
class LINCSwitch(OpticalSwitch):
    """
    Emulated optical switch backed by a logical switch inside LINC-OE.
    Communicates with the LINC Erlang shell via named pipes.
    """
    # FIXME: Sometimes LINC doesn't remove pipes and on restart increments the
    # pipe number from erlang.pipe.1.* to erlang.pipe.2.*, so we should read
    # and write from the latest pipe files. For now all pipes are removed
    # before starting LINC (see bootOE).
    ### User Name ###
    user = os.getlogin()
    ### pipes used to talk to the LINC CLI ###
    readPipe = "/tmp/home/{}/linc-oe/rel/linc/erlang.pipe.1.r".format(user)
    writePipe = "/tmp/home/{}/linc-oe/rel/linc/erlang.pipe.1.w".format(user)
    ### path of LINC's generated sys.config ###
    sysConfig = "/home/{}/linc-oe/rel/linc/releases/1.0/sys.config".format(user)
    ### method, mapping dpid to LINC switchId ###
@staticmethod
def dpids_to_ids(sysConfig):
    '''
    return the dict containing switch dpids as key and LINC switch id as values
    '''
    dpids_to_ids = {}
    fd = None
    try:
        with open(sysConfig, 'r', 0) as fd:
            # LINC numbers its logical switches in the order they appear in
            # sys.config: the Nth dpid found maps to switch id N.
            switch_id = 1
            for line in fd:
                dpid = re.search(r'([0-9A-Fa-f]{2}[:-]){7}([0-9A-Fa-f]{2})+', line, re.I)
                if dpid:
                    dpids_to_ids[dpid.group().replace(':', '')] = switch_id
                    switch_id += 1
        return dpids_to_ids
    except:
        # Best-effort: report and return None if sys.config is unreadable.
        print "Error working with {}\nError: {}\n".format(sysConfig, sys.exc_info())
        fd.close()
        return None
### dict with dpids as keys and the corresponding LINC switch id as values ###
dpidsToLINCSwitchId = dpids_to_ids.__func__(sysConfig)
@staticmethod
def findDir(directory, userName):
    """Locate *directory* directly under /home/<userName> and return its path.

    Returns None when nothing matches; if several directories match, a
    warning is emitted and the first match is returned.
    """
    home = '/home/' + userName
    found = quietRun('find %s -maxdepth 1 -name %s -type d' % (home, directory)).strip('\n')
    if not found:
        return None
    candidates = found.split('\n')
    if len(candidates) > 1:
        warn('***WARNING: Found multiple instances of %s; using %s\n'
             % (directory, candidates[ 0 ]))
        return candidates[ 0 ]
    return found
### ONOS Directory (class-level: resolved once at import time) ###
try:
    onosDir = os.environ[ 'ONOS_ROOT' ]
except:
    # Fall back to searching the user's home directory.
    onosDir = findDir('onos', user)
    if not onosDir:
        error('Please set ONOS_ROOT environment variable!\n')
    else:
        os.environ[ 'ONOS_ROOT' ] = onosDir
### REST USER/PASS ###
try:
    restUser = os.environ[ 'ONOS_WEB_USER' ]
    restPass = os.environ[ 'ONOS_WEB_PASS' ]
except:
    # Default to the stock ONOS credentials when the env vars are unset.
    error('***WARNING: $ONOS_WEB_USER and $ONOS_WEB_PASS aren\'t set!\n')
    error('***WARNING: Setting (probably) sane WEB user/pass values\n')
    restUser = 'onos'
    restPass = 'rocks'
    os.environ[ 'ONOS_WEB_USER' ] = restUser
    os.environ[ 'ONOS_WEB_PASS' ] = restPass
### LINC-directory ###
lincDir = findDir.__func__('linc-oe', user)
if not lincDir:
    error("***ERROR: Could not find linc-oe in user's home directory\n")
### LINC config generator directory ###
configGen = findDir.__func__('LINC-config-generator', user)
if not configGen:
    error("***ERROR: Could not find LINC-config-generator in user's home directory\n")
# list of all the controllers (populated by bootOE)
controllers = None
def __init__(self, name, dpid=None, allowed=True,
             switchType='ROADM', topo=None, annotations=None, controller=None, **params):
    """Create a logical LINC optical switch node.

    Arguments mirror mininet.node.Switch, plus:
        allowed: whether this switch may be used (stored as-is).
        switchType: device type reported to ONOS (default 'ROADM').
        annotations: optional dict of ONOS annotations for this device.
        controller: the controller associated with this switch.
    """
    # Bug fix: the default used to be the shared mutable `annotations={}`.
    # start() mutates self.annotations via setdefault('name', ...), so all
    # instances created without annotations shared (and polluted) one dict.
    if annotations is None:
        annotations = {}
    params[ 'inNamespace' ] = False
    Switch.__init__(self, name, dpid=dpid, **params)
    self.name = name
    self.annotations = annotations
    self.allowed = allowed
    self.switchType = switchType
    self.configDict = {}  # holds all of the JSON configuration data
    self.crossConnects = []
    self.deletedCrossConnects = []
    self.controller = controller
    self.lincId = self._get_linc_id()  # used to communicate with LINC
    self.lincStarted = False
def start(self, *opts, **params):
    '''Instead of starting a virtual switch, we build the JSON
    dictionary for the emulated optical switch'''
    # TODO: once LINC can spawn network elements dynamically, spawn a new
    # logical LINC switch here instead of only building JSON.
    # If LINC is already running, just (re)start the logical switch.
    if self.lincStarted:
        return self.start_oe()
    self.configDict[ 'uri' ] = 'of:' + self.dpid
    self.configDict[ 'annotations' ] = self.annotations
    # NOTE: setdefault mutates self.annotations in place.
    self.configDict[ 'annotations' ].setdefault('name', self.name)
    self.configDict[ 'type' ] = self.switchType
    self.configDict[ 'ports' ] = []
    # Collect JSON for every real port (skip loopback).
    for port, intf in self.intfs.items():
        if intf.name == 'lo':
            continue
        else:
            self.configDict[ 'ports' ].append(intf.json())
    self.lincStarted = True
def stop(self, deleteIntfs=False):
    """Shut down the logical LINC switch.

    *deleteIntfs* is accepted for Switch API compatibility but is
    currently ignored (TODO: also tear down the interfaces).
    """
    self.stop_oe()
def dpctl(self, *args):
    """No-op: dpctl has no meaning for an emulated optical switch."""
    return None
def write_to_cli(self, command):
    '''
    send command to LINC
    '''
    fd = None
    try:
        # Unbuffered (0) so LINC sees the command immediately.
        fd = open(self.writePipe, 'w', 0)
        fd.write(command)
        fd.close()
    except:
        # Best-effort: report the failure and make sure the pipe is closed.
        print "Error working with {}\nError: {}\n".format(self.writePipe, sys.exc_info())
        if fd:
            fd.close()
def read_from_cli(self):
    """Read any pending output from the LINC CLI pipe (non-blocking).

    Returns the text read, or None when nothing was available or the pipe
    could not be opened.
    """
    # Bug fix: `fcntl` was referenced here but never imported anywhere in
    # this module, so setting non-blocking mode raised NameError at runtime.
    import fcntl
    response = None
    fd = None
    try:
        fd = open(self.readPipe, 'r', 0)
        # Non-blocking mode so read() returns immediately even when LINC
        # has produced no output yet.
        fcntl.fcntl(fd, fcntl.F_SETFL, os.O_NONBLOCK)
        # FIXME: due to the non-blocking read we frequently read nothing.
        response = fd.read()
        fd.close()
    except Exception:
        # Best-effort: swallow I/O errors but never leak the descriptor.
        # (Was a bare `except:`; narrowed so Ctrl-C still propagates.)
        if fd:
            fd.close()
    return response
def _get_linc_id(self):
    """Map this switch's dpid to its LINC-internal switch id (or None)."""
    mapping = LINCSwitch.dpidsToLINCSwitchId
    return mapping.get(self.dpid)
#--------------------------------------------------------------------------
# LINC CLI commands
#--------------------------------------------------------------------------
def start_oe(self):
    '''
    existing LINC switch
    '''
    # Start the logical switch inside the running LINC instance.
    cmd = "linc:start_switch({}).\r\n".format(self.lincId)
    self.write_to_cli(cmd)
    # Re-attach the tap interfaces of cross-connects that were detached
    # by a previous stop_oe().
    crossConnectJSON = {}
    linkConfig = []
    for i in range(0,len(self.deletedCrossConnects)):
        crossConnect = self.deletedCrossConnects.pop()
        tap = None
        # Find the packet-side interface and the optical-side tap port.
        if isinstance(crossConnect.intf1.node, LINCSwitch):
            intf = crossConnect.intf2
            tapPort = crossConnect.intf1.port
        else:
            intf = crossConnect.intf1
            tapPort = crossConnect.intf2.port
        tap = LINCSwitch.findTap(self, tapPort)
        if tap:
            LINCSwitch.setupInts([tap])
            intf.node.attach(tap)
        self.crossConnects.append(crossConnect)
        linkConfig.append(crossConnect.json())
    # Sending crossConnect info to the ONOS.
    crossConnectJSON['links'] = linkConfig
    with open("crossConnect.json", 'w') as fd:
        json.dump(crossConnectJSON, fd, indent=4, separators=(',', ': '))
    info('*** Pushing crossConnect.json to ONOS\n')
    # NOTE(review): crossConnect.json is written above but the command below
    # pushes Topology.json -- confirm this is intended.
    output = quietRun('%s/tools/test/bin/onos-netcfg %s\
        Topology.json' % (self.onosDir, self.controllers[ 0 ].ip), shell=True)
def stop_oe(self):
    '''
    stop the existing LINC switch
    '''
    cmd = "linc:stop_switch({}).\r\n".format(self.lincId)
    self.write_to_cli(cmd)
    # Detach the cross-connect taps, remembering them so that a later
    # start_oe() can re-attach them.
    for i in range(0, len(self.crossConnects)):
        crossConnect = self.crossConnects.pop()
        if isinstance(crossConnect.intf1.node, LINCSwitch):
            intf = crossConnect.intf2
            tapPort = crossConnect.intf1.port
        else:
            intf = crossConnect.intf1
            tapPort = crossConnect.intf2.port
        intf.node.detach(LINCSwitch.findTap(self, tapPort))
        self.deletedCrossConnects.append(crossConnect)
def w_port_up(self, port):
    """Administratively bring *port* up on this LINC switch."""
    self.write_to_cli("linc:port_up({},{}).\r\n".format(self.lincId, port))
def w_port_down(self, port):
    """Administratively bring *port* down on this LINC switch."""
    self.write_to_cli("linc:port_down({},{}).\r\n".format(self.lincId, port))
# helper functions
@staticmethod
def switchJSON(switch):
    "Returns the json configuration for a packet switch"
    configDict = {}
    configDict[ 'uri' ] = 'of:' + switch.dpid
    configDict[ 'type' ] = 'SWITCH'
    annotations = switch.params.get('annotations', {})
    annotations.setdefault('name', switch.name)
    configDict[ 'annotations' ] = annotations
    ports = []
    for port, intf in switch.intfs.items():
        # Skip loopback; every other interface becomes a port entry.
        if intf.name == 'lo':
            continue
        portDict = {}
        portDict[ 'port' ] = port
        portType = 'COPPER'
        if isinstance(intf.link, LINCLink):
            # Optical-facing port: cross-connects are OCH, others OMS.
            portType = 'OCH' if intf.link.isCrossConnect() else 'OMS'
        portDict[ 'type' ] = portType
        # The peer interface on the same link supplies the port speed.
        intfList = [ intf.link.intf1, intf.link.intf2 ]
        intfList.remove(intf)
        portDict[ 'speed' ] = intfList[ 0 ].speed if isinstance(intf.link, LINCLink) else 0
        ports.append(portDict)
    configDict[ 'ports' ] = ports
    return configDict
    @staticmethod
    def bootOE(net, domain=None):
        """
        Start the LINC optical emulator within a mininet instance
        This involves 1. converting the information stored in Linc* to configs
        for both LINC and the network config system, 2. starting Linc, 3. connecting
        cross-connects, and finally pushing the network configs to ONOS.
        Inevitably, there are times when we have OVS switches that should not be
        under the control of the controller in charge of the Linc switches. We
        hint at these by passing domain information.

        net:    the Mininet instance whose switches/links are exported
        domain: optional collection of switches; when given, switches outside
                it are excluded from the optical JSON
        Returns False on failure; otherwise falls through (implicitly None).
        NOTE(review): Python 2 only (urllib2, dict.has_key, dict.viewvalues).
        """
        LINCSwitch.opticalJSON = {}
        linkConfig = []
        devices = []
        #setting up the controllers for LINCSwitch class
        LINCSwitch.controllers = net.controllers
        # Build the device list: optical switches describe themselves, OVS
        # packet switches are converted via switchJSON().
        for switch in net.switches:
            if domain and switch not in domain:
                continue
            if isinstance(switch, OpticalSwitch):
                devices.append(switch.json())
            elif isinstance(switch, OVSSwitch):
                devices.append(LINCSwitch.switchJSON(switch))
        LINCSwitch.opticalJSON[ 'devices' ] = devices
        for link in net.links:
            if isinstance(link, LINCLink) :
                linkConfig.append(link.json())
        LINCSwitch.opticalJSON[ 'links' ] = linkConfig
        info('*** Writing Topology.json file\n')
        topoJSON = LINCSwitch.makeTopoJSON()
        with open('Topology.json', 'w') as outfile:
            json.dump(topoJSON, outfile, indent=4, separators=(',', ': '))
        info('*** Converting Topology.json to linc-oe format (TopoConfig.json) file (no oecfg) \n')
        topoConfigJson = {}
        topoConfigJson["switchConfig"] = LINCSwitch.getSwitchConfig(net.switches)
        topoConfigJson["linkConfig"] = LINCSwitch.getLinkConfig(net.links)
        #Writing to TopoConfig.json
        with open( 'TopoConfig.json', 'w' ) as outfile:
            json.dump( topoConfigJson, outfile, indent=4, separators=(',', ': ') )
        info('*** Creating sys.config...\n')
        # config_generator prints nothing on success; any output is an error.
        output = quietRun('%s/config_generator TopoConfig.json %s/sys.config.template %s %s'
                        % (LINCSwitch.configGen, LINCSwitch.configGen, LINCSwitch.controllers[ 0 ].ip, LINCSwitch.controllers[ 0 ].port), shell=True)
        if output:
            error('***ERROR: Error creating sys.config file: %s\n' % output)
            return False
        info ('*** Setting multiple controllers in sys.config...\n')
        # Rewrite the generated single-controller list so LINC connects to
        # every ONOS controller, not just the first.
        searchStr = '\[{"Switch.*$'
        ctrlStr = ''
        for index in range(len(LINCSwitch.controllers)):
            ctrlStr += '{"Switch%d-Controller","%s",%d,tcp},' % (index, net.controllers[index].ip, net.controllers[index].port)
        replaceStr = '[%s]},' % ctrlStr[:-1] # Cut off last comma
        sedCmd = 'sed -i \'s/%s/%s/\' sys.config' % (searchStr, replaceStr)
        output = quietRun(sedCmd, shell=True)
        info('*** Copying sys.config to linc-oe directory: ', output + '\n')
        output = quietRun('cp -v sys.config %s/rel/linc/releases/1.0/' % LINCSwitch.lincDir, shell=True).strip('\n')
        info(output + '\n')
        info('*** Adding taps and bringing them up...\n')
        LINCSwitch.setupInts(LINCSwitch.getTaps())
        info('*** removing pipes if any \n')
        quietRun('rm /tmp/home/%s/linc-oe/rel/linc/*' % LINCSwitch.user, shell=True)
        info('*** Starting linc OE...\n')
        output = quietRun('%s/rel/linc/bin/linc start' % LINCSwitch.lincDir, shell=True)
        if output:
            error('***ERROR: LINC-OE: %s' % output + '\n')
            quietRun('%s/rel/linc/bin/linc stop' % LINCSwitch.lincDir, shell=True)
            return False
        info('*** Waiting for linc-oe to start...\n')
        LINCSwitch.waitStarted(net)
        info('*** Adding cross-connect (tap) interfaces to packet switches...\n')
        # Attach each LINC-created tap to the packet switch on the non-optical
        # side of every cross-connect link.
        for link in net.links:
            if isinstance(link, LINCLink) and link.isCrossConnect():
                for intf in [ link.intf1, link.intf2 ]:
                    if not isinstance(intf, LINCIntf):
                        intfList = [ intf.link.intf1, intf.link.intf2 ]
                        intfList.remove(intf)
                        intf2 = intfList[ 0 ]
                        intf.node.attach(LINCSwitch.findTap(intf2.node, intf2.node.ports[ intf2 ]))
        info('*** Waiting for all devices to be available in ONOS...\n')
        url = 'http://%s:8181/onos/v1/devices' % LINCSwitch.controllers[0].ip
        time = 0
        # Set up password authentication
        pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        pw_mgr.add_password(None, url, LINCSwitch.restUser, LINCSwitch.restPass)
        handler = urllib2.HTTPBasicAuthHandler(pw_mgr)
        opener = urllib2.build_opener(handler)
        opener.open(url)
        urllib2.install_opener(opener)
        # focus on just checking the state of devices we're interested in
        # expected devices availability map
        devMap = dict.fromkeys(map( lambda x: x['uri'], devices ), False)
        # Poll the ONOS REST API until all expected devices report available,
        # giving up after TIMEOUT seconds.
        while True:
            response = json.load(urllib2.urlopen(url))
            devs = response.get('devices')
            # update availability map
            for d in devs:
                if devMap.has_key(d['id']):
                    devMap[d['id']] = d['available']
            # Check if all devices we're interested became available
            if all(devMap.viewvalues()):
                break;
            if (time >= TIMEOUT):
                error('***ERROR: ONOS did not register devices within %s seconds\n' % TIMEOUT)
                break
            time += SLEEP_TIME
            sleep(SLEEP_TIME)
        info('*** Pushing Topology.json to ONOS\n')
        for index in range(len(LINCSwitch.controllers)):
            output = quietRun('%s/tools/test/bin/onos-netcfg %s Topology.json &'\
                % (LINCSwitch.onosDir, LINCSwitch.controllers[ index ].ip), shell=True)
            # successful output contains the two characters '{}'
            # if there is more output than this, there is an issue
            if output.strip('{}'):
                warn('***WARNING: Could not push topology file to ONOS: %s\n' % output)
#converts node ids to linc-oe format, with colons every two chars
@staticmethod
def dpId(id):
nodeDpid = ""
id = id.split("/", 1)[0]
for i in range(3, len(id) - 1, 2):
nodeDpid += (id[i:(i + 2):]) + ":"
return nodeDpid[0:-1]
@staticmethod
def makeTopoJSON():
"""
Builds ONOS network config system compatible dicts to be written as Topology.json file.
"""
topology = {}
links = {}
devices = {}
ports = {}
BasicDevConfigKeys = ['name', 'type', 'latitude', 'longitude', 'allowed',
'rackAddress', 'owner', 'driver', 'manufacturer',
'hwVersion', 'swVersion', 'serial',
'managementAddress']
for switch in LINCSwitch.opticalJSON[ 'devices' ]:
# Build device entries - keyed on uri (DPID) and config key 'basic'
# 'type' is necessary field, else ONOS assumes it's a SWITCH
# Annotations hold switch name and latitude/longitude
devDict = {}
devDict[ 'type' ] = switch[ 'type' ]
devDict.update({k: v for k, v in switch[ 'annotations' ].iteritems() if k in BasicDevConfigKeys})
devSubj = switch[ 'uri' ]
devices[ devSubj ] = { 'basic': devDict }
# Build port entries - keyed on "uri/port" and config key 'optical'
for port in switch[ 'ports' ]:
portSubj = devSubj + '/' + str(port[ 'port' ])
ports[ portSubj ] = { 'optical': port }
# Build link entries - keyed on "uri/port-uri/port" and config key 'basic'
# Annotations hold the 'durable' field, which is necessary as long as we don't discover optical links
for link in LINCSwitch.opticalJSON[ 'links' ]:
linkDict = {}
linkDict[ 'type' ] = link[ 'type' ]
linkDict.update(link[ 'annotations' ])
linkSubj = link[ 'src' ] + '-' + link[ 'dst' ]
links[ linkSubj ] = { 'basic': linkDict }
topology[ 'links' ] = links
topology[ 'devices' ] = devices
topology[ 'ports' ] = ports
return topology
@staticmethod
def getSwitchConfig(switches):
switchConfig = []
# Iterate through all switches and convert the ROADM switches to linc-oe format
for switch in switches:
if isinstance(switch, LINCSwitch):
builtSwitch = {}
# Set basic switch params based on annotations
builtSwitch["allowed"] = True
builtSwitch["latitude"] = switch.annotations.get("latitude", 0.0)
builtSwitch["longitude"] = switch.annotations.get("longitude", 0.0)
# Convert dpid to linc-oe format
builtSwitch["name"] = switch.name
builtSwitch["nodeDpid"] = LINCSwitch.dpId('of:' + switch.dpid)
# Set switch params and type
builtSwitch["params"] = {}
builtSwitch["params"]["numregens"] = switch.annotations.get("optical.regens", 0)
builtSwitch["type"] = "Roadm"
switchConfig.append(builtSwitch)
return switchConfig
@staticmethod
def getLinkConfig(links):
linkConfig = []
# Iterate through all non-edge links and convert them to linc-oe format
for link in links:
if isinstance(link, LINCLink):
builtLink = {}
# Set basic link params for src and dst
builtLink["allowed"] = True
builtLink["nodeDpid1"] = LINCSwitch.dpId('of:' + link.intf1.node.dpid)
builtLink["nodeDpid2"] = LINCSwitch.dpId('of:' + link.intf2.node.dpid)
# Set more params such as name/bandwidth/port if they exist
params = {}
params["nodeName1"] = link.intf1.node.name
params["nodeName2"] = link.intf2.node.name
params["port1"] = link.port1
params["port2"] = link.port2
if "bandwidth" in link.annotations:
params["bandwidth"] = link.annotations["bandwidth"]
builtLink["params"] = params
# Set link type to WDM or packet (LINC-config-generator relies on it)
if link.isTransportLayer():
builtLink["type"] = "wdmLink"
else:
builtLink["type"] = "pktOptLink"
linkConfig.append(builtLink)
return linkConfig
@staticmethod
def waitStarted(net, timeout=TIMEOUT):
"wait until all tap interfaces are available"
tapCount = 0
time = 0
for link in net.links:
if isinstance(link, LINCLink) and link.isCrossConnect():
tapCount += 1
while True:
# tapCount can be less than the actual number of taps if the optical network
# is a subgraph of a larger multidomain network.
tapNum = int(quietRun('ip addr | grep tap | wc -l', shell=True).strip('\n'))
if tapCount <= tapNum:
return True
if timeout:
if time >= TIMEOUT:
error('***ERROR: LINC OE did not start within %s seconds\n' % TIMEOUT)
return False
time += SLEEP_TIME
sleep(SLEEP_TIME)
@staticmethod
def shutdownOE():
"stop the optical emulator"
info('*** Stopping linc OE...\n')
quietRun('%s/rel/linc/bin/linc stop' % LINCSwitch.lincDir, shell=True)
@staticmethod
def setupInts(intfs):
'''
add taps and bring them up.
'''
for i in intfs:
quietRun('ip tuntap add dev %s mode tap' % i)
quietRun('ip link set dev %s up' % i)
info('*** Intf %s set\n' % i)
@staticmethod
def getTaps(path=None):
'''
return list of all the taps in sys.config
'''
if path is None:
path = '%s/rel/linc/releases/1.0/sys.config' % LINCSwitch.lincDir
fd = open(path, 'r', 0)
sys_data = fd.read()
taps = re.findall('tap\d+', sys_data)
fd.close()
return taps
    @staticmethod
    def findTap(node, port, path=None):
        '''utility function to parse through a sys.config
        file to find tap interfaces for a switch

        node: switch whose dpid identifies its section in sys.config
        port: OpenFlow port number to look up within that section
        path: sys.config location; defaults to the LINCSwitch.lincDir release
        Returns the tap name (e.g. 'tap3') or None if no matching port exists.
        '''
        switch = False  # True while scanning lines inside this node's section
        portLine = ''
        intfLines = []
        if path is None:
            path = '%s/rel/linc/releases/1.0/sys.config' % LINCSwitch.lincDir
        with open(path) as f:
            for line in f:
                # remember every tap mention; matched against the port later
                if 'tap' in line:
                    intfLines.append(line)
                # NOTE(review): str.translate(None, ':') is Python-2-only; it
                # strips colons so the raw dpid can be matched.
                if node.dpid in line.translate(None, ':'):
                    switch = True
                    continue
                if switch:
                    # a new 'switch' line ends this node's section
                    if 'switch' in line:
                        switch = False
                    if 'port_no,%s}' % port in line:
                        portLine = line
                        break
        if portLine:
            # extract the LINC-internal port id from e.g. '{port,7,...'
            m = re.search('port,\d+', portLine)
            port = m.group(0).split(',')[ 1 ]
        else:
            error('***ERROR: Could not find any ports in sys.config\n')
            return
        for intfLine in intfLines:
            if 'port,%s' % port in intfLine:
                return re.findall('tap\d+', intfLine)[ 0 ]
    def json(self):
        "Return the JSON configuration dictionary previously built for this switch."
        return self.configDict
    def terminate(self):
        # The emulated switch has no per-instance OS process for Mininet to
        # kill (LINC runs as one external emulator), so termination is a no-op.
        pass
class LINCLink(Link):
    """
    LINC link class

    A dummy link (no veth pair is created) between optical and/or packet
    switches.  Links between two LINCSwitches are pure optical; links with a
    packet switch on one end are cross-connects.
    """
    def __init__(self, node1, node2, port1=None, port2=None, allowed=True,
                 intfName1=None, intfName2=None, linkType='OPTICAL',
                 annotations=None, speed1=0, speed2=0, **params):
        "Creates a dummy link without a virtual ethernet pair."
        self.allowed = allowed
        # FIX: the old default `annotations={}` was a shared mutable dict —
        # every link created without annotations aliased the same object
        # (and json() exposes it to callers).  Use None as the sentinel.
        self.annotations = annotations if annotations is not None else {}
        self.linkType = linkType
        self.port1 = port1
        self.port2 = port2
        params1 = { 'speed': speed1 }
        params2 = { 'speed': speed2 }
        # Optical-to-optical links are not cross-connects; anything touching
        # a packet switch is.
        if isinstance(node1, LINCSwitch) and isinstance(node2, LINCSwitch):
            self.isXC = False
        else:
            self.isXC = True
        if isinstance(node1, LINCSwitch):
            cls1 = LINCIntf
            if self.isXC:
                node1.crossConnects.append(self)
        else:
            cls1 = Intf
            # bad hack to stop error message from appearing when we try to set up intf in a packet switch,
            # and there is no interface there( because we do not run makeIntfPair ). This way, we just set lo up
            intfName1 = 'lo'
        if isinstance(node2, LINCSwitch):
            cls2 = LINCIntf
            if self.isXC:
                node2.crossConnects.append(self)
        else:
            cls2 = Intf
            intfName2 = 'lo'
        Link.__init__(self, node1, node2, port1=port1, port2=port2,
                      intfName1=intfName1, intfName2=intfName2, cls1=cls1,
                      cls2=cls2, params1=params1, params2=params2)
    @classmethod
    def makeIntfPair(_cls, intfName1, intfName2, *args, **kwargs):
        # Intentionally a no-op: LINC links have no real veth pair to create.
        pass
    def json(self):
        "build and return the json configuration dictionary for this link"
        configData = {}
        # Endpoints are expressed as "of:<dpid>/<port>".
        configData[ 'src' ] = ('of:' + self.intf1.node.dpid +
                               '/%s' % self.intf1.node.ports[ self.intf1 ])
        configData[ 'dst' ] = ('of:' + self.intf2.node.dpid +
                               '/%s' % self.intf2.node.ports[ self.intf2 ])
        configData[ 'type' ] = self.linkType
        configData[ 'annotations' ] = self.annotations
        return configData
    def isCrossConnect(self):
        "Return True iff exactly one endpoint is a LINC (optical) switch."
        if isinstance(self.intf1.node, LINCSwitch) ^ isinstance(self.intf2.node, LINCSwitch):
            return True
        return False
    def isTransportLayer(self):
        "Return True iff both endpoints are LINC (optical) switches."
        if isinstance(self.intf1.node, LINCSwitch) and isinstance(self.intf2.node, LINCSwitch):
            return True
        return False
class LINCIntf(OpticalIntf):
    """
    LINC interface class

    Dummy optical interface: keeps port/speed/link bookkeeping without any
    real OS-level interface configuration.
    """
    def __init__(self, name=None, node=None, speed=0,
                 port=None, link=None, **params):
        # record bookkeeping before registering with the node
        self.name = name
        self.node = node
        self.link = link
        self.port = port
        self.speed = speed
        node.addIntf(self, port=port)
        self.params = params
        self.ip = None
    def json(self):
        """Build and return the JSON description of this interface
        (not used right now)."""
        kind = 'COPPER'
        if isinstance(self.link, LINCLink):
            kind = 'OCH' if self.link.isCrossConnect() else 'OMS'
        return { 'port': self.port,
                 'speed': self.speed,
                 'type': kind }
    def config(self, *args, **kwargs):
        """No-op: a dummy interface takes no configuration."""
        pass
    def ifconfig(self, status):
        """Bring the corresponding LINC port up or down."""
        if status == "up":
            return self.node.w_port_up(self.port)
        if status == "down":
            return self.node.w_port_down(self.port)
class MininetOE(Mininet):
    """Mininet variant that also boots and shuts down the LINC optical
    emulator alongside the packet network."""
    def start(self):
        """Start the packet network, then the optical emulator."""
        Mininet.start(self)
        LINCSwitch.bootOE(self)
    def stop(self):
        """Stop the packet network, then the optical emulator."""
        Mininet.stop(self)
        LINCSwitch.shutdownOE()
    def addControllers(self, controllers):
        """Add one RemoteController ('c0', 'c1', ...) per IP given."""
        for idx, ctrlIP in enumerate(controllers):
            self.addController(RemoteController('c%d' % idx, ip=ctrlIP))
if __name__ == '__main__':
    # This module is meant to be imported by topology scripts; running it
    # directly does nothing.
    pass
|
4eek/edx-platform | refs/heads/master | lms/djangoapps/certificates/tests/test_webview_views.py | 5 | """Tests for certificates views. """
import json
from uuid import uuid4
from nose.plugins.attrib import attr
from mock import patch
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.client import Client
from django.test.utils import override_settings
from openedx.core.lib.tests.assertions.events import assert_event_matches
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from student.roles import CourseStaffRole
from track.tests import EventTrackingTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from certificates.api import get_certificate_url
from certificates.models import (
GeneratedCertificate,
BadgeAssertion,
CertificateStatuses,
CertificateSocialNetworks,
CertificateTemplate,
)
from certificates.tests.factories import (
CertificateHtmlViewConfigurationFactory,
LinkedInAddToProfileConfigurationFactory,
)
from util import organizations_helpers as organizations_api
from django.test.client import RequestFactory
import urllib
# Feature-flag dictionaries used with @override_settings in the tests below:
# copies of settings.FEATURES with the HTML certificate view toggled.
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
FEATURES_WITH_CERTS_DISABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_DISABLED['CERTIFICATES_HTML_VIEW'] = False
# Custom-template tests additionally need CUSTOM_CERTIFICATE_TEMPLATES_ENABLED
# on top of the HTML-view flag.
FEATURES_WITH_CUSTOM_CERTS_ENABLED = {
    "CUSTOM_CERTIFICATE_TEMPLATES_ENABLED": True
}
FEATURES_WITH_CUSTOM_CERTS_ENABLED.update(FEATURES_WITH_CERTS_ENABLED)
@attr('shard_1')
class CertificatesViewsTests(ModuleStoreTestCase, EventTrackingTestCase):
    """
    Tests for the certificates web/html views
    """
    def setUp(self):
        # Common fixture: a course, an enrolled user with a generated 'honor'
        # certificate, and the HTML-view/LinkedIn configuration rows.
        super(CertificatesViewsTests, self).setUp()
        self.client = Client()
        self.course = CourseFactory.create(
            org='testorg', number='run1', display_name='refundable course'
        )
        self.course_id = self.course.location.course_key
        self.user = UserFactory.create(
            email='joe_user@edx.org',
            username='joeuser',
            password='foo'
        )
        self.user.profile.name = "Joe User"
        self.user.profile.save()
        self.client.login(username=self.user.username, password='foo')
        self.request = RequestFactory().request()
        self.cert = GeneratedCertificate.objects.create(
            user=self.user,
            course_id=self.course_id,
            verify_uuid=uuid4(),
            download_uuid=uuid4(),
            download_url="http://www.example.com/certificates/download",
            grade="0.95",
            key='the_key',
            distinction=True,
            status='generated',
            mode='honor',
            name=self.user.profile.name,
        )
        CourseEnrollmentFactory.create(
            user=self.user,
            course_id=self.course_id
        )
        CertificateHtmlViewConfigurationFactory.create()
        LinkedInAddToProfileConfigurationFactory.create()
    def _add_course_certificates(self, count=1, signatory_count=0, is_active=True):
        """
        Create certificate for the course.
        """
        # Each certificate gets the same list of generated signatories.
        signatories = [
            {
                'name': 'Signatory_Name ' + str(i),
                'title': 'Signatory_Title ' + str(i),
                'organization': 'Signatory_Organization ' + str(i),
                'signature_image_path': '/static/certificates/images/demo-sig{}.png'.format(i),
                'id': i,
            } for i in xrange(signatory_count)
        ]
        certificates = [
            {
                'id': i,
                'name': 'Name ' + str(i),
                'description': 'Description ' + str(i),
                'course_title': 'course_title_' + str(i),
                'org_logo_path': '/t4x/orgX/testX/asset/org-logo-{}.png'.format(i),
                'signatories': signatories,
                'version': 1,
                'is_active': is_active
            } for i in xrange(count)
        ]
        self.course.certificates = {'certificates': certificates}
        self.course.cert_html_view_enabled = True
        self.course.save()
        self.store.update_item(self.course, self.user.id)
    def _create_custom_template(self, org_id=None, mode=None, course_key=None):
        """
        Creates a custom certificate template entry in DB.
        """
        # The template body echoes the values the tests assert on below
        # (language code, course name, mode).
        template_html = """
            <html>
            <body>
                lang: ${LANGUAGE_CODE}
                course name: ${accomplishment_copy_course_name}
                mode: ${course_mode}
                ${accomplishment_copy_course_description}
            </body>
            </html>
        """
        template = CertificateTemplate(
            name='custom template',
            template=template_html,
            organization_id=org_id,
            course_key=course_key,
            mode=mode,
            is_active=True
        )
        template.save()
    # --- Rendering tests for the standard (non-custom-template) web view ---
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_linkedin_share_url(self):
        """
        Test: LinkedIn share URL.
        """
        self._add_course_certificates(count=1, signatory_count=1, is_active=True)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertTrue(urllib.quote_plus(self.request.build_absolute_uri(test_url)) in response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_rendering_course_organization_data(self):
        """
        Test: organization data should render on certificate web view if course has organization.
        """
        test_organization_data = {
            'name': 'test organization',
            'short_name': 'test_organization',
            'description': 'Test Organization Description',
            'active': True,
            'logo': '/logo_test1.png/'
        }
        test_org = organizations_api.add_organization(organization_data=test_organization_data)
        organizations_api.add_organization_course(organization_data=test_org, course_id=unicode(self.course.id))
        self._add_course_certificates(count=1, signatory_count=1, is_active=True)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertIn(
            'a course of study offered by test_organization, an online learning initiative of test organization',
            response.content
        )
        self.assertNotIn(
            'a course of study offered by testorg',
            response.content
        )
        self.assertIn(
            '<title>test_organization {} Certificate |'.format(self.course.number, ),
            response.content
        )
        self.assertIn('logo_test1.png', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_valid_certificate(self):
        # The verify UUID should render for every certificate mode.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        self._add_course_certificates(count=1, signatory_count=2)
        response = self.client.get(test_url)
        self.assertIn(str(self.cert.verify_uuid), response.content)
        # Hit any "verified" mode-specific branches
        self.cert.mode = 'verified'
        self.cert.save()
        response = self.client.get(test_url)
        self.assertIn(str(self.cert.verify_uuid), response.content)
        # Hit any 'xseries' mode-specific branches
        self.cert.mode = 'xseries'
        self.cert.save()
        response = self.client.get(test_url)
        self.assertIn(str(self.cert.verify_uuid), response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_with_valid_signatories(self):
        # Signatory name/title/organization/signature image should all render.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        self._add_course_certificates(count=1, signatory_count=2)
        response = self.client.get(test_url)
        self.assertIn('course_title_0', response.content)
        self.assertIn('Signatory_Name 0', response.content)
        self.assertIn('Signatory_Title 0', response.content)
        self.assertIn('Signatory_Organization 0', response.content)
        self.assertIn('/static/certificates/images/demo-sig0.png', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_course_display_name_not_override_with_course_title(self):
        # if certificate in descriptor has not course_title then course name should not be overridden with this title.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        test_certificates = [
            {
                'id': 0,
                'name': 'Name 0',
                'description': 'Description 0',
                'signatories': [],
                'version': 1,
                'is_active':True
            }
        ]
        self.course.certificates = {'certificates': test_certificates}
        self.course.cert_html_view_enabled = True
        self.course.save()
        self.store.update_item(self.course, self.user.id)
        response = self.client.get(test_url)
        self.assertNotIn('test_course_title_0', response.content)
        self.assertIn('refundable course', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_certificate_view_without_org_logo(self):
        # A certificate without org_logo_path should fall back to a single
        # (edX) organization logo container.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        test_certificates = [
            {
                'id': 0,
                'name': 'Certificate Name 0',
                'signatories': [],
                'version': 1,
                'is_active': True
            }
        ]
        self.course.certificates = {'certificates': test_certificates}
        self.course.cert_html_view_enabled = True
        self.course.save()
        self.store.update_item(self.course, self.user.id)
        response = self.client.get(test_url)
        # make sure response html has only one organization logo container for edX
        self.assertContains(response, "<li class=\"wrapper-organization\">", 1)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_without_signatories(self):
        # NOTE(review): passes unicode(self.course) rather than
        # unicode(self.course.id) — other tests use .id; confirm intended.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course)
        )
        self._add_course_certificates(count=1, signatory_count=0)
        response = self.client.get(test_url)
        self.assertNotIn('Signatory_Name 0', response.content)
        self.assertNotIn('Signatory_Title 0', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_DISABLED)
    def test_render_html_view_disabled_feature_flag_returns_static_url(self):
        # With the HTML view disabled the URL is the static download URL.
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        self.assertIn(str(self.cert.download_url), test_url)
    # --- Error paths: bad course ids, unknown users, missing certificates ---
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_invalid_course_id(self):
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id='az/23423/4vs'
        )
        response = self.client.get(test_url)
        self.assertIn('invalid', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_invalid_course(self):
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id='missing/course/key'
        )
        response = self.client.get(test_url)
        self.assertIn('invalid', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_invalid_user(self):
        test_url = get_certificate_url(
            user_id=111,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertIn('invalid', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_invalid_user_certificate(self):
        self.cert.delete()
        self.assertEqual(len(GeneratedCertificate.objects.all()), 0)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertIn('invalid', response.content)
    # --- Preview mode (staff-only) behavior ---
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_with_preview_mode(self):
        """
        test certificate web view should render properly along with its signatories information when accessing it in
        preview mode. Either the certificate is marked active or not.
        """
        self.cert.delete()
        self.assertEqual(len(GeneratedCertificate.objects.all()), 0)
        self._add_course_certificates(count=1, signatory_count=2)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url + '?preview=honor')
        #accessing certificate web view in preview mode without
        # staff or instructor access should show invalid certificate
        self.assertIn('This is an invalid certificate number', response.content)
        CourseStaffRole(self.course.id).add_users(self.user)
        response = self.client.get(test_url + '?preview=honor')
        self.assertNotIn(self.course.display_name, response.content)
        self.assertIn('course_title_0', response.content)
        self.assertIn('Signatory_Title 0', response.content)
        # mark certificate inactive but accessing in preview mode.
        self._add_course_certificates(count=1, signatory_count=2, is_active=False)
        response = self.client.get(test_url + '?preview=honor')
        self.assertNotIn(self.course.display_name, response.content)
        self.assertIn('course_title_0', response.content)
        self.assertIn('Signatory_Title 0', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_with_preview_mode_when_user_already_has_cert(self):
        """
        test certificate web view should render properly in
        preview mode even if user who is previewing already has a certificate
        generated with different mode.
        """
        self._add_course_certificates(count=1, signatory_count=2)
        CourseStaffRole(self.course.id).add_users(self.user)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        # user has already has certificate generated for 'honor' mode
        # so let's try to preview in 'verified' mode.
        response = self.client.get(test_url + '?preview=verified')
        self.assertNotIn(self.course.display_name, response.content)
        self.assertIn('course_title_0', response.content)
        self.assertIn('Signatory_Title 0', response.content)
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_render_html_view_invalid_certificate_configuration(self):
        # No course certificate configured at all -> "Invalid Certificate".
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertIn("Invalid Certificate", response.content)
    # --- Analytics events emitted by the certificate views ---
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_certificate_evidence_event_emitted(self):
        self.client.logout()
        self._add_course_certificates(count=1, signatory_count=2)
        self.recreate_tracker()
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        response = self.client.get(test_url)
        self.assertEqual(response.status_code, 200)
        actual_event = self.get_event()
        self.assertEqual(actual_event['name'], 'edx.certificate.evidence_visited')
        assert_event_matches(
            {
                'user_id': self.user.id,
                'certificate_id': unicode(self.cert.verify_uuid),
                'enrollment_mode': self.cert.mode,
                'certificate_url': test_url,
                'course_id': unicode(self.course.id),
                'social_network': CertificateSocialNetworks.linkedin
            },
            actual_event['data']
        )
    @override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
    def test_evidence_event_sent(self):
        # Visiting with ?evidence_visit=1 should emit the badge-evidence event.
        cert_url = get_certificate_url(
            user_id=self.user.id,
            course_id=self.course_id
        )
        test_url = '{}?evidence_visit=1'.format(cert_url)
        self._add_course_certificates(count=1, signatory_count=2)
        self.recreate_tracker()
        assertion = BadgeAssertion(
            user=self.user, course_id=self.course_id, mode='honor',
            data={
                'image': 'http://www.example.com/image.png',
                'json': {'id': 'http://www.example.com/assertion.json'},
                'issuer': 'http://www.example.com/issuer.json',
            }
        )
        assertion.save()
        response = self.client.get(test_url)
        self.assertEqual(response.status_code, 200)
        assert_event_matches(
            {
                'name': 'edx.badge.assertion.evidence_visited',
                'data': {
                    'course_id': 'testorg/run1/refundable_course',
                    # pylint: disable=no-member
                    'assertion_id': assertion.id,
                    'assertion_json_url': 'http://www.example.com/assertion.json',
                    'assertion_image_url': 'http://www.example.com/image.png',
                    'user_id': self.user.id,
                    'issuer': 'http://www.example.com/issuer.json',
                    'enrollment_mode': 'honor',
                },
            },
            self.get_event()
        )
    # --- request_certificate endpoint ---
    @override_settings(FEATURES=FEATURES_WITH_CERTS_DISABLED)
    def test_request_certificate_without_passing(self):
        self.cert.status = CertificateStatuses.unavailable
        self.cert.save()
        request_certificate_url = reverse('certificates.views.request_certificate')
        response = self.client.post(request_certificate_url, {'course_id': unicode(self.course.id)})
        self.assertEqual(response.status_code, 200)
        response_json = json.loads(response.content)
        self.assertEqual(CertificateStatuses.notpassing, response_json['add_status'])
    @override_settings(FEATURES=FEATURES_WITH_CERTS_DISABLED)
    @override_settings(CERT_QUEUE='test-queue')
    def test_request_certificate_after_passing(self):
        # A passing grade plus a working xqueue should move the certificate
        # into the 'generating' state.
        self.cert.status = CertificateStatuses.unavailable
        self.cert.save()
        request_certificate_url = reverse('certificates.views.request_certificate')
        with patch('capa.xqueue_interface.XQueueInterface.send_to_queue') as mock_queue:
            mock_queue.return_value = (0, "Successfully queued")
            with patch('courseware.grades.grade') as mock_grade:
                mock_grade.return_value = {'grade': 'Pass', 'percent': 0.75}
                response = self.client.post(request_certificate_url, {'course_id': unicode(self.course.id)})
                self.assertEqual(response.status_code, 200)
                response_json = json.loads(response.content)
                self.assertEqual(CertificateStatuses.generating, response_json['add_status'])
    # --- Custom certificate template lookup (org / course / mode matching) ---
    @override_settings(FEATURES=FEATURES_WITH_CUSTOM_CERTS_ENABLED)
    @override_settings(LANGUAGE_CODE='fr')
    def test_certificate_custom_template_with_org_mode_course(self):
        """
        Tests custom template search and rendering.
        This test should check template matching when org={org}, course={course}, mode={mode}.
        """
        self._add_course_certificates(count=1, signatory_count=2)
        self._create_custom_template(org_id=1, mode='honor', course_key=unicode(self.course.id))
        self._create_custom_template(org_id=2, mode='honor')
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        with patch('certificates.api.get_course_organizations') as mock_get_orgs:
            # side_effect returns org 1 on the first request, org 2 on the second
            mock_get_orgs.side_effect = [
                [{"id": 1, "name": "organization name"}],
                [{"id": 2, "name": "organization name 2"}],
            ]
            response = self.client.get(test_url)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'lang: fr')
            self.assertContains(response, 'course name: course_title_0')
            # test with second organization template
            response = self.client.get(test_url)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'lang: fr')
            self.assertContains(response, 'course name: course_title_0')
    @override_settings(FEATURES=FEATURES_WITH_CUSTOM_CERTS_ENABLED)
    def test_certificate_custom_template_with_org(self):
        """
        Tests custom template search if we have a single template for organization and mode
        with course set to Null.
        This test should check template matching when org={org}, course=Null, mode={mode}.
        """
        course = CourseFactory.create(
            org='cstX', number='cst_22', display_name='custom template course'
        )
        self._add_course_certificates(count=1, signatory_count=2)
        self._create_custom_template(org_id=1, mode='honor')
        self._create_custom_template(org_id=1, mode='honor', course_key=course.id)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        with patch('certificates.api.get_course_organizations') as mock_get_orgs:
            mock_get_orgs.side_effect = [
                [{"id": 1, "name": "organization name"}],
            ]
            response = self.client.get(test_url)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'course name: course_title_0')
    @override_settings(FEATURES=FEATURES_WITH_CUSTOM_CERTS_ENABLED)
    def test_certificate_custom_template_with_organization(self):
        """
        Tests custom template search when we have a single template for a organization.
        This test should check template matching when org={org}, course=Null, mode=null.
        """
        self._add_course_certificates(count=1, signatory_count=2)
        self._create_custom_template(org_id=1, mode='honor')
        self._create_custom_template(org_id=1, mode='honor', course_key=self.course.id)
        self._create_custom_template(org_id=2)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        with patch('certificates.api.get_course_organizations') as mock_get_orgs:
            mock_get_orgs.side_effect = [
                [{"id": 2, "name": "organization name 2"}],
            ]
            response = self.client.get(test_url)
            self.assertEqual(response.status_code, 200)
    @override_settings(FEATURES=FEATURES_WITH_CUSTOM_CERTS_ENABLED)
    def test_certificate_custom_template_with_course_mode(self):
        """
        Tests custom template search if we have a single template for a course mode.
        This test should check template matching when org=null, course=Null, mode={mode}.
        """
        mode = 'honor'
        self._add_course_certificates(count=1, signatory_count=2)
        self._create_custom_template(mode=mode)
        test_url = get_certificate_url(
            user_id=self.user.id,
            course_id=unicode(self.course.id)
        )
        with patch('certificates.api.get_course_organizations') as mock_get_orgs:
            mock_get_orgs.return_value = []
            response = self.client.get(test_url)
            self.assertEqual(response.status_code, 200)
            self.assertContains(response, 'mode: {}'.format(mode))
|
TeamWin/android_device_htc_pico | refs/heads/android-4.4 | releasetools/edify_generator.py | 1 | # Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import common
class EdifyGenerator(object):
    """Class to generate scripts in the 'edify' recovery script language
    used from donut onwards."""

    def __init__(self, version, info):
        """Create a generator for the given edify script 'version' using the
        target 'info' dict (provides e.g. "fstab", "use_set_metadata")."""
        self.script = []     # accumulated edify statements, one per entry
        self.mounts = set()  # mount points mounted by this script so far
        self.version = version
        self.info = info

    def MakeTemporary(self):
        """Make a temporary script object whose commands can latter be
        appended to the parent script with AppendScript(). Used when the
        caller wants to generate script commands out-of-order."""
        x = EdifyGenerator(self.version, self.info)
        # the mount set is deliberately shared so mounts recorded by the
        # temporary script are also released by the parent's UnmountAll()
        x.mounts = self.mounts
        return x

    @staticmethod
    def _WordWrap(cmd, linelen=80):
        """'cmd' should be a function call with null characters after each
        parameter (eg, "somefun(foo,\0bar,\0baz)"). This function wraps cmd
        to a given line length, replacing nulls with spaces and/or newlines
        to format it nicely."""
        indent = cmd.index("(")+1
        out = []
        first = True
        x = re.compile("^(.{,%d})\0" % (linelen-indent,))
        while True:
            if not first:
                out.append(" " * indent)
            first = False
            m = x.search(cmd)
            if not m:
                # no null fits within the line length; emit everything up to
                # the next null (or the remainder) verbatim
                parts = cmd.split("\0", 1)
                out.append(parts[0]+"\n")
                if len(parts) == 1:
                    break
                else:
                    cmd = parts[1]
                    continue
            out.append(m.group(1)+"\n")
            cmd = cmd[m.end():]
        return "".join(out).replace("\0", " ").rstrip("\n")

    def AppendScript(self, other):
        """Append the contents of another script (which should be created
        with temporary=True) to this one."""
        self.script.extend(other.script)

    def AssertSomeFingerprint(self, *fp):
        """Assert that the current system build fingerprint is one of *fp."""
        if not fp:
            raise ValueError("must specify some fingerprints")
        cmd = (
            ' ||\n '.join([('file_getprop("/system/build.prop", '
                            '"ro.build.fingerprint") == "%s"')
                           % i for i in fp]) +
            ' ||\n abort("Package expects build fingerprint of %s; this '
            'device has " + getprop("ro.build.fingerprint") + ".");'
            ) % (" or ".join(fp),)
        self.script.append(cmd)

    def AssertOlderBuild(self, timestamp, timestamp_text):
        """Assert that the build on the device is older (or the same as)
        the given timestamp."""
        self.script.append(
            ('(!less_than_int(%s, getprop("ro.build.date.utc"))) || '
             'abort("Can\'t install this package (%s) over newer '
             'build (" + getprop("ro.build.date") + ").");'
             ) % (timestamp, timestamp_text))

    def AssertDevice(self, device):
        """Assert that the device identifier is one of the comma separated
        values in 'device' (checked against both ro.product.device and
        ro.build.product)."""
        cmd = ('(' +
               ' || \0'.join(['getprop("ro.product.device") == "%s" || getprop("ro.build.product") == "%s"'
                              % (i, i) for i in device.split(",")]) +
               ') || abort("This package is for \\"%s\\" devices\n'
               'this is a \\"" + getprop("ro.product.device") + "\\".");'
               ) % (device,)
        self.script.append(self._WordWrap(cmd))

    def AssertSomeBootloader(self, *bootloaders):
        """Assert that the bootloader version is one of *bootloaders."""
        cmd = ("assert(" +
               " ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,)
                             for b in bootloaders]) +
               ");")
        self.script.append(self._WordWrap(cmd))

    def RunBackup(self, command):
        """Extract the backuptool helpers into /tmp and run them with the
        given command; after a "restore" the helpers left in /system are
        deleted again."""
        self.script.append('package_extract_file("system/bin/backuptool.sh", "/tmp/backuptool.sh");')
        self.script.append('package_extract_file("system/bin/backuptool.functions", "/tmp/backuptool.functions");')
        self.script.append('set_perm(0, 0, 0777, "/tmp/backuptool.sh");')
        self.script.append('set_perm(0, 0, 0644, "/tmp/backuptool.functions");')
        self.script.append(('run_program("/tmp/backuptool.sh", "%s");' % command))
        if command == "restore":
            self.script.append('delete("/system/bin/backuptool.sh");')
            self.script.append('delete("/system/bin/backuptool.functions");')

    def ShowProgress(self, frac, dur):
        """Update the progress bar, advancing it over 'frac' over the next
        'dur' seconds. 'dur' may be zero to advance it via SetProgress
        commands instead of by time."""
        self.script.append("show_progress(%f, %d);" % (frac, int(dur)))

    def SetProgress(self, frac):
        """Set the position of the progress bar within the chunk defined
        by the most recent ShowProgress call. 'frac' should be in
        [0,1]."""
        self.script.append("set_progress(%f);" % (frac,))

    def PatchCheck(self, filename, *sha1):
        """Check that the given file (or MTD reference) has one of the
        given *sha1 hashes, checking the version saved in cache if the
        file does not match."""
        self.script.append(
            'apply_patch_check("%s"' % (filename,) +
            "".join([', "%s"' % (i,) for i in sha1]) +
            ') || abort("\\"%s\\" has unexpected contents.");' % (filename,))

    def FileCheck(self, filename, *sha1):
        """Check that the given file (or MTD reference) has one of the
        given *sha1 hashes."""
        self.script.append('assert(sha1_check(read_file("%s")' % (filename,) +
                           "".join([', "%s"' % (i,) for i in sha1]) +
                           '));')

    def CacheFreeSpaceCheck(self, amount):
        """Check that there's at least 'amount' space that can be made
        available on /cache."""
        # NOTE(review): the abort message reports /system while the
        # docstring says /cache — kept as-is (device-specific wording).
        self.script.append(('apply_patch_space(%d) || abort("Not enough free space '
                            'on /system to apply patches.");') % (amount,))

    def Mount(self, mount_point, mount_by_label=False):
        """Mount the partition with the given mount_point, either through
        /sbin/mount (by label) or via an explicit fstab-derived mount()."""
        fstab = self.info.get("fstab", None)
        if fstab:
            p = fstab[mount_point]
            if mount_by_label:
                self.script.append('run_program("/sbin/mount", "%s");' % (mount_point,))
            else:
                self.script.append('mount("%s", "%s", "%s", "%s");' %
                                   (p.fs_type, common.PARTITION_TYPES[p.fs_type],
                                    p.device, p.mount_point))
            self.mounts.add(p.mount_point)

    def Unmount(self, mount_point):
        """Unmount the partition with the given mount_point."""
        if mount_point in self.mounts:
            self.mounts.remove(mount_point)
            self.script.append('unmount("%s");' % (mount_point,))

    def UnpackPackageDir(self, src, dst):
        """Unpack a given directory from the OTA package into the given
        destination directory."""
        self.script.append('package_extract_dir("%s", "%s");' % (src, dst))

    def Comment(self, comment):
        """Write a comment into the update script."""
        self.script.append("")
        for i in comment.split("\n"):
            self.script.append("# " + i)
        self.script.append("")

    def Print(self, message):
        """Log a message to the screen (if the logs are visible)."""
        self.script.append('ui_print("%s");' % (message,))

    def FormatPartition(self, partition, mount_by_label=False):
        """Format the given partition, specified by its mount point (eg,
        "/system")."""
        fstab = self.info.get("fstab", None)
        if fstab:
            p = fstab[partition]
            if mount_by_label:
                # bug fix: this used to call self.script.mount(...), which
                # raises AttributeError (self.script is a plain list); the
                # intent is to mount the partition before wiping it.
                if not p.mount_point in self.mounts:
                    self.Mount(partition, mount_by_label=True)
                self.script.append('run_program("/sbin/rm", "-rf", "%s");' % (p.mount_point,))
            else:
                self.script.append('format("%s", "%s", "%s", "%s", "%s");' %
                                   (p.fs_type, common.PARTITION_TYPES[p.fs_type],
                                    p.device, p.length, p.mount_point))

    def DeleteFiles(self, file_list):
        """Delete all files in file_list."""
        if not file_list: return
        cmd = "delete(" + ",\0".join(['"%s"' % (i,) for i in file_list]) + ");"
        self.script.append(self._WordWrap(cmd))

    def RenameFile(self, srcfile, tgtfile):
        """Moves a file from one location to another (no-op unless the
        target declares update_rename_support)."""
        if self.info.get("update_rename_support", False):
            self.script.append('rename("%s", "%s");' % (srcfile, tgtfile))

    def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
        """Apply binary patches (in *patchpairs) to the given srcfile to
        produce tgtfile (which may be "-" to indicate overwriting the
        source file."""
        if len(patchpairs) % 2 != 0 or len(patchpairs) == 0:
            raise ValueError("bad patches given to ApplyPatch")
        cmd = ['apply_patch("%s",\0"%s",\0%s,\0%d'
               % (srcfile, tgtfile, tgtsha1, tgtsize)]
        for i in range(0, len(patchpairs), 2):
            cmd.append(',\0%s, package_extract_file("%s")' % patchpairs[i:i+2])
        cmd.append(');')
        cmd = "".join(cmd)
        self.script.append(self._WordWrap(cmd))

    def WriteRawImage(self, mount_point, fn):
        """Write the given package file into the partition for the given
        mount point."""
        fstab = self.info["fstab"]
        if fstab:
            p = fstab[mount_point]
            partition_type = common.PARTITION_TYPES[p.fs_type]
            args = {'device': p.device, 'fn': fn}
            if partition_type == "MTD":
                self.script.append(
                    ('assert(package_extract_file("%(fn)s", "/tmp/%(device)s.img"),\n'
                     '       write_raw_image("/tmp/%(device)s.img", "%(device)s"),\n'
                     '       delete("/tmp/%(device)s.img"));') % args)
            elif partition_type == "EMMC":
                # EMMC devices can be written directly
                self.script.append(
                    'package_extract_file("%(fn)s", "%(device)s");' % args)
            elif partition_type == "BML":
                self.script.append(
                    ('assert(package_extract_file("%(fn)s", "/tmp/%(device)s.img"),\n'
                     '       write_raw_image("/tmp/%(device)s.img", "%(device)s"),\n'
                     '       delete("/tmp/%(device)s.img"));') % args)
            else:
                raise ValueError("don't know how to write \"%s\" partitions" % (p.fs_type,))

    def SetPermissions(self, fn, uid, gid, mode, selabel, capabilities):
        """Set file ownership and permissions."""
        if not self.info.get("use_set_metadata", False):
            self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn))
        else:
            if capabilities is None: capabilities = "0x0"
            cmd = 'set_metadata("%s", "uid", %d, "gid", %d, "mode", 0%o, ' \
                '"capabilities", %s' % (fn, uid, gid, mode, capabilities)
            if selabel is not None:
                cmd += ', "selabel", "%s"' % (selabel,)
            cmd += ');'
            self.script.append(cmd)

    def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode, selabel, capabilities):
        """Recursively set path ownership and permissions."""
        if not self.info.get("use_set_metadata", False):
            self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");'
                               % (uid, gid, dmode, fmode, fn))
        else:
            if capabilities is None: capabilities = "0x0"
            cmd = 'set_metadata_recursive("%s", "uid", %d, "gid", %d, ' \
                '"dmode", 0%o, "fmode", 0%o, "capabilities", %s' \
                % (fn, uid, gid, dmode, fmode, capabilities)
            if selabel is not None:
                cmd += ', "selabel", "%s"' % (selabel,)
            cmd += ');'
            self.script.append(cmd)

    def MakeSymlinks(self, symlink_list):
        """Create symlinks, given a list of (dest, link) pairs."""
        by_dest = {}
        for d, l in symlink_list:
            by_dest.setdefault(d, []).append(l)
        for dest, links in sorted(by_dest.iteritems()):
            cmd = ('symlink("%s", ' % (dest,) +
                   ",\0".join(['"' + i + '"' for i in sorted(links)]) + ");")
            self.script.append(self._WordWrap(cmd))

    def AppendExtra(self, extra):
        """Append text verbatim to the output script."""
        self.script.append(extra)

    def UnmountAll(self):
        """Unmount every mount point recorded in self.mounts."""
        for p in sorted(self.mounts):
            self.script.append('unmount("%s");' % (p,))
        self.mounts = set()

    def AddToZip(self, input_zip, output_zip, input_path=None):
        """Write the accumulated script to the output_zip file. input_zip
        is used as the source for the 'updater' binary needed to run
        script. If input_path is not None, it will be used as a local
        path for the binary instead of input_zip."""
        self.UnmountAll()
        common.ZipWriteStr(output_zip, "META-INF/com/google/android/updater-script",
                           "\n".join(self.script) + "\n")
        if input_path is None:
            data = input_zip.read("OTA/bin/updater")
        else:
            data = open(os.path.join(input_path, "updater")).read()
        # 0o755 (was 0755): same octal value, but valid on Python 2.6+ and 3
        common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
                           data, perms=0o755)
|
anthonyfok/frescobaldi | refs/heads/master | frescobaldi_app/qpopplerview/kineticscrollarea.py | 1 | # This file is part of the qpopplerview package.
#
# Copyright (c) 2010 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
KineticScrollArea widget to provide kinetic scrolling moves.
"""
from PyQt5.QtCore import QPoint, QBasicTimer, QEvent, Qt, pyqtSignal
from PyQt5.QtGui import QCursor, QMouseEvent
from PyQt5.QtWidgets import QScrollArea, QApplication
from math import sqrt
import copy
# most used keyboard modifiers
_SCAM = (Qt.SHIFT | Qt.CTRL | Qt.ALT | Qt.META)
def deaccelerate(speed, a=1, maxVal=64):
    """Return a copy of the QPoint *speed* clamped to +/- maxVal and then
    reduced by *a* toward zero on each axis."""
    def step(v):
        # clamp first, then move the component one step closer to zero
        v = qBound(-maxVal, v, maxVal)
        if v > 0:
            return max(0, v - a)
        if v < 0:
            return min(0, v + a)
        return v
    return QPoint(step(speed.x()), step(speed.y()))
def qBound(minVal, current, maxVal):
    """Clamp *current* into the range [minVal, maxVal] (mirrors Qt's qBound)."""
    # cap at the maximum first, then enforce the minimum — same composition
    # as max(min(current, maxVal), minVal)
    if current > maxVal:
        current = maxVal
    return current if current >= minVal else minVal
# Centralize data for kinetic scrolling
class KineticData:
    """Bundle of mutable state shared by the kinetic scrolling machinery."""

    # state machine values
    Steady = 0
    Pressed = 1
    ManualScroll = 2
    AutoScroll = 3
    Stop = 4

    def __init__(self):
        self._state = KineticData.Steady
        self._pressPos = QPoint(0, 0)   # where the mouse was pressed
        self._offset = QPoint(0, 0)     # scroll offset at press time
        self._dragPos = QPoint(0, 0)    # last drag reference position
        self._speed = QPoint(0, 0)      # current kinetic speed
        self._maxSpeed = 64             # speed cap for interactive scrolling
        self._ignored = []              # synthesized events to skip once
        self._ticker = QBasicTimer()

    def ignoreEvent(self, ev):
        """Return True (and forget it) when *ev* is one of the events we
        posted ourselves and should therefore be ignored by the handlers."""
        kept = [event for event in self._ignored if event != ev]
        if len(kept) == len(self._ignored):
            return False
        self._ignored = kept
        return True
import array
class KineticScrollArea(QScrollArea):
    """QScrollArea adding kinetic (inertial) scrolling on top of the
    regular scrolling behaviour."""

    # signal emitted when kinetic scrolling starts/stops, to make it possible
    # to shut down some event listeners until we're done.
    kineticScrollingActive = pyqtSignal(bool)
    cursorNeedUpdate = pyqtSignal(QPoint)

    def __init__(self, parent=None):
        super(KineticScrollArea, self).__init__(parent)
        # kinetic scrolling
        self._kineticScrollingEnabled = False
        self._scrollFuncIndex = 0
        # Functions pointers, index 0 -> kinetic, index 1 -> classic.
        self._scrollBy = [
            self.kineticScrollBy,
            self.fastScrollBy
        ]
        self._center = [
            self.kineticCenter,
            self.fastCenter
        ]
        self._ensureVisible = [
            self.kineticEnsureVisible,
            super(KineticScrollArea, self).ensureVisible
        ]
        self._scrollbarsVisible = True
        self._kineticData = KineticData()
        self._dragging = False

    def setScrollbarsVisible(self, enabled):
        """Sets the scrollbars visibility status."""
        self._scrollbarsVisible = enabled
        if enabled:
            self.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
            self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        else:
            self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
            self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)

    def scrollbarsVisible(self):
        """Returns the current scrollbars visibility status"""
        return self._scrollbarsVisible

    def setKineticScrolling(self, enabled):
        """Sets whether kinetic scrolling is enabled or not."""
        self._kineticScrollingEnabled = enabled
        if enabled:
            self._scrollFuncIndex = 0
        else:
            self._scrollFuncIndex = 1

    def kineticScrollingEnabled(self):
        """Returns whether kinetic scrolling is enabled."""
        return self._kineticScrollingEnabled

    def kineticIsIdle(self):
        """Returns True when no kinetic move is in progress."""
        return self._kineticData._state == KineticData.Steady

    def scrollOffset(self):
        """Get the current scroll position."""
        x = self.horizontalScrollBar().value()
        y = self.verticalScrollBar().value()
        return QPoint(x, y)

    def setScrollOffset(self, p):
        """Set the current scroll position. Returns true if at last one of (x,y) was really modified."""
        start_p = self.scrollOffset()
        self.horizontalScrollBar().setValue(p.x())
        self.verticalScrollBar().setValue(p.y())
        # return true if at least one coordinate specified was respected and requested move was not 0.
        end_p = self.scrollOffset()
        return (start_p.x() != p.x() and end_p.x() == p.x()) or (start_p.y() != p.y() and end_p.y() == p.y())

    def fastScrollBy(self, diff):
        """Immediately Scrolls by the distance given in the QPoint diff."""
        v = self.verticalScrollBar()
        h = self.horizontalScrollBar()
        v.setValue(v.value() + diff.y())
        h.setValue(h.value() + diff.x())

    def kineticScrollBy(self, diff):
        """Kinetically Scrolls by the distance given in the QPoint diff."""
        v = self.verticalScrollBar()
        h = self.horizontalScrollBar()
        self.kineticMove(h.value(), v.value(), h.value()+diff.x(), v.value()+diff.y())

    def scrollBy(self, diff):
        """Scrolls by the distance given in the QPoint diff.
        Scrolling will either be immediate or kinetic.
        """
        self._scrollBy[self._scrollFuncIndex](diff)

    def fastCenter(self, point):
        """Immediately center the view on the given QPoint"""
        diff = point - self.viewport().rect().center() + self.widget().pos()
        self.fastScrollBy(diff)

    def kineticCenter(self, point):
        """Kinetically center the view on the given QPoint"""
        size = self.widget().viewportRect().size()
        self.kineticEnsureVisible(point.x(), point.y(),
                                  size.width() // 2, size.height() // 2)

    def center(self, point, overrideKinetic=False):
        """Centers the given QPoint of the widget.
        Centering will either be immediate or kinetic."""
        # NOTE(review): overrideKinetic is currently unused; kept for API
        # compatibility with existing callers.
        self._center[self._scrollFuncIndex](point)

    def kineticMove(self, oldx, oldy, newx, newy):
        """Start a kinetic move from (oldx, oldy) to (newx, newy)"""
        if newx == oldx and newy == oldy:
            return
        speed = QPoint(0, 0)
        # solve speed*(speed+1)/2 = delta to ensure 1+2+3+...+speed is as close as possible under delta..
        speed.setX((sqrt(1+8*abs(newx-oldx))-1)/2)
        speed.setY((sqrt(1+8*abs(newy-oldy))-1)/2)
        # compute the amount of displacement still needed because we're dealing with integer values.
        diff = QPoint(0, 0)
        diff.setX((speed.x() * (speed.x() + 1) // 2) - abs(newx - oldx))
        diff.setY((speed.y() * (speed.y() + 1) // 2) - abs(newy - oldy))
        # Since this function is called for exact moves (not free scrolling)
        # limit the kinetic time to 2 seconds, which means 100 ticks, 5050 pixels.
        if speed.y() > 100:
            speed.setY(100)
            diff.setY(-abs(newy-oldy) + 5050)
        # Although it is less likely to go beyond that limit for horizontal scrolling,
        # do it for x as well.
        if speed.x() > 100:
            speed.setX(100)
            diff.setX(-abs(newx-oldx) + 5050)
        # move left or right, up or down
        if newx > oldx:
            speed.setX(-speed.x())
            diff.setX(-diff.x())
        if newy > oldy:
            speed.setY(-speed.y())
            diff.setY(-diff.y())
        # move immediately by the step that cannot be handled by kinetic scrolling.
        # By construction that step is smaller that the initial speed value.
        self.fastScrollBy(diff)
        self.kineticStart(speed)

    def kineticAddDelta(self, delta):
        """Add a kinetic delta to an already started kinetic move.
        Delta is a QPoint, with respectively the changes in x and y position.
        """
        def compute_speed(s, d):
            if d:
                # Get the remaining scroll amount.
                currentSpeed = abs(s)
                leftToScroll = (currentSpeed + 1) * currentSpeed // 2
                if s < 0:
                    leftToScroll *= -1
                leftToScroll += d
                s = (sqrt(1+8*abs(leftToScroll))-1)/2
                if leftToScroll < 0:
                    s = -s
            return s
        speed_x = compute_speed(self._kineticData._speed.x(), delta.x())
        speed_y = compute_speed(self._kineticData._speed.y(), delta.y())
        speed = QPoint(speed_x, speed_y)
        self.kineticStart(speed)

    def kineticStart(self, speed):
        """Start kinetic scrolling with a given speed. Speed will be decremented periodically
        until scrolling halts."""
        # Setup the kinetic displacement speed, removing the speed limit imposed on
        # interactive scrolling.
        self._kineticData._speed = speed
        # speed limit is one above speed, to make sure there will be none.
        self._kineticData._maxSpeed = max(abs(speed.x()), abs(speed.y())) + 1
        # Set kinetic state to AutoScroll, the reference position to the current view center,
        # and fire the timer.
        self._kineticData._state = KineticData.AutoScroll
        self._kineticData._dragPos = self.pos()
        if not self._kineticData._ticker.isActive():
            self._kineticData._ticker.start(20, self)
            self.kineticScrollingActive.emit(True)

    def kineticTicksLeft(self):
        """Return the number of ticks left on the kinetic counter."""
        if (self._kineticData._state == KineticData.AutoScroll
                or self._kineticData._state == KineticData.ManualScroll):
            return max(abs(self._kineticData._speed.x()), abs(self._kineticData._speed.y()))
        return 0

    def kineticEnsureVisible(self, x, y, xm, ym):
        """Ensure a given point is visible, with a margin, by starting the appropriate kinetic scrolling."""
        # Replicate the logic in ScrollArea::ensureVisible to compute the
        # scrollbar displacements, per Qt sources.
        oldx = self.horizontalScrollBar().value()
        oldy = self.verticalScrollBar().value()
        newx = oldx
        if x-xm < oldx:
            newx = max(0, x - xm)
        elif x > oldx + self.viewport().width() - xm:
            # bug fix: this previously clamped against the *vertical*
            # scrollbar's maximum; horizontal movement must clamp against
            # the horizontal scrollbar's maximum.
            newx = min(x - self.viewport().width() + xm, self.horizontalScrollBar().maximum())
        newy = oldy
        if y-ym < oldy:
            newy = max(0, y - ym)
        elif y > oldy + self.viewport().height() - ym:
            newy = min(y - self.viewport().height() + ym, self.verticalScrollBar().maximum())
        self.kineticMove(oldx, oldy, newx, newy)

    def ensureVisible(self, x, y, xm=50, ym=50):
        """
        Reimplement ensureVisible to call the kinetic scroller timer if kinetic scrolling is enabled.
        """
        self._ensureVisible[self._scrollFuncIndex](x, y, xm, ym)

    def wheelEvent(self, ev):
        """Kinetic wheel movements, if enabled."""
        if self._kineticScrollingEnabled:
            self.kineticAddDelta(ev.angleDelta())
        else:
            super(KineticScrollArea, self).wheelEvent(ev)

    def keyPressEvent(self, ev):
        """Kinetic cursor movements, if enabled."""
        if self._kineticScrollingEnabled:
            if ev.key() == Qt.Key_PageDown:
                self.kineticAddDelta(QPoint(0, -self.verticalScrollBar().pageStep()))
                return
            elif ev.key() == Qt.Key_PageUp:
                self.kineticAddDelta(QPoint(0, self.verticalScrollBar().pageStep()))
                return
            elif ev.key() == Qt.Key_Down:
                self.kineticAddDelta(QPoint(0, -self.verticalScrollBar().singleStep()))
                return
            elif ev.key() == Qt.Key_Up:
                self.kineticAddDelta(QPoint(0, self.verticalScrollBar().singleStep()))
                return
            elif ev.key() == Qt.Key_Home:
                self.kineticMove(0, self.verticalScrollBar().value(), 0, 0)
                return
            elif ev.key() == Qt.Key_End:
                self.kineticMove(0, self.verticalScrollBar().value(), 0, self.verticalScrollBar().maximum())
                return
        else:
            # Home/End are not handled by default.
            if ev.key() == Qt.Key_Home:
                self.setScrollOffset(QPoint(0, 0))
                return
            elif ev.key() == Qt.Key_End:
                self.setScrollOffset(QPoint(self.horizontalScrollBar().maximum(), self.verticalScrollBar().maximum()))
                return
        super(KineticScrollArea, self).keyPressEvent(ev)

    def mousePressEvent(self, ev):
        """Handle mouse press for dragging start/stop."""
        if ev.button() == Qt.LeftButton:
            self._dragPos = ev.globalPos()
            if self._kineticScrollingEnabled:
                # kinetic scrolling
                if self._kineticData.ignoreEvent(ev):
                    return
                if self._kineticData._state == KineticData.Steady or self._kineticData._state == KineticData.Stop:
                    self._dragging = True
                    self._kineticData._state = KineticData.Pressed
                    self._kineticData._pressPos = copy.copy(ev.pos())
                    self._kineticData._offset = self.scrollOffset()
                    self._kineticData._maxSpeed = 64  # limit speed.
                elif self._kineticData._state == KineticData.AutoScroll:
                    # a press while auto-scrolling stops the move
                    self._dragging = True
                    self._kineticData._state = KineticData.Stop
                    self._kineticData._speed = QPoint(0, 0)
            else:
                self._dragging = True
        super(KineticScrollArea, self).mousePressEvent(ev)

    def mouseReleaseEvent(self, ev):
        """Handle mouse release events for kinetic dragging end/auto mode."""
        if self._dragging:
            self._dragging = False
            self.unsetCursor()
        if self._kineticScrollingEnabled:
            # kinetic scrolling
            if self._kineticData.ignoreEvent(ev):
                return
            if self._kineticData._state == KineticData.Pressed:
                # simple click: re-post the press/release pair so the normal
                # (non-kinetic) handlers can process them, flagging both to
                # be ignored by this class.
                self._kineticData._state = KineticData.Steady
                event1 = QMouseEvent(QEvent.MouseButtonPress,
                                     self._kineticData._pressPos, Qt.LeftButton,
                                     Qt.LeftButton, Qt.NoModifier)
                event2 = QMouseEvent(ev)
                self._kineticData._ignored.append(event1)
                self._kineticData._ignored.append(event2)
                QApplication.postEvent(self, event1)
                QApplication.postEvent(self, event2)
            elif self._kineticData._state == KineticData.ManualScroll:
                # release after dragging: let the motion continue kinetically
                self._kineticData._state = KineticData.AutoScroll
            elif self._kineticData._state == KineticData.AutoScroll:
                self._kineticData._state = KineticData.Stop
                self._kineticData._speed = QPoint(0, 0)
            elif self._kineticData._state == KineticData.Stop:
                self._kineticData._state = KineticData.Steady
        if self._kineticData._state == KineticData.Steady:
            self.cursorNeedUpdate.emit(ev.globalPos())
        super(KineticScrollArea, self).mouseReleaseEvent(ev)

    def mouseMoveEvent(self, ev):
        """Handle mouse move events for kinetic dragging timer firing.
        Notifies cursor needs update if no kinetic move is active.
        """
        if self._dragging:
            self.setCursor(Qt.SizeAllCursor)
            if self._kineticScrollingEnabled:
                # kinetic scrolling
                if self._kineticData.ignoreEvent(ev):
                    return
                if self._kineticData._state == KineticData.Pressed:
                    self._kineticData._state = KineticData.ManualScroll
                    self._kineticData._dragPos = QCursor.pos()
                    if not self._kineticData._ticker.isActive():
                        self._kineticData._ticker.start(20, self)
                        self.kineticScrollingActive.emit(True)
                elif self._kineticData._state == KineticData.ManualScroll:
                    diff = self._dragPos - ev.globalPos()
                    self._dragPos = ev.globalPos()
                    self.fastScrollBy(diff)
                elif self._kineticData._state == KineticData.Stop:
                    self._kineticData._state = KineticData.ManualScroll
                    self._kineticData._dragPos = QCursor.pos()
                    if not self._kineticData._ticker.isActive():
                        self._kineticData._ticker.start(20, self)
                        self.kineticScrollingActive.emit(True)
            else:
                diff = self._dragPos - ev.globalPos()
                self._dragPos = ev.globalPos()
                self.fastScrollBy(diff)
        super(KineticScrollArea, self).mouseMoveEvent(ev)
        if self.kineticIsIdle():
            self.cursorNeedUpdate.emit(ev.globalPos())

    def moveEvent(self, ev):
        """Move event handler. Passes the event to the base class and notify the cursor needs update."""
        super(KineticScrollArea, self).moveEvent(ev)
        if self.kineticIsIdle():
            self.cursorNeedUpdate.emit(QCursor.pos())

    def timerEvent(self, event):
        """Handle events sent by the kinetic timer to decrease progressively
        the scrolling speed, eventually halting it.
        """
        count = 0
        if self._kineticData._state == KineticData.ManualScroll:
            count += 1
            cursorPos = QCursor.pos()
            self._kineticData._speed = cursorPos - self._kineticData._dragPos
            self._kineticData._dragPos = cursorPos
        elif self._kineticData._state == KineticData.AutoScroll:
            count += 1
            p = self.scrollOffset()
            if self._kineticData._speed == QPoint(0, 0) or not self.setScrollOffset(p - self._kineticData._speed):
                self._kineticData._state = KineticData.Steady
                # reset speed to 0, as wheel scrolling accumulates speed instead of setting it to a fixed value.
                self._kineticData._speed = QPoint(0, 0)
                # reset count to 0 to stop iterating.
                count = 0
            self._kineticData._speed = deaccelerate(self._kineticData._speed, 1, self._kineticData._maxSpeed)
        if count == 0:
            self._kineticData._ticker.stop()
            self.kineticScrollingActive.emit(False)
        super(KineticScrollArea, self).timerEvent(event)
|
hivesolutions/appier | refs/heads/master | src/appier/validation.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Appier Framework
# Copyright (c) 2008-2021 Hive Solutions Lda.
#
# This file is part of Hive Appier Framework.
#
# Hive Appier Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Appier Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2021 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import re
import copy
import datetime
from . import util
from . import common
from . import legacy
from . import exceptions
# raw strings are used so that sequences like "\:" and "\." are not treated
# as (invalid) string escapes — a DeprecationWarning/SyntaxWarning on
# modern Python; the resulting pattern values are byte-identical
SIMPLE_REGEX_VALUE = r"^[\:\.\s\w-]+$"
""" The simple regex value used to validate
if the provided value is a "simple" one meaning
that it may be used safely for URL parts """

EMAIL_REGEX_VALUE = r"^[\w\d\._%+-]+@[\w\d\.\-]+$"
""" The email regex value used to validate
if the provided value is in fact an email """

URL_REGEX_VALUE = r"^\w+\:\/\/([^@]+\:[^@]+@)?[^\:\/\?#]+(\:\d+)?(\/[^\?#]+)*\/?(\?[^#]*)?(#.*)?$"
""" The URL regex value used to validate
if the provided value is in fact an URL/URI """

SIMPLE_REGEX = re.compile(SIMPLE_REGEX_VALUE)
""" The simple regex used to validate
if the provided value is a "simple" one meaning
that it may be used safely for URL parts """

EMAIL_REGEX = re.compile(EMAIL_REGEX_VALUE)
""" The email regex used to validate
if the provided value is in fact an email """

URL_REGEX = re.compile(URL_REGEX_VALUE)
""" The URL regex used to validate
if the provided value is in fact an URL/URI """
def validate(method = None, methods = [], object = None, ctx = None, build = True):
    """Runs a set of validation methods against an object optionally built
    from the current request's payload.

    Returns a tuple with a map associating field names with their error
    messages and the (possibly populated) object that was validated.
    """
    # the request provides the JSON/form/file payloads used when building
    # the object to be validated
    request = common.base().get_request()

    # an extra level of indirection: a callable may generate the effective
    # sequence of validation methods
    if method:
        methods = method()

    # validate against a shallow copy so the caller's object is untouched;
    # fall back to a fresh map for pure form based validation
    object = copy.copy(object) if object else {}

    if build:
        # merge the JSON body (gracefully empty on parse failure) and the
        # request's files, post and query parameters into the object, in
        # increasing order of precedence
        sources = (
            util.request_json(),
            request.files_s,
            request.post_s,
            request.params_s
        )
        for source in sources:
            for name, value in source.items():
                object[name] = value

    # runs every validation method, gathering the (name, message) pairs
    # carried by the validation exceptions they may raise
    errors = []
    for _method in methods:
        try:
            _method(object, ctx = ctx)
        except exceptions.ValidationMultipleError as error:
            errors.extend(error.errors)
        except exceptions.ValidationInternalError as error:
            errors.append((error.name, error.message))

    # groups the gathered error messages by their field name
    errors_map = {}
    for name, message in errors:
        errors_map.setdefault(name, []).append(message)

    # returns both the errors map that associates each field name with its
    # sequence of errors and the validated object (state)
    return errors_map, object
def validate_b(method = None, methods = [], object = None, ctx = None, build = True):
    """Boolean variant of validate(): returns True when no validation
    errors occurred and False otherwise."""
    errors_map, _object = validate(
        method = method,
        methods = methods,
        object = object,
        ctx = ctx,
        build = build
    )
    return not errors_map
def validate_e(method = None, methods = [], object = None, ctx = None, build = True):
    """Exception variant of validate(): raises a ValidationError carrying
    the errors map and validated object on any validation failure."""
    errors_map, object = validate(
        method = method,
        methods = methods,
        object = object,
        ctx = ctx,
        build = build
    )
    if errors_map:
        raise exceptions.ValidationError(errors_map, object)
def safe(comparision):
    """Evaluates the provided callable, converting a TypeError (e.g. from
    comparing incompatible types) into a False result."""
    try:
        return comparision()
    except TypeError:
        return False
def eq(name, value_c, message = "must be equal to %s", locale = True):
    """Builds a validator asserting that object[name] equals value_c;
    unset/None values are accepted."""
    def validation(object, ctx):
        value = object.get(name, None)
        # note: '==' (not 'is') kept on purpose to preserve the original
        # comparison semantics for custom types
        if value == None or value == value_c:
            return True
        message_l = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, message_l % str(value_c))
    return validation
def gt(name, value_c, message = "must be greater than %s", locale = True):
    """Builds a validator asserting that object[name] is strictly greater
    than value_c; unset/None values are accepted."""
    def validation(object, ctx):
        value = object.get(name, None)
        if value == None or safe(lambda: value > value_c):
            return True
        message_l = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, message_l % str(value_c))
    return validation
def gte(name, value_c, message = "must be greater than or equal to %s", locale = True):
    """Builds a validator asserting that object[name] is greater than or
    equal to value_c; unset/None values are accepted."""
    def validation(object, ctx):
        value = object.get(name, None)
        if value == None or safe(lambda: value >= value_c):
            return True
        message_l = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, message_l % str(value_c))
    return validation
def lt(name, value_c, message = "must be less than %s", locale = True):
    """Validator factory: field *name*, when set, must be strictly less than *value_c*."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset values pass; safe() maps incomparable types to failure
        if current == None or safe(lambda: current < value_c):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % str(value_c))
    return validation
def lte(name, value_c, message = "must be less than or equal to %s", locale = True):
    """Validator factory: field *name*, when set, must be <= *value_c*."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset values pass; safe() maps incomparable types to failure
        if current == None or safe(lambda: current <= value_c):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % str(value_c))
    return validation
def not_null(name, message = "value is not set", locale = True):
    """Validator factory: field *name* must be present and not None."""
    def validation(object, ctx):
        # any value that does not compare equal to None is accepted
        if not object.get(name, None) == None:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def not_empty(name, message = "value is empty", locale = True):
    """Validator factory: field *name*, when set, must have non-zero length."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset values pass; otherwise the value must be non-empty
        if current == None or len(current):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def not_false(name, message = "value is false", locale = True):
    """Validator factory: field *name*, when set, must not compare equal to False.

    Note that ``0 == False`` in Python, so a zero value is rejected too
    (same comparison semantics as the original ``==`` based check).
    """
    def validation(object, ctx):
        current = object.get(name, None)
        if current == None or not current == False:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_in(name, values, message = "value is not in set", locale = True):
    """Validator factory: field *name*, when set, must be a member of *values*."""
    def validation(object, ctx):
        current = object.get(name, None)
        if current == None or current in values:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_upper(name, message = "value contains lower cased characters", locale = True):
    """Validator factory: field *name*, when set, must be fully upper case."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values pass; otherwise upper-casing must be a no-op
        if current in (None, "") or current == current.upper():
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_lower(name, message = "value contains upper cased characters", locale = True):
    """Validator factory: field *name*, when set, must be fully lower case."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values pass; otherwise lower-casing must be a no-op
        if current in (None, "") or current == current.lower():
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_simple(name, message = "value contains invalid characters", locale = True):
    """Validator factory: field *name*, when set, must match the module's SIMPLE_REGEX."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, ""):
            return True
        if SIMPLE_REGEX.match(current):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_email(name, message = "value is not a valid email", locale = True):
    """Validator factory: field *name*, when set, must match the module's EMAIL_REGEX."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, ""):
            return True
        if EMAIL_REGEX.match(current):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_url(name, message = "value is not a valid URL", locale = True):
    """Validator factory: field *name*, when set, must match the module's URL_REGEX."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, ""):
            return True
        if URL_REGEX.match(current):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def is_regex(name, regex, message = "value has incorrect format", locale = True):
    """Validator factory: field *name*, when set, must match *regex* at its start."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, ""):
            return True
        if re.match(regex, current):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def field_eq(name, field, message = "must be equal to %s", locale = True):
    """Validator factory: field *name* must equal sibling field *field* (either unset passes)."""
    def validation(object, ctx):
        left = object.get(name, None)
        right = object.get(field, None)
        if left == None or right == None or left == right:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % field)
    return validation
def field_gt(name, field, message = "must be greater than %s", locale = True):
    """Validator factory: field *name* must be strictly greater than field *field*."""
    def validation(object, ctx):
        left = object.get(name, None)
        right = object.get(field, None)
        # either field unset passes; safe() maps incomparable types to failure
        if left == None or right == None or safe(lambda: left > right):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % field)
    return validation
def field_gte(name, field, message = "must be greater or equal than %s", locale = True):
    """Validator factory: field *name* must be >= field *field*."""
    def validation(object, ctx):
        left = object.get(name, None)
        right = object.get(field, None)
        # either field unset passes; safe() maps incomparable types to failure
        if left == None or right == None or safe(lambda: left >= right):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % field)
    return validation
def field_lt(name, field, message = "must be less than %s", locale = True):
    """Validator factory: field *name* must be strictly less than field *field*."""
    def validation(object, ctx):
        left = object.get(name, None)
        right = object.get(field, None)
        # either field unset passes; safe() maps incomparable types to failure
        if left == None or right == None or safe(lambda: left < right):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % field)
    return validation
def field_lte(name, field, message = "must be less or equal than %s", locale = True):
    """Validator factory: field *name* must be <= field *field*."""
    def validation(object, ctx):
        left = object.get(name, None)
        right = object.get(field, None)
        # either field unset passes; safe() maps incomparable types to failure
        if left == None or right == None or safe(lambda: left <= right):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % field)
    return validation
def string_gt(name, size, message = "must be larger than %d characters", locale = True):
    """Validator factory: field *name*, when set, must be longer than *size* characters."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, "") or len(current) > size:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % size)
    return validation
def string_lt(name, size, message = "must be smaller than %d characters", locale = True):
    """Validator factory: field *name*, when set, must be shorter than *size* characters."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, "") or len(current) < size:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % size)
    return validation
def string_eq(name, size, message = "must be exactly %d characters", locale = True):
    """Validator factory: field *name*, when set, must be exactly *size* characters long."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, "") or len(current) == size:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final % size)
    return validation
def equals(first_name, second_name, message = "value is not equal to %s", locale = True):
    """Validator factory: fields *first_name* and *second_name* must be equal (either unset passes)."""
    def validation(object, ctx):
        first = object.get(first_name, None)
        second = object.get(second_name, None)
        if first == None or second == None or first == second:
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(first_name, final % second_name)
    return validation
def not_past(name, message = "date is in the past", locale = True):
    """Validator factory: field *name*, when set, must not be earlier than the current UTC time."""
    def validation(object, ctx):
        current = object.get(name, None)
        # unset values pass; otherwise the date must be now or later
        if current == None or current >= datetime.datetime.utcnow():
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def not_duplicate(name, collection, message = "value is duplicate", locale = True):
    """Validator factory: field *name* must be unique inside *collection*.

    A match on the object's own record (same ``_id``) is not a duplicate,
    so updates that keep the value unchanged still validate.
    """
    def validation(object, ctx):
        _id = object.get("_id", None)
        current = object.get(name, None)
        # unset/empty values are considered valid
        if current in (None, ""):
            return True
        # look the value up in the target collection via the adapter
        adapter = common.base().get_adapter()
        target = adapter.collection(collection)
        item = target.find_one({name : current})
        if not item:
            return True
        # a hit on the object's own record does not count as a duplicate
        if str(item["_id"]) == str(_id):
            return True
        final = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, final)
    return validation
def all_different(name, name_ref = None, message = "has duplicates", locale = True):
    """Validator factory: the sequence stored under *name* must contain
    no duplicate entries.

    For relation sequences the comparison key is the referenced name of
    the relation type (``type._name``) or the explicit *name_ref*,
    defaulting to ``id``.
    """
    def validation(object, ctx):
        # uses the currently provided context (the model class) to retrieve
        # the definition of the name to be validated and, if it is a valid
        # relation type, tries to retrieve the underlying referenced name,
        # otherwise defaults to the provided one or the "id" name
        cls = ctx.__class__
        definition = cls.definition_n(name)
        type = definition.get("type", legacy.UNICODE)
        _name_ref = name_ref or (hasattr(type, "_name") and type._name or "id")
        # tries to retrieve the values of the sequence that is going to be
        # used for the all-different matching; an unset or empty sequence
        # is immediately considered valid
        value = object.get(name, None)
        if value == None: return True
        if len(value) == 0: return True
        # verifies if the sequence is in fact a proxy object exposing an
        # "ids" attribute; in that case the ids are used as the sequence
        if hasattr(value, "ids"): values = value.ids
        # otherwise this is a normal sequence and each item is resolved to
        # its reference name when available, or used as a concrete value
        else: values = [getattr(_value, _name_ref) if hasattr(_value, _name_ref) else _value\
            for _value in value]
        # builds a set from the sequence of values: equal sizes mean the
        # sequence contains no duplicates
        # NOTE(review): this compares len(value), not len(values) — for
        # proxy objects with an "ids" attribute these could in principle
        # differ; confirm that is intended
        values_set = set(values)
        if len(value) == len(values_set): return True
        message_l = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, message_l)
    return validation
def no_self(name, name_ref = None, message = "contains self", locale = True):
    """Validator factory: the relation sequence stored under *name* must
    not contain the object itself (by its reference identifier).

    The identifier attribute is the referenced name of the relation type
    (``type._name``) or the explicit *name_ref*, defaulting to ``id``.
    """
    def validation(object, ctx):
        # uses the currently provided context (the model class) to retrieve
        # the definition of the name to be validated and, if it is a valid
        # relation type, tries to retrieve the underlying referenced name,
        # otherwise defaults to the provided one or the "id" name
        cls = ctx.__class__
        definition = cls.definition_n(name)
        type = definition.get("type", legacy.UNICODE)
        _name_ref = name_ref or (hasattr(type, "_name") and type._name or "id")
        # tries to retrieve both the object's own identifier and the values
        # of the sequence used for the existence matching; if either one is
        # not set the object is immediately considered valid
        _id = object.get(_name_ref, None)
        value = object.get(name, None)
        if _id == None: return True
        if value == None: return True
        # verifies if the sequence is in fact a proxy object exposing an
        # "ids" attribute; in that case the ids are used as the sequence
        if hasattr(value, "ids"): values = value.ids
        # otherwise this is a normal sequence and each item is resolved to
        # its reference name when available, or used as a concrete value
        else: values = [getattr(_value, _name_ref) if hasattr(_value, _name_ref) else _value\
            for _value in value]
        # verifies if the object's own identifier exists in the sequence
        # and if that's the case raises the validation exception
        exists = _id in values
        if not exists: return True
        message_l = _to_locale(message) if locale else message
        raise exceptions.ValidationInternalError(name, message_l)
    return validation
def _to_locale(value):
    """Localize *value* using the application's base locale support."""
    return common.base().to_locale(value)
|
SteveHNH/ansible | refs/heads/devel | lib/ansible/modules/network/illumos/ipadm_prop.py | 33 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Adam Števko <adam.stevko@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipadm_prop
short_description: Manage protocol properties on Solaris/illumos systems.
description:
- Modify protocol properties on Solaris/illumos systems.
version_added: "2.2"
author: Adam Števko (@xen0l)
options:
protocol:
description:
      - Specifies the protocol for which we want to manage properties.
required: true
property:
description:
- Specifies the name of property we want to manage.
required: true
value:
description:
- Specifies the value we want to set for the property.
required: false
temporary:
description:
- Specifies that the property value is temporary. Temporary
property values do not persist across reboots.
required: false
default: false
choices: [ "true", "false" ]
state:
description:
- Set or reset the property value.
required: false
default: present
choices: [ "present", "absent", "reset" ]
'''
EXAMPLES = '''
# Set TCP receive buffer size
ipadm_prop: protocol=tcp property=recv_buf value=65536
# Reset UDP send buffer size to the default value
ipadm_prop: protocol=udp property=send_buf state=reset
'''
RETURN = '''
protocol:
description: property's protocol
returned: always
type: string
sample: "TCP"
property:
description: name of the property
returned: always
type: string
sample: "recv_maxbuf"
state:
description: state of the target
returned: always
type: string
sample: "present"
temporary:
description: property's persistence
returned: always
type: boolean
sample: "True"
value:
description: value of the property. May be int or string depending on property.
returned: always
type: int
sample: "'1024' or 'never'"
'''
from ansible.module_utils.basic import AnsibleModule
SUPPORTED_PROTOCOLS = ['ipv4', 'ipv6', 'icmp', 'tcp', 'udp', 'sctp']
class Prop(object):
    """Thin wrapper around the illumos ``ipadm`` utility for querying and
    changing protocol properties (ipv4/ipv6/icmp/tcp/udp/sctp)."""

    def __init__(self, module):
        self.module = module
        self.protocol = module.params['protocol']
        self.property = module.params['property']
        self.value = module.params['value']
        self.temporary = module.params['temporary']
        self.state = module.params['state']

    def property_exists(self):
        """Return True when ipadm knows the property; fail the module otherwise."""
        cmd = [
            self.module.get_bin_path('ipadm'),
            'show-prop',
            '-p', self.property,
            self.protocol,
        ]
        (rc, _, _) = self.module.run_command(cmd)
        if rc == 0:
            return True
        self.module.fail_json(msg='Unknown property "%s" for protocol %s' %
                              (self.property, self.protocol),
                              protocol=self.protocol,
                              property=self.property)

    def property_is_modified(self):
        """Return True when the current value equals the default value.

        NOTE(review): despite the name, a True result means the property is
        at its default (i.e. NOT modified); main() relies on this and
        negates the result.
        """
        cmd = [
            self.module.get_bin_path('ipadm'),
            'show-prop', '-c', '-o', 'current,default',
            '-p', self.property,
            self.protocol,
        ]
        (rc, out, _) = self.module.run_command(cmd)
        # output has the form "current:default"
        (value, default) = out.rstrip().split(':')
        return rc == 0 and value == default

    def property_is_set(self):
        """Return True when the current value matches the requested value."""
        cmd = [
            self.module.get_bin_path('ipadm'),
            'show-prop', '-c', '-o', 'current',
            '-p', self.property,
            self.protocol,
        ]
        (rc, out, _) = self.module.run_command(cmd)
        return rc == 0 and self.value == out.rstrip()

    def set_property(self):
        """Run ``ipadm set-prop`` and return the (rc, stdout, stderr) triple."""
        cmd = [self.module.get_bin_path('ipadm'), 'set-prop']
        if self.temporary:
            # -t makes the change non-persistent across reboots
            cmd.append('-t')
        cmd.extend(['-p', self.property + "=" + self.value, self.protocol])
        return self.module.run_command(cmd)

    def reset_property(self):
        """Run ``ipadm reset-prop`` and return the (rc, stdout, stderr) triple."""
        cmd = [self.module.get_bin_path('ipadm'), 'reset-prop']
        if self.temporary:
            # -t makes the change non-persistent across reboots
            cmd.append('-t')
        cmd.extend(['-p', self.property, self.protocol])
        return self.module.run_command(cmd)
def main():
    """Entry point: ensure the requested protocol property state via ipadm."""
    module = AnsibleModule(
        argument_spec=dict(
            protocol=dict(required=True, choices=SUPPORTED_PROTOCOLS),
            property=dict(required=True),
            value=dict(required=False),
            temporary=dict(default=False, type='bool'),
            state=dict(
                default='present', choices=['absent', 'present', 'reset']),
        ),
        supports_check_mode=True
    )
    prop = Prop(module)
    # rc stays None when no ipadm command was executed (nothing to change)
    rc = None
    out = ''
    err = ''
    result = {}
    result['protocol'] = prop.protocol
    result['property'] = prop.property
    result['state'] = prop.state
    result['temporary'] = prop.temporary
    if prop.value:
        result['value'] = prop.value
    # 'absent' and 'reset' behave identically: restore the default value
    if prop.state == 'absent' or prop.state == 'reset':
        if prop.property_exists():
            # property_is_modified() returns True when the current value
            # equals the default, so False here means a reset is required
            if not prop.property_is_modified():
                if module.check_mode:
                    module.exit_json(changed=True)
                (rc, out, err) = prop.reset_property()
                if rc != 0:
                    module.fail_json(protocol=prop.protocol,
                                     property=prop.property,
                                     msg=err,
                                     rc=rc)
    elif prop.state == 'present':
        if prop.value is None:
            module.fail_json(msg='Value is mandatory with state "present"')
        if prop.property_exists():
            # only invoke set-prop when the current value differs
            if not prop.property_is_set():
                if module.check_mode:
                    module.exit_json(changed=True)
                (rc, out, err) = prop.set_property()
                if rc != 0:
                    module.fail_json(protocol=prop.protocol,
                                     property=prop.property,
                                     msg=err,
                                     rc=rc)
    # rc is only set when an ipadm command actually ran
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err
    module.exit_json(**result)
if __name__ == '__main__':
    main()
|
lsp84ch83/PyText | refs/heads/master | 公开课/爬虫/download.py | 1 | import urllib.request
import os
def url_open(url):
    """Fetch *url* and return the raw response body as bytes.

    A desktop-browser User-Agent header is attached so sites that block
    the default urllib agent still respond.
    """
    req = urllib.request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36')
    # BUG FIX: pass the Request object (which carries the header) to
    # urlopen; the original passed the bare URL, so the User-Agent set
    # above was never sent.
    response = urllib.request.urlopen(req)
    html = response.read()
    return html
def get_page(url):
    """Return the current comment-page number embedded in the page HTML."""
    html = url_open(url).decode('utf-8')
    # the number sits 23 chars after 'current-comment-page', ending at ']'
    start = html.find('current-comment-page') + 23
    end = html.find(']', start)
    return html[start:end]
def find_imgs(url):
    """Collect every ``img src`` address ending in ``.jpg`` from the page."""
    html = url_open(url).decode('utf-8')
    img_addrs = []
    pos = html.find('img src=')
    while pos != -1:
        # look for a '.jpg' within 255 characters of this 'img src='
        end = html.find('.jpg', pos, pos + 255)
        if end != -1:
            # found one: slice out the address (skip the 'img src="' prefix)
            img_addrs.append(html[pos + 9:end + 4])
        else:
            # none found: advance past this occurrence and keep scanning
            end = pos + 9
        pos = html.find('img src=', end)
    return img_addrs
def save_imgs(folder, img_addrs):
    """Download each scheme-relative address and write it under its basename
    in the current working directory."""
    for addr in img_addrs:
        filename = addr.split('/')[-1]
        with open(filename, 'wb') as handle:
            # addresses are scheme-relative (//host/...), so prepend http:
            handle.write(url_open('http:' + addr))
def download_me(folder='Image', pages = 10):
    """Download images from the newest *pages* comment pages into *folder*.

    Creates *folder* (must not already exist), then walks the page numbers
    downwards from the current one.
    """
    os.mkdir(folder)  # create the target directory
    os.chdir(folder)  # work inside it so the images land there
    url = 'http://jandan.net/ooxx/'
    # current (newest) comment-page number
    page_num = int(get_page(url))
    for i in range(pages):
        # BUG FIX: compute each page from the start page instead of the
        # original in-place "page_num -= i", which subtracted a growing
        # offset (0, 1, 3, 6, ...) and therefore skipped pages.
        page_url = url + 'page-' + str(page_num - i) + '#comments'
        # collect the image addresses of the page and store them
        img_addrs = find_imgs(page_url)
        save_imgs(folder, img_addrs)
if __name__ =='__main__':
    download_me()
|
Kozea/CSStyle | refs/heads/master | setup.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of CSStyle
# Copyright © 2010 Kozea
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CSStyle. If not, see <http://www.gnu.org/licenses/>.
"""
CSStyle - CSS with Style
========================
CSStyle is a simple CSS parser generating CSS adapted for various browsers.
CSStyle Project runs on most of the UNIX-like platforms (Linux, BSD,
MacOS X) and Windows. It is free and open-source software, released under GPL
version 3.
For further information, please visit the `CSStyle Website
<http://www.csstyle.org/>`_.
"""
import os
from distutils.core import setup
from distutils.command.build_scripts import build_scripts
import csstyle
# build_scripts is known to have a lot of public methods
# pylint: disable=R0904
class BuildScripts(build_scripts):
    """Build the package's scripts, stripping their ``.py`` extension."""
    def run(self):
        """Copy each script into the build directory without its extension."""
        # These lines remove the .py extension from the csstyle executable
        # (the original comment said "radicale" — copied from Radicale).
        self.mkpath(self.build_dir)
        for script in self.scripts:
            root, _ = os.path.splitext(script)
            self.copy_file(script, os.path.join(self.build_dir, root))
# pylint: enable=R0904
# When the version is updated, ``csstyle.VERSION`` must be modified.
# A new section in the ``NEWS`` file must be added too.
setup(
    name="CSStyle",
    version=csstyle.VERSION,
    description="CSStyle is a simple CSS parser generating CSS adapted for" + \
        " various browsers.",
    long_description=__doc__,
    author="Kozea",
    author_email="guillaume.ayoub@kozea.fr",
    url="http://www.csstyle.org/",
    download_url="http://gitorious.org/csstyle/csstyle/archive-tarball/master",
    license="GNU GPL v3",
    platforms="Any",
    packages=["csstyle"],
    provides=["csstyle"],
    scripts=["csstyle.py"],
    # strip the .py extension from installed scripts (see BuildScripts)
    cmdclass={"build_scripts": BuildScripts},
    keywords=["css", "generator"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Environment :: Web Environment",
        "Intended Audience :: End Users/Desktop",
        "Intended Audience :: Information Technology",
        "License :: OSI Approved :: GNU General Public License (GPL)",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.5",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.0",
        "Programming Language :: Python :: 3.1",
        "Topic :: Software Development :: Code Generators"])
|
nkhare/rockstor-core | refs/heads/master | src/rockstor/storageadmin/migrations/0031_auto__add_configbackup.py | 8 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create the 'storageadmin_configbackup' table for the new
        ConfigBackup model."""
        # Adding model 'ConfigBackup'
        db.create_table(u'storageadmin_configbackup', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('filename', self.gf('django.db.models.fields.CharField')(max_length=64)),
            ('md5sum', self.gf('django.db.models.fields.CharField')(max_length=32, null=True)),
            ('size', self.gf('django.db.models.fields.IntegerField')(null=True)),
            ('config_backup', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
        ))
        db.send_create_signal('storageadmin', ['ConfigBackup'])
def backwards(self, orm):
# Deleting model 'ConfigBackup'
db.delete_table(u'storageadmin_configbackup')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'oauth2_provider.application': {
'Meta': {'object_name': 'Application'},
'authorization_grant_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "u'M3Oo6MHQqG_hCkG_46i9!9CHXU7XfjA;WOI_CGO7'", 'unique': 'True', 'max_length': '100'}),
'client_secret': ('django.db.models.fields.CharField', [], {'default': "u'7P3SuF6qmmhm6YekTsMYPl?iPfeqkVO:NtDFZ0Dcb7eT70ZAGkiO;5b;jGhZpZ!!ElWoxFk-3=R-U:LQcJ8OTq-3:QPhAQp1Szsk7GtCnyWtHg4gqk2NWhoPRGa=QOv5'", 'max_length': '255', 'blank': 'True'}),
'client_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'storageadmin.advancednfsexport': {
'Meta': {'object_name': 'AdvancedNFSExport'},
'export_str': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'storageadmin.apikeys': {
'Meta': {'object_name': 'APIKeys'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '8'})
},
'storageadmin.appliance': {
'Meta': {'object_name': 'Appliance'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'client_secret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'current_appliance': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "'Rockstor'", 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'mgmt_port': ('django.db.models.fields.IntegerField', [], {'default': '443'}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
'storageadmin.configbackup': {
'Meta': {'object_name': 'ConfigBackup'},
'config_backup': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5sum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'storageadmin.containeroption': {
'Meta': {'object_name': 'ContainerOption'},
'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'val': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'})
},
'storageadmin.dashboardconfig': {
'Meta': {'object_name': 'DashboardConfig'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'}),
'widgets': ('django.db.models.fields.CharField', [], {'max_length': '4096'})
},
'storageadmin.dcontainer': {
'Meta': {'object_name': 'DContainer'},
'dimage': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DImage']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'launch_order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1024'}),
'rockon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.RockOn']"})
},
'storageadmin.dcontainerlink': {
'Meta': {'unique_together': "(('destination', 'name'),)", 'object_name': 'DContainerLink'},
'destination': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'destination_container'", 'to': "orm['storageadmin.DContainer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'source': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['storageadmin.DContainer']", 'unique': 'True'})
},
'storageadmin.dcustomconfig': {
'Meta': {'unique_together': "(('rockon', 'key'),)", 'object_name': 'DCustomConfig'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'rockon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.RockOn']"}),
'val': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'})
},
'storageadmin.dimage': {
'Meta': {'object_name': 'DImage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'storageadmin.disk': {
'Meta': {'object_name': 'Disk'},
'btrfs_uuid': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'offline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parted': ('django.db.models.fields.BooleanField', [], {}),
'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'serial': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'smart_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'smart_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'transport': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'vendor': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'})
},
'storageadmin.dport': {
'Meta': {'unique_together': "(('container', 'containerp'),)", 'object_name': 'DPort'},
'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}),
'containerp': ('django.db.models.fields.IntegerField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'hostp': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'hostp_default': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'uiport': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'storageadmin.dvolume': {
'Meta': {'unique_together': "(('container', 'dest_dir'),)", 'object_name': 'DVolume'},
'container': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.DContainer']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'dest_dir': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'min_size': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']", 'null': 'True'}),
'uservol': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'storageadmin.group': {
'Meta': {'object_name': 'Group'},
'admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'gid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'groupname': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'storageadmin.installedplugin': {
'Meta': {'object_name': 'InstalledPlugin'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'install_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'plugin_meta': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Plugin']"})
},
'storageadmin.iscsitarget': {
'Meta': {'object_name': 'IscsiTarget'},
'dev_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'dev_size': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"}),
'tid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'tname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'storageadmin.netatalkshare': {
'Meta': {'object_name': 'NetatalkShare'},
'description': ('django.db.models.fields.CharField', [], {'default': "'afp on rockstor'", 'max_length': '1024'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'share': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'netatalkshare'", 'unique': 'True', 'to': "orm['storageadmin.Share']"}),
'time_machine': ('django.db.models.fields.CharField', [], {'default': "'yes'", 'max_length': '3'})
},
'storageadmin.networkinterface': {
'Meta': {'object_name': 'NetworkInterface'},
'alias': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'boot_proto': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'dns_servers': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'gateway': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ipaddr': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'itype': ('django.db.models.fields.CharField', [], {'default': "'io'", 'max_length': '100'}),
'mac': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'netmask': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'network': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'onboot': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'storageadmin.nfsexport': {
'Meta': {'object_name': 'NFSExport'},
'export_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.NFSExportGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mount': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"})
},
'storageadmin.nfsexportgroup': {
'Meta': {'object_name': 'NFSExportGroup'},
'admin_host': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'editable': ('django.db.models.fields.CharField', [], {'default': "'rw'", 'max_length': '2'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'host_str': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mount_security': ('django.db.models.fields.CharField', [], {'default': "'insecure'", 'max_length': '8'}),
'nohide': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'syncable': ('django.db.models.fields.CharField', [], {'default': "'async'", 'max_length': '5'})
},
'storageadmin.oauthapp': {
'Meta': {'object_name': 'OauthApp'},
'application': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['oauth2_provider.Application']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.User']"})
},
'storageadmin.plugin': {
'Meta': {'object_name': 'Plugin'},
'css_file_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4096'}),
'display_name': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '4096'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'js_file_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'})
},
'storageadmin.pool': {
'Meta': {'object_name': 'Pool'},
'compression': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mnt_options': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'raid': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'storageadmin.poolbalance': {
'Meta': {'object_name': 'PoolBalance'},
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pid': ('django.db.models.fields.IntegerField', [], {}),
'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'started'", 'max_length': '10'})
},
'storageadmin.poolscrub': {
'Meta': {'object_name': 'PoolScrub'},
'corrected_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'csum_discards': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'csum_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'data_extents_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kb_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'null': 'True'}),
'last_physical': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'malloc_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'no_csum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pid': ('django.db.models.fields.IntegerField', [], {}),
'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}),
'read_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'started'", 'max_length': '10'}),
'super_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tree_bytes_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'tree_extents_scrubbed': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'uncorrectable_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'unverified_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'verify_errors': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'storageadmin.posixacls': {
'Meta': {'object_name': 'PosixACLs'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'perms': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'smb_share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SambaShare']"})
},
'storageadmin.rockon': {
'Meta': {'object_name': 'RockOn'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'https': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'icon': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'more_info': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'ui': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'volume_add_support': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'})
},
'storageadmin.sambacustomconfig': {
'Meta': {'object_name': 'SambaCustomConfig'},
'custom_config': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'smb_share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SambaShare']"})
},
'storageadmin.sambashare': {
'Meta': {'object_name': 'SambaShare'},
'browsable': ('django.db.models.fields.CharField', [], {'default': "'yes'", 'max_length': '3'}),
'comment': ('django.db.models.fields.CharField', [], {'default': "'foo bar'", 'max_length': '100'}),
'guest_ok': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'read_only': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '3'}),
'share': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'sambashare'", 'unique': 'True', 'to': "orm['storageadmin.Share']"})
},
'storageadmin.setup': {
'Meta': {'object_name': 'Setup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'setup_disks': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'setup_network': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'setup_system': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'setup_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'storageadmin.sftp': {
'Meta': {'object_name': 'SFTP'},
'editable': ('django.db.models.fields.CharField', [], {'default': "'ro'", 'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'share': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['storageadmin.Share']", 'unique': 'True'})
},
'storageadmin.share': {
'Meta': {'object_name': 'Share'},
'compression_algo': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'eusage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'group': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '4096'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'owner': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '4096'}),
'perms': ('django.db.models.fields.CharField', [], {'default': "'755'", 'max_length': '9'}),
'pool': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Pool']"}),
'qgroup': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'replica': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'rusage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'subvol_name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'storageadmin.smartattribute': {
'Meta': {'object_name': 'SMARTAttribute'},
'aid': ('django.db.models.fields.IntegerField', [], {}),
'atype': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'failed': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'flag': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'normed_value': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'raw_value': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'threshold': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'worst': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'storageadmin.smartcapability': {
'Meta': {'object_name': 'SMARTCapability'},
'capabilities': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'flag': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'storageadmin.smarterrorlog': {
'Meta': {'object_name': 'SMARTErrorLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'line': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'storageadmin.smarterrorlogsummary': {
'Meta': {'object_name': 'SMARTErrorLogSummary'},
'details': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'error_num': ('django.db.models.fields.IntegerField', [], {}),
'etype': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'lifetime_hours': ('django.db.models.fields.IntegerField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'storageadmin.smartidentity': {
'Meta': {'object_name': 'SMARTIdentity'},
'assessment': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'ata_version': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'capacity': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'device_model': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'enabled': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'firmware_version': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_smartdb': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'model_family': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'rotation_rate': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'sata_version': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'scanned_on': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'sector_size': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'serial_number': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'supported': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'world_wide_name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'storageadmin.smartinfo': {
'Meta': {'object_name': 'SMARTInfo'},
'disk': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Disk']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'storageadmin.smarttestlog': {
'Meta': {'object_name': 'SMARTTestLog'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'lba_of_first_error': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'lifetime_hours': ('django.db.models.fields.IntegerField', [], {}),
'pct_completed': ('django.db.models.fields.IntegerField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'test_num': ('django.db.models.fields.IntegerField', [], {})
},
'storageadmin.smarttestlogdetail': {
'Meta': {'object_name': 'SMARTTestLogDetail'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.SMARTInfo']"}),
'line': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'storageadmin.snapshot': {
'Meta': {'unique_together': "(('share', 'name'),)", 'object_name': 'Snapshot'},
'eusage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'qgroup': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'real_name': ('django.db.models.fields.CharField', [], {'default': "'unknownsnap'", 'max_length': '4096'}),
'rusage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'share': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Share']"}),
'size': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'snap_type': ('django.db.models.fields.CharField', [], {'default': "'admin'", 'max_length': '64'}),
'toc': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uvisible': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'writable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'storageadmin.supportcase': {
'Meta': {'object_name': 'SupportCase'},
'case_type': ('django.db.models.fields.CharField', [], {'max_length': '6'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '9'}),
'zipped_log': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'storageadmin.tlscertificate': {
'Meta': {'object_name': 'TLSCertificate'},
'certificate': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1024'})
},
'storageadmin.user': {
'Meta': {'object_name': 'User'},
'admin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'gid': ('django.db.models.fields.IntegerField', [], {'default': '5000'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storageadmin.Group']", 'null': 'True', 'blank': 'True'}),
'homedir': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True', 'blank': 'True'}),
'shell': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
'smb_shares': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'admin_users'", 'null': 'True', 'to': "orm['storageadmin.SambaShare']"}),
'uid': ('django.db.models.fields.IntegerField', [], {'default': '5000'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'suser'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}),
'username': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '4096'})
}
}
complete_apps = ['storageadmin'] |
prutseltje/ansible | refs/heads/devel | lib/ansible/modules/cloud/ovirt/ovirt_scheduling_policies_facts.py | 73 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_scheduling_policies_facts
short_description: Retrieve facts about one or more oVirt scheduling policies
author: "Ondra Machacek (@machacekondra)"
version_added: "2.4"
description:
- "Retrieve facts about one or more oVirt scheduling policies."
notes:
- "This module creates a new top-level C(ovirt_scheduling_policies) fact,
which contains a list of scheduling policies."
options:
id:
description:
- "ID of the scheduling policy."
required: true
name:
description:
- "Name of the scheduling policy, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all scheduling policies with name InClusterUpgrade:
- ovirt_scheduling_policies_facts:
name: InClusterUpgrade
- debug:
var: ovirt_scheduling_policies
'''
RETURN = '''
ovirt_scheduling_policies:
description: "List of dictionaries describing the scheduling policies.
Scheduling policies attribues are mapped to dictionary keys,
all scheduling policies attributes can be found at following
url: https://ovirt.example.com/ovirt-engine/api/model#types/scheduling_policy."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
def main():
    """Module entry point: collect oVirt scheduling-policy facts.

    Filters by 'name' (treated as a glob expression) first, then by 'id';
    otherwise returns every scheduling policy. Exits through
    module.exit_json() on success or module.fail_json() on error.
    """
    argument_spec = ovirt_facts_full_argument_spec(
        id=dict(default=None),
        name=dict(default=None),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # Initialized before the try block so the finally clause can tell whether
    # a connection was ever established. Previously, a failure inside
    # create_connection() left 'connection' unbound and the finally clause
    # raised NameError, masking the real error.
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        system_service = connection.system_service()
        sched_policies_service = system_service.scheduling_policies_service()
        if module.params['name']:
            # Glob-match the requested name against all policy names.
            sched_policies = [
                e for e in sched_policies_service.list()
                if fnmatch.fnmatch(e.name, module.params['name'])
            ]
        elif module.params['id']:
            sched_policies = [
                sched_policies_service.service(module.params['id']).get()
            ]
        else:
            sched_policies = sched_policies_service.list()
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_scheduling_policies=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in sched_policies
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Log out only when this run created the session (no caller token),
        # and only if a connection was actually established.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
|
nlu90/heron | refs/heads/master | heron/tools/common/src/python/access/tracker_access.py | 4 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''tracker_access.py: util functions for heron explorer and tracker'''
import traceback
import tornado.gen
import tornado.ioloop
from heron.tools.common.src.python.access import heron_api as API
from heron.common.src.python.utils.log import Log
def _all_metric_queries():
queries_normal = ['complete-latency',
'execute-latency',
'process-latency',
'jvm-uptime-secs',
'jvm-process-cpu-load',
'jvm-memory-used-mb']
queries = ['__%s' % m for m in queries_normal]
count_queries_normal = ['emit-count', 'execute-count', 'ack-count', 'fail-count']
count_queries = ['__%s/default' % m for m in count_queries_normal]
return queries, queries_normal, count_queries, count_queries_normal
def metric_queries():
  """Return every prefixed metric query (regular plus count queries)."""
  prefixed, _, prefixed_counts, _ = _all_metric_queries()
  return prefixed + prefixed_counts
def queries_map():
  """Return a dict mapping each query parameter to its plain query name.

  The dict is assembled with update() instead of concatenating two zip()
  results: on Python 3 zip() returns an iterator, so the original
  ``zip(...) + zip(...)`` raised TypeError. This form works on both
  Python 2 and Python 3.
  """
  qs = _all_metric_queries()
  mapping = dict(zip(qs[0], qs[1]))
  mapping.update(zip(qs[2], qs[3]))
  return mapping
def get_clusters():
  """Synchronous API call that fetches the names of all clusters."""
  # pylint: disable=unnecessary-lambda
  fetch = lambda: API.get_clusters()
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_logical_plan(cluster, env, topology, role):
  """Synchronous API call that fetches a topology's logical plan."""
  fetch = lambda: API.get_logical_plan(cluster, env, topology, role)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_topology_info(*args):
  """Synchronous API call that fetches topology information."""
  fetch = lambda: API.get_topology_info(*args)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_topology_metrics(*args):
  """Synchronous API call that fetches component metrics for a topology."""
  fetch = lambda: API.get_comp_metrics(*args)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_component_metrics(component, cluster, env, topology, role):
  """Synchronous API call that fetches metrics for one topology component."""
  queries = metric_queries()
  try:
    # instances=[] means all instances; [0, -1] selects the full time range
    response = get_topology_metrics(cluster, env, topology, component, [],
                                    queries, [0, -1], role)
    return response["metrics"]
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_cluster_topologies(cluster):
  """Synchronous API call that fetches the topologies of a cluster."""
  fetch = lambda: API.get_cluster_topologies(cluster)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_cluster_role_topologies(cluster, role):
  """Synchronous API call that fetches a role's topologies in a cluster."""
  fetch = lambda: API.get_cluster_role_topologies(cluster, role)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
def get_cluster_role_env_topologies(cluster, role, env):
  """Synchronous API call that fetches a role's topologies under an env."""
  fetch = lambda: API.get_cluster_role_env_topologies(cluster, role, env)
  loop = tornado.ioloop.IOLoop.instance()
  try:
    return loop.run_sync(fetch)
  except Exception:
    Log.debug(traceback.format_exc())
    raise
|
dayatz/taiga-back | refs/heads/stable | taiga/base/db/models/fields/json.py | 1 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2017 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2017 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2017 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.postgres.fields import JSONField as DjangoJSONField
# NOTE: After upgrade Django to the future release (1.11) change
# class JSONField(FutureDjangoJSONField):
# to
# class JSONField(DjangoJSONField):
# and remove the classes JsonAdapter and FutureDjangoJSONField
import json
from psycopg2.extras import Json
from django.core import exceptions
class JsonAdapter(Json):
    """
    psycopg2 Json adapter that serializes through an optional custom encoder.
    """
    def __init__(self, adapted, dumps=None, encoder=None):
        # remember the encoder class; dumps() consults it on every call
        self.encoder = encoder
        super().__init__(adapted, dumps=dumps)

    def dumps(self, obj):
        if self.encoder:
            return json.dumps(obj, cls=self.encoder)
        return json.dumps(obj)
class FutureDjangoJSONField(DjangoJSONField):
    """JSONField with custom-encoder support, backported per the module NOTE.

    Identical to django.contrib.postgres JSONField except that a JSON
    encoder class may be supplied via ``encoder``; see the module-level
    NOTE about removing this class after the Django upgrade.
    """
    def __init__(self, verbose_name=None, name=None, encoder=None, **kwargs):
        # encoder must be a class/callable, not an encoder instance
        if encoder and not callable(encoder):
            raise ValueError("The encoder parameter must be a callable object.")
        self.encoder = encoder
        super().__init__(verbose_name, name, **kwargs)

    def deconstruct(self):
        """Include ``encoder`` in the migration deconstruction when set."""
        name, path, args, kwargs = super().deconstruct()
        if self.encoder is not None:
            kwargs['encoder'] = self.encoder
        return name, path, args, kwargs

    def get_prep_value(self, value):
        """Wrap non-None values in JsonAdapter so psycopg2 uses the encoder."""
        if value is not None:
            return JsonAdapter(value, encoder=self.encoder)
        return value

    def validate(self, value, model_instance):
        """Reject values the configured encoder cannot serialize to JSON."""
        super().validate(value, model_instance)
        options = {'cls': self.encoder} if self.encoder else {}
        try:
            json.dumps(value, **options)
        except TypeError:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )
__all__ = ["JSONField"]
class JSONField(FutureDjangoJSONField):
    """JSON field whose default encoder is DjangoJSONEncoder."""
    def __init__(self, verbose_name=None, name=None, encoder=DjangoJSONEncoder, **kwargs):
        super().__init__(verbose_name=verbose_name, name=name,
                         encoder=encoder, **kwargs)
|
michigraber/scikit-learn | refs/heads/master | examples/cluster/plot_lena_segmentation.py | 271 | """
=========================================
Segmenting the picture of Lena in regions
=========================================
This example uses :ref:`spectral_clustering` on a graph created from
voxel-to-voxel difference on an image to break this image into multiple
partly-homogeneous regions.
This procedure (spectral clustering on an image) is an efficient
approximate solution for finding normalized graph cuts.
There are two options to assign labels:
* with 'kmeans' spectral clustering will cluster samples in the embedding space
using a kmeans algorithm
* whereas 'discrete' will iteratively search for the closest partition
space to the embedding space.
"""
print(__doc__)
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>, Brian Cheung
# License: BSD 3 clause
import time
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from sklearn.feature_extraction import image
from sklearn.cluster import spectral_clustering
# NOTE(review): sp.misc.lena() was removed in later SciPy releases — confirm
# the pinned SciPy version still provides it.
lena = sp.misc.lena()
# Downsample the image by a factor of 4
# (each pass sums 2x2 pixel blocks, halving both dimensions)
lena = lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2] + lena[1::2, 1::2]
lena = lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2] + lena[1::2, 1::2]
# Convert the image into a graph with the value of the gradient on the
# edges.
graph = image.img_to_graph(lena)
# Take a decreasing function of the gradient: an exponential
# The smaller beta is, the more independent the segmentation is of the
# actual image. For beta=1, the segmentation is close to a voronoi
beta = 5
eps = 1e-6
graph.data = np.exp(-beta * graph.data / lena.std()) + eps
# Apply spectral clustering (this step goes much faster if you have pyamg
# installed)
N_REGIONS = 11
###############################################################################
# Visualize the resulting regions
for assign_labels in ('kmeans', 'discretize'):
    t0 = time.time()
    labels = spectral_clustering(graph, n_clusters=N_REGIONS,
                                 assign_labels=assign_labels,
                                 random_state=1)
    t1 = time.time()
    # map the flat cluster labels back onto the image grid
    labels = labels.reshape(lena.shape)
    plt.figure(figsize=(5, 5))
    plt.imshow(lena, cmap=plt.cm.gray)
    for l in range(N_REGIONS):
        # draw each region's boundary in a distinct colormap color
        # NOTE(review): 'contours' is not a documented plt.contour keyword
        # ('levels' is) — verify it has the intended effect here.
        plt.contour(labels == l, contours=1,
                    colors=[plt.cm.spectral(l / float(N_REGIONS)), ])
    plt.xticks(())
    plt.yticks(())
    plt.title('Spectral clustering: %s, %.2fs' % (assign_labels, (t1 - t0)))
plt.show()
|
nzavagli/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Twisted-15.2.1/twisted/test/test_stateful.py | 10 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.protocols.stateful
"""
from twisted.trial.unittest import TestCase
from twisted.protocols.test import test_basic
from twisted.protocols.stateful import StatefulProtocol
from struct import pack, unpack, calcsize
class MyInt32StringReceiver(StatefulProtocol):
    """
    A stateful Int32StringReceiver.

    Frames are a big-endian unsigned 32-bit length prefix followed by
    that many bytes of payload, matching what sendString() produces.
    """
    MAX_LENGTH = 99999
    structFormat = "!I"
    prefixLength = calcsize(structFormat)

    def getInitialState(self):
        # Begin by reading the fixed-size length prefix.
        return self._getHeader, self.prefixLength

    def lengthLimitExceeded(self, length):
        # Drop the connection when a peer announces an oversized string.
        self.transport.loseConnection()

    def _getHeader(self, msg):
        # Decode the prefix with the same (unsigned) format used by
        # sendString. The original unpacked with a hard-coded "!i"
        # (signed), so lengths >= 2**31 decoded as negative values and
        # slipped past the MAX_LENGTH check.
        length, = unpack(self.structFormat, msg)
        if length > self.MAX_LENGTH:
            self.lengthLimitExceeded(length)
            return
        return self._getString, length

    def _getString(self, msg):
        self.stringReceived(msg)
        return self._getHeader, self.prefixLength

    def stringReceived(self, msg):
        """
        Override this.
        """
        raise NotImplementedError

    def sendString(self, data):
        """
        Send an int32-prefixed string to the other end of the connection.
        """
        self.transport.write(pack(self.structFormat, len(data)) + data)
class TestInt32(MyInt32StringReceiver):
    """Receiver that records each decoded string and connection loss."""

    MAX_LENGTH = 50
    closed = 0

    def connectionMade(self):
        self.received = []

    def stringReceived(self, s):
        self.received.append(s)

    def connectionLost(self, reason):
        self.closed = 1
class Int32Tests(TestCase, test_basic.IntNTestCaseMixin):
    """Exercises the stateful Int32 receiver via the shared IntN mixin."""

    protocol = TestInt32
    strings = ["a", "b" * 16]
    illegalStrings = ["\x10\x00\x00\x00aaaaaa"]
    partialStrings = ["\x00\x00\x00", "hello there", ""]

    def test_bigReceive(self):
        proto = self.getProtocol()
        # deliver many frames in one dataReceived call
        frames = [pack("!i", len(s)) + s for s in self.strings * 4]
        proto.dataReceived("".join(frames))
        self.assertEqual(proto.received, self.strings * 4)
|
GDGND/evm | refs/heads/master | allauth/socialaccount/providers/angellist/urls.py | 80 | from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import AngelListProvider
# Standard OAuth2 login/callback URL routes for the AngelList provider.
urlpatterns = default_urlpatterns(AngelListProvider)
|
lvming/linux | refs/heads/htnice-3.18 | scripts/analyze_suspend.py | 1540 | #!/usr/bin/python
#
# Tool for analyzing suspend/resume timing
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors:
# Todd Brandt <todd.e.brandt@linux.intel.com>
#
# Description:
# This tool is designed to assist kernel and OS developers in optimizing
# their linux stack's suspend/resume time. Using a kernel image built
# with a few extra options enabled, the tool will execute a suspend and
# will capture dmesg and ftrace data until resume is complete. This data
# is transformed into a device timeline and a callgraph to give a quick
# and detailed view of which devices and callbacks are taking the most
# time in suspend/resume. The output is a single html file which can be
# viewed in firefox or chrome.
#
# The following kernel build options are required:
# CONFIG_PM_DEBUG=y
# CONFIG_PM_SLEEP_DEBUG=y
# CONFIG_FTRACE=y
# CONFIG_FUNCTION_TRACER=y
# CONFIG_FUNCTION_GRAPH_TRACER=y
#
# For kernel versions older than 3.15:
# The following additional kernel parameters are required:
# (e.g. in file /etc/default/grub)
# GRUB_CMDLINE_LINUX_DEFAULT="... initcall_debug log_buf_len=16M ..."
#
# ----------------- LIBRARIES --------------------
import sys
import time
import os
import string
import re
import platform
from datetime import datetime
import struct
# ----------------- CLASSES --------------------
# Class: SystemValues
# Description:
# A global, single-instance container used to
# store system values and test parameters
class SystemValues:
	"""Global, single-instance container for system values and test parameters."""
	version = 3.0
	verbose = False
	testdir = '.'
	# debugfs/sysfs paths used for ftrace control and firmware data
	tpath = '/sys/kernel/debug/tracing/'
	fpdtpath = '/sys/firmware/acpi/tables/FPDT'
	epath = '/sys/kernel/debug/tracing/events/power/'
	traceevents = [
		'suspend_resume',
		'device_pm_callback_end',
		'device_pm_callback_start'
	]
	modename = {
		'freeze': 'Suspend-To-Idle (S0)',
		'standby': 'Power-On Suspend (S1)',
		'mem': 'Suspend-to-RAM (S3)',
		'disk': 'Suspend-to-disk (S4)'
	}
	mempath = '/dev/mem'
	powerfile = '/sys/power/state'
	suspendmode = 'mem'
	hostname = 'localhost'
	prefix = 'test'
	teststamp = ''
	dmesgfile = ''
	ftracefile = ''
	htmlfile = ''
	rtcwake = False
	rtcwaketime = 10
	rtcpath = ''
	android = False
	adb = 'adb'
	devicefilter = []
	stamp = 0
	execcount = 1
	x2delay = 0
	usecallgraph = False
	usetraceevents = False
	usetraceeventsonly = False
	notestrun = False
	altdevname = dict()
	postresumetime = 0
	# regex formats for parsing the headers written into dmesg/ftrace logs
	tracertypefmt = '# tracer: (?P<t>.*)'
	firmwarefmt = '# fwsuspend (?P<s>[0-9]*) fwresume (?P<r>[0-9]*)$'
	postresumefmt = '# post resume time (?P<t>[0-9]*)$'
	stampfmt = '# suspend-(?P<m>[0-9]{2})(?P<d>[0-9]{2})(?P<y>[0-9]{2})-'+\
				'(?P<H>[0-9]{2})(?P<M>[0-9]{2})(?P<S>[0-9]{2})'+\
				' (?P<host>.*) (?P<mode>.*) (?P<kernel>.*)$'
	def __init__(self):
		"""Discover the hostname and an RTC device usable for rtc wakeup."""
		self.hostname = platform.node()
		if(self.hostname == ''):
			self.hostname = 'localhost'
		rtc = "rtc0"
		if os.path.exists('/dev/rtc'):
			rtc = os.readlink('/dev/rtc')
		rtc = '/sys/class/rtc/'+rtc
		# only accept an RTC that exposes date/time/wakealarm in sysfs
		if os.path.exists(rtc) and os.path.exists(rtc+'/date') and \
			os.path.exists(rtc+'/time') and os.path.exists(rtc+'/wakealarm'):
			self.rtcpath = rtc
	def setOutputFile(self):
		"""Derive the html output filename from the dmesg/ftrace filenames."""
		if((self.htmlfile == '') and (self.dmesgfile != '')):
			m = re.match('(?P<name>.*)_dmesg\.txt$', self.dmesgfile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if((self.htmlfile == '') and (self.ftracefile != '')):
			m = re.match('(?P<name>.*)_ftrace\.txt$', self.ftracefile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if(self.htmlfile == ''):
			self.htmlfile = 'output.html'
	def initTestOutput(self, subdir):
		"""Create the timestamped test output directory and file names."""
		if(not self.android):
			self.prefix = self.hostname
			v = open('/proc/version', 'r').read().strip()
			kver = string.split(v)[2]
		else:
			# on android, the kernel version is read through adb
			self.prefix = 'android'
			v = os.popen(self.adb+' shell cat /proc/version').read().strip()
			kver = string.split(v)[2]
		testtime = datetime.now().strftime('suspend-%m%d%y-%H%M%S')
		if(subdir != "."):
			self.testdir = subdir+"/"+testtime
		else:
			self.testdir = testtime
		self.teststamp = \
			'# '+testtime+' '+self.prefix+' '+self.suspendmode+' '+kver
		self.dmesgfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_dmesg.txt'
		self.ftracefile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_ftrace.txt'
		self.htmlfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'.html'
		os.mkdir(self.testdir)
	def setDeviceFilter(self, devnames):
		"""Limit the timeline to the whitespace-separated device names."""
		self.devicefilter = string.split(devnames)
	def rtcWakeAlarm(self):
		"""Program the RTC wakealarm to fire rtcwaketime seconds from now."""
		os.system('echo 0 > '+self.rtcpath+'/wakealarm')
		outD = open(self.rtcpath+'/date', 'r').read().strip()
		outT = open(self.rtcpath+'/time', 'r').read().strip()
		mD = re.match('^(?P<y>[0-9]*)-(?P<m>[0-9]*)-(?P<d>[0-9]*)', outD)
		mT = re.match('^(?P<h>[0-9]*):(?P<m>[0-9]*):(?P<s>[0-9]*)', outT)
		if(mD and mT):
			# get the current time from hardware
			utcoffset = int((datetime.now() - datetime.utcnow()).total_seconds())
			dt = datetime(\
				int(mD.group('y')), int(mD.group('m')), int(mD.group('d')),
				int(mT.group('h')), int(mT.group('m')), int(mT.group('s')))
			nowtime = int(dt.strftime('%s')) + utcoffset
		else:
			# if hardware time fails, use the software time
			nowtime = int(datetime.now().strftime('%s'))
		alarm = nowtime + self.rtcwaketime
		os.system('echo %d > %s/wakealarm' % (alarm, self.rtcpath))
sysvals = SystemValues()
# Class: DeviceNode
# Description:
# A container used to create a device hierachy, with a single root node
# and a tree of child nodes. Used by Data.deviceTopology()
class DeviceNode:
	"""One node of the device hierarchy built by Data.deviceTopology()."""
	# class-level defaults; __init__ rebinds all three per instance
	name = ''
	children = 0
	depth = 0
	def __init__(self, nodename, nodedepth):
		self.name = nodename
		self.depth = nodedepth
		# each instance gets its own child list (never shared)
		self.children = list()
# Class: Data
# Description:
# The primary container for suspend/resume test data. There is one for
# each test run. The data is organized into a cronological hierarchy:
# Data.dmesg {
# root structure, started as dmesg & ftrace, but now only ftrace
# contents: times for suspend start/end, resume start/end, fwdata
# phases {
# 10 sequential, non-overlapping phases of S/R
# contents: times for phase start/end, order/color data for html
# devlist {
# device callback or action list for this phase
# device {
# a single device callback or generic action
# contents: start/stop times, pid/cpu/driver info
# parents/children, html id for timeline/callgraph
# optionally includes an ftrace callgraph
# optionally includes intradev trace events
# }
# }
# }
# }
#
class Data:
dmesg = {} # root data structure
phases = [] # ordered list of phases
start = 0.0 # test start
end = 0.0 # test end
tSuspended = 0.0 # low-level suspend start
tResumed = 0.0 # low-level resume start
tLow = 0.0 # time spent in low-level suspend (standby/freeze)
fwValid = False # is firmware data available
fwSuspend = 0 # time spent in firmware suspend
fwResume = 0 # time spent in firmware resume
dmesgtext = [] # dmesg text file in memory
testnumber = 0
idstr = ''
html_device_id = 0
stamp = 0
outfile = ''
def __init__(self, num):
idchar = 'abcdefghijklmnopqrstuvwxyz'
self.testnumber = num
self.idstr = idchar[num]
self.dmesgtext = []
self.phases = []
self.dmesg = { # fixed list of 10 phases
'suspend_prepare': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#CCFFCC', 'order': 0},
'suspend': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#88FF88', 'order': 1},
'suspend_late': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#00AA00', 'order': 2},
'suspend_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#008888', 'order': 3},
'suspend_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#0000FF', 'order': 4},
'resume_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FF0000', 'order': 5},
'resume_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FF9900', 'order': 6},
'resume_early': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFCC00', 'order': 7},
'resume': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFFF88', 'order': 8},
'resume_complete': {'list': dict(), 'start': -1.0, 'end': -1.0,
'row': 0, 'color': '#FFFFCC', 'order': 9}
}
self.phases = self.sortedPhases()
def getStart(self):
return self.dmesg[self.phases[0]]['start']
def setStart(self, time):
self.start = time
self.dmesg[self.phases[0]]['start'] = time
def getEnd(self):
return self.dmesg[self.phases[-1]]['end']
def setEnd(self, time):
self.end = time
self.dmesg[self.phases[-1]]['end'] = time
def isTraceEventOutsideDeviceCalls(self, pid, time):
for phase in self.phases:
list = self.dmesg[phase]['list']
for dev in list:
d = list[dev]
if(d['pid'] == pid and time >= d['start'] and
time <= d['end']):
return False
return True
def addIntraDevTraceEvent(self, action, name, pid, time):
if(action == 'mutex_lock_try'):
color = 'red'
elif(action == 'mutex_lock_pass'):
color = 'green'
elif(action == 'mutex_unlock'):
color = 'blue'
else:
# create separate colors based on the name
v1 = len(name)*10 % 256
v2 = string.count(name, 'e')*100 % 256
v3 = ord(name[0])*20 % 256
color = '#%06X' % ((v1*0x10000) + (v2*0x100) + v3)
for phase in self.phases:
list = self.dmesg[phase]['list']
for dev in list:
d = list[dev]
if(d['pid'] == pid and time >= d['start'] and
time <= d['end']):
e = TraceEvent(action, name, color, time)
if('traceevents' not in d):
d['traceevents'] = []
d['traceevents'].append(e)
return d
break
return 0
def capIntraDevTraceEvent(self, action, name, pid, time):
for phase in self.phases:
list = self.dmesg[phase]['list']
for dev in list:
d = list[dev]
if(d['pid'] == pid and time >= d['start'] and
time <= d['end']):
if('traceevents' not in d):
return
for e in d['traceevents']:
if(e.action == action and
e.name == name and not e.ready):
e.length = time - e.time
e.ready = True
break
return
def trimTimeVal(self, t, t0, dT, left):
if left:
if(t > t0):
if(t - dT < t0):
return t0
return t - dT
else:
return t
else:
if(t < t0 + dT):
if(t > t0):
return t0 + dT
return t + dT
else:
return t
def trimTime(self, t0, dT, left):
self.tSuspended = self.trimTimeVal(self.tSuspended, t0, dT, left)
self.tResumed = self.trimTimeVal(self.tResumed, t0, dT, left)
self.start = self.trimTimeVal(self.start, t0, dT, left)
self.end = self.trimTimeVal(self.end, t0, dT, left)
for phase in self.phases:
p = self.dmesg[phase]
p['start'] = self.trimTimeVal(p['start'], t0, dT, left)
p['end'] = self.trimTimeVal(p['end'], t0, dT, left)
list = p['list']
for name in list:
d = list[name]
d['start'] = self.trimTimeVal(d['start'], t0, dT, left)
d['end'] = self.trimTimeVal(d['end'], t0, dT, left)
if('ftrace' in d):
cg = d['ftrace']
cg.start = self.trimTimeVal(cg.start, t0, dT, left)
cg.end = self.trimTimeVal(cg.end, t0, dT, left)
for line in cg.list:
line.time = self.trimTimeVal(line.time, t0, dT, left)
if('traceevents' in d):
for e in d['traceevents']:
e.time = self.trimTimeVal(e.time, t0, dT, left)
def normalizeTime(self, tZero):
# first trim out any standby or freeze clock time
if(self.tSuspended != self.tResumed):
if(self.tResumed > tZero):
self.trimTime(self.tSuspended, \
self.tResumed-self.tSuspended, True)
else:
self.trimTime(self.tSuspended, \
self.tResumed-self.tSuspended, False)
# shift the timeline so that tZero is the new 0
self.tSuspended -= tZero
self.tResumed -= tZero
self.start -= tZero
self.end -= tZero
for phase in self.phases:
p = self.dmesg[phase]
p['start'] -= tZero
p['end'] -= tZero
list = p['list']
for name in list:
d = list[name]
d['start'] -= tZero
d['end'] -= tZero
if('ftrace' in d):
cg = d['ftrace']
cg.start -= tZero
cg.end -= tZero
for line in cg.list:
line.time -= tZero
if('traceevents' in d):
for e in d['traceevents']:
e.time -= tZero
def newPhaseWithSingleAction(self, phasename, devname, start, end, color):
for phase in self.phases:
self.dmesg[phase]['order'] += 1
self.html_device_id += 1
devid = '%s%d' % (self.idstr, self.html_device_id)
list = dict()
list[devname] = \
{'start': start, 'end': end, 'pid': 0, 'par': '',
'length': (end-start), 'row': 0, 'id': devid, 'drv': '' };
self.dmesg[phasename] = \
{'list': list, 'start': start, 'end': end,
'row': 0, 'color': color, 'order': 0}
self.phases = self.sortedPhases()
def newPhase(self, phasename, start, end, color, order):
if(order < 0):
order = len(self.phases)
for phase in self.phases[order:]:
self.dmesg[phase]['order'] += 1
if(order > 0):
p = self.phases[order-1]
self.dmesg[p]['end'] = start
if(order < len(self.phases)):
p = self.phases[order]
self.dmesg[p]['start'] = end
list = dict()
self.dmesg[phasename] = \
{'list': list, 'start': start, 'end': end,
'row': 0, 'color': color, 'order': order}
self.phases = self.sortedPhases()
def setPhase(self, phase, ktime, isbegin):
if(isbegin):
self.dmesg[phase]['start'] = ktime
else:
self.dmesg[phase]['end'] = ktime
def dmesgSortVal(self, phase):
return self.dmesg[phase]['order']
def sortedPhases(self):
return sorted(self.dmesg, key=self.dmesgSortVal)
def sortedDevices(self, phase):
list = self.dmesg[phase]['list']
slist = []
tmp = dict()
for devname in list:
dev = list[devname]
tmp[dev['start']] = devname
for t in sorted(tmp):
slist.append(tmp[t])
return slist
def fixupInitcalls(self, phase, end):
# if any calls never returned, clip them at system resume end
phaselist = self.dmesg[phase]['list']
for devname in phaselist:
dev = phaselist[devname]
if(dev['end'] < 0):
dev['end'] = end
vprint('%s (%s): callback didnt return' % (devname, phase))
def deviceFilter(self, devicefilter):
# remove all by the relatives of the filter devnames
filter = []
for phase in self.phases:
list = self.dmesg[phase]['list']
for name in devicefilter:
dev = name
while(dev in list):
if(dev not in filter):
filter.append(dev)
dev = list[dev]['par']
children = self.deviceDescendants(name, phase)
for dev in children:
if(dev not in filter):
filter.append(dev)
for phase in self.phases:
list = self.dmesg[phase]['list']
rmlist = []
for name in list:
pid = list[name]['pid']
if(name not in filter and pid >= 0):
rmlist.append(name)
for name in rmlist:
del list[name]
def fixupInitcallsThatDidntReturn(self):
# if any calls never returned, clip them at system resume end
for phase in self.phases:
self.fixupInitcalls(phase, self.getEnd())
def newActionGlobal(self, name, start, end):
# which phase is this device callback or action "in"
targetphase = "none"
overlap = 0.0
for phase in self.phases:
pstart = self.dmesg[phase]['start']
pend = self.dmesg[phase]['end']
o = max(0, min(end, pend) - max(start, pstart))
if(o > overlap):
targetphase = phase
overlap = o
if targetphase in self.phases:
self.newAction(targetphase, name, -1, '', start, end, '')
return True
return False
def newAction(self, phase, name, pid, parent, start, end, drv):
# new device callback for a specific phase
self.html_device_id += 1
devid = '%s%d' % (self.idstr, self.html_device_id)
list = self.dmesg[phase]['list']
length = -1.0
if(start >= 0 and end >= 0):
length = end - start
list[name] = {'start': start, 'end': end, 'pid': pid, 'par': parent,
'length': length, 'row': 0, 'id': devid, 'drv': drv }
def deviceIDs(self, devlist, phase):
idlist = []
list = self.dmesg[phase]['list']
for devname in list:
if devname in devlist:
idlist.append(list[devname]['id'])
return idlist
def deviceParentID(self, devname, phase):
pdev = ''
pdevid = ''
list = self.dmesg[phase]['list']
if devname in list:
pdev = list[devname]['par']
if pdev in list:
return list[pdev]['id']
return pdev
def deviceChildren(self, devname, phase):
devlist = []
list = self.dmesg[phase]['list']
for child in list:
if(list[child]['par'] == devname):
devlist.append(child)
return devlist
def deviceDescendants(self, devname, phase):
children = self.deviceChildren(devname, phase)
family = children
for child in children:
family += self.deviceDescendants(child, phase)
return family
def deviceChildrenIDs(self, devname, phase):
devlist = self.deviceChildren(devname, phase)
return self.deviceIDs(devlist, phase)
	def printDetails(self):
		# Verbose-only dump of the test window: the start time, each
		# phase's time range and device count, then the end time.
		vprint(' test start: %f' % self.start)
		for phase in self.phases:
			dc = len(self.dmesg[phase]['list'])
			vprint(' %16s: %f - %f (%d devices)' % (phase, \
				self.dmesg[phase]['start'], self.dmesg[phase]['end'], dc))
		vprint(' test end: %f' % self.end)
def masterTopology(self, name, list, depth):
node = DeviceNode(name, depth)
for cname in list:
clist = self.deviceChildren(cname, 'resume')
cnode = self.masterTopology(cname, clist, depth+1)
node.children.append(cnode)
return node
def printTopology(self, node):
html = ''
if node.name:
info = ''
drv = ''
for phase in self.phases:
list = self.dmesg[phase]['list']
if node.name in list:
s = list[node.name]['start']
e = list[node.name]['end']
if list[node.name]['drv']:
drv = ' {'+list[node.name]['drv']+'}'
info += ('<li>%s: %.3fms</li>' % (phase, (e-s)*1000))
html += '<li><b>'+node.name+drv+'</b>'
if info:
html += '<ul>'+info+'</ul>'
html += '</li>'
if len(node.children) > 0:
html += '<ul>'
for cnode in node.children:
html += self.printTopology(cnode)
html += '</ul>'
return html
def rootDeviceList(self):
# list of devices graphed
real = []
for phase in self.dmesg:
list = self.dmesg[phase]['list']
for dev in list:
if list[dev]['pid'] >= 0 and dev not in real:
real.append(dev)
# list of top-most root devices
rootlist = []
for phase in self.dmesg:
list = self.dmesg[phase]['list']
for dev in list:
pdev = list[dev]['par']
if(re.match('[0-9]*-[0-9]*\.[0-9]*[\.0-9]*\:[\.0-9]*$', pdev)):
continue
if pdev and pdev not in real and pdev not in rootlist:
rootlist.append(pdev)
return rootlist
def deviceTopology(self):
rootlist = self.rootDeviceList()
master = self.masterTopology('', rootlist, 0)
return self.printTopology(master)
# Class: TraceEvent
# Description:
# A container for trace event data found in the ftrace file
class TraceEvent:
	# A single custom trace event parsed from the ftrace file.
	ready = False
	name = ''
	time = 0.0
	color = '#FFFFFF'
	length = 0.0
	action = ''
	def __init__(self, a, n, c, t):
		# a: action string, n: event name, c: html color, t: timestamp
		self.action, self.name, self.color, self.time = a, n, c, t
# Class: FTraceLine
# Description:
# A container for a single line of ftrace data. There are six basic types:
# callgraph line:
# call: " dpm_run_callback() {"
# return: " }"
# leaf: " dpm_run_callback();"
# trace event:
# tracing_mark_write: SUSPEND START or RESUME COMPLETE
# suspend_resume: phase or custom exec block data
# device_pm_callback: device callback info
class FTraceLine:
time = 0.0
length = 0.0
fcall = False
freturn = False
fevent = False
depth = 0
name = ''
type = ''
def __init__(self, t, m, d):
self.time = float(t)
# is this a trace event
if(d == 'traceevent' or re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)):
if(d == 'traceevent'):
# nop format trace event
msg = m
else:
# function_graph format trace event
em = re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)
msg = em.group('msg')
emm = re.match('^(?P<call>.*?): (?P<msg>.*)', msg)
if(emm):
self.name = emm.group('msg')
self.type = emm.group('call')
else:
self.name = msg
self.fevent = True
return
# convert the duration to seconds
if(d):
self.length = float(d)/1000000
# the indentation determines the depth
match = re.match('^(?P<d> *)(?P<o>.*)$', m)
if(not match):
return
self.depth = self.getDepth(match.group('d'))
m = match.group('o')
# function return
if(m[0] == '}'):
self.freturn = True
if(len(m) > 1):
# includes comment with function name
match = re.match('^} *\/\* *(?P<n>.*) *\*\/$', m)
if(match):
self.name = match.group('n')
# function call
else:
self.fcall = True
# function call with children
if(m[-1] == '{'):
match = re.match('^(?P<n>.*) *\(.*', m)
if(match):
self.name = match.group('n')
# function call with no children (leaf)
elif(m[-1] == ';'):
self.freturn = True
match = re.match('^(?P<n>.*) *\(.*', m)
if(match):
self.name = match.group('n')
# something else (possibly a trace marker)
else:
self.name = m
def getDepth(self, str):
return len(str)/2
def debugPrint(self, dev):
if(self.freturn and self.fcall):
print('%s -- %f (%02d): %s(); (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
elif(self.freturn):
print('%s -- %f (%02d): %s} (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
else:
print('%s -- %f (%02d): %s() { (%.3f us)' % (dev, self.time, \
self.depth, self.name, self.length*1000000))
# Class: FTraceCallGraph
# Description:
# A container for the ftrace callgraph of a single recursive function.
# This can be a dpm_run_callback, dpm_prepare, or dpm_complete callgraph
# Each instance is tied to a single device in a single phase, and is
# comprised of an ordered list of FTraceLine objects
class FTraceCallGraph:
	# start/end of the callgraph time window, the ordered list of
	# FTraceLine objects, and the running call depth while adding lines
	start = -1.0
	end = -1.0
	list = []
	invalid = False
	depth = 0
	def __init__(self):
		self.start = -1.0
		self.end = -1.0
		self.list = []
		self.depth = 0
	def setDepth(self, line):
		# Assign the current call depth to a line and update it:
		# a call descends one level, a return ascends one.
		if(line.fcall and not line.freturn):
			line.depth = self.depth
			self.depth += 1
		elif(line.freturn and not line.fcall):
			self.depth -= 1
			line.depth = self.depth
		else:
			line.depth = self.depth
	def addLine(self, line, match):
		# Append a line to the graph. Returns True when the graph is
		# complete (a return at depth 0). Oversized or unbalanced
		# graphs are marked invalid and reduced to their first line.
		if(not self.invalid):
			self.setDepth(line)
		if(line.depth == 0 and line.freturn):
			if(self.start < 0):
				self.start = line.time
			self.end = line.time
			self.list.append(line)
			return True
		if(self.invalid):
			return False
		if(len(self.list) >= 1000000 or self.depth < 0):
			# too much data or more returns than calls: drop the graph
			if(len(self.list) > 0):
				first = self.list[0]
				self.list = []
				self.list.append(first)
			self.invalid = True
			if(not match):
				return False
			# renamed from 'id' to avoid shadowing the builtin
			cgid = 'task %s cpu %s' % (match.group('pid'), match.group('cpu'))
			window = '(%f - %f)' % (self.start, line.time)
			if(self.depth < 0):
				print('Too much data for '+cgid+\
					' (buffer overflow), ignoring this callback')
			else:
				print('Too much data for '+cgid+\
					' '+window+', ignoring this callback')
			return False
		self.list.append(line)
		if(self.start < 0):
			self.start = line.time
		return False
	def slice(self, t0, tN):
		# Extract the sub-callgraph inside the [t0, tN] time window,
		# rebased so that its first call sits at depth 0.
		minicg = FTraceCallGraph()
		count = -1
		firstdepth = 0
		for l in self.list:
			if(l.time < t0 or l.time > tN):
				continue
			if(count < 0):
				if(not l.fcall or l.name == 'dev_driver_string'):
					continue
				firstdepth = l.depth
				count = 0
			l.depth -= firstdepth
			minicg.addLine(l, 0)
			if((count == 0 and l.freturn and l.fcall) or
				(count > 0 and l.depth <= 0)):
				break
			count += 1
		return minicg
	def sanityCheck(self):
		# Verify calls and returns are balanced, folding each return's
		# duration into its matching call line.
		stack = dict()
		cnt = 0
		for l in self.list:
			if(l.fcall and not l.freturn):
				stack[l.depth] = l
				cnt += 1
			elif(l.freturn and not l.fcall):
				if(l.depth not in stack):
					return False
				stack[l.depth].length = l.length
				stack[l.depth] = 0
				l.length = 0
				cnt -= 1
		if(cnt == 0):
			return True
		return False
	def debugPrint(self, filename):
		# Dump the callgraph to stdout or to the named file.
		# Fix: the original wrote print('[%f - %f]') % (...), applying
		# '%' to print's return value (None), which raises a TypeError.
		if(filename == 'stdout'):
			print('[%f - %f]' % (self.start, self.end))
			for l in self.list:
				if(l.freturn and l.fcall):
					print('%f (%02d): %s(); (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					print('%f (%02d): %s} (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					print('%f (%02d): %s() { (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
			print(' ')
		else:
			fp = open(filename, 'w')
			print(filename)
			for l in self.list:
				if(l.freturn and l.fcall):
					fp.write('%f (%02d): %s(); (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					fp.write('%f (%02d): %s} (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					fp.write('%f (%02d): %s() { (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
			fp.close()
# Class: Timeline
# Description:
# A container for a suspend/resume html timeline. In older versions
# of the script there were multiple timelines, but in the latest
# there is only one.
class Timeline:
	# Container for the single suspend/resume html timeline.
	html = {}
	scaleH = 0.0 # height of the row as a percent of the timeline height
	rowH = 0.0 # height of each row in percent of the timeline height
	row_height_pixels = 30
	maxrows = 0
	height = 0
	def __init__(self):
		# start with empty html fragments for every section
		self.html = {
			'timeline': '',
			'legend': '',
			'scale': ''
		}
	def setRows(self, rows):
		# From a row count, derive the timeline pixel height plus the
		# percentage heights of the scale row and each device row.
		self.maxrows = int(rows)
		self.scaleH = 100.0/float(self.maxrows)
		self.height = self.maxrows*self.row_height_pixels
		divisor = max(float(self.maxrows - 1), 1.0)
		self.rowH = (100.0 - self.scaleH)/divisor
# Class: TestRun
# Description:
# A container for a suspend/resume test run. This is necessary as
# there could be more than one, and they need to be separate.
class TestRun:
	# Per-test-run parsing state: the ftrace line regex to use,
	# temporary callgraph (ftemp) and traceevent (ttemp) storage, and
	# the Data object the parsed results feed into.
	# regex for function_graph formatted trace lines
	ftrace_line_fmt_fg = \
		'^ *(?P<time>[0-9\.]*) *\| *(?P<cpu>[0-9]*)\)'+\
		' *(?P<proc>.*)-(?P<pid>[0-9]*) *\|'+\
		'[ +!]*(?P<dur>[0-9\.]*) .*\| (?P<msg>.*)'
	# regex for nop formatted trace lines
	ftrace_line_fmt_nop = \
		' *(?P<proc>.*)-(?P<pid>[0-9]*) *\[(?P<cpu>[0-9]*)\] *'+\
		'(?P<flags>.{4}) *(?P<time>[0-9\.]*): *'+\
		'(?P<msg>.*)'
	ftrace_line_fmt = ftrace_line_fmt_nop
	cgformat = False
	ftemp = dict()
	ttemp = dict()
	inthepipe = False
	tracertype = ''
	data = 0
	def __init__(self, dataobj):
		self.data = dataobj
		self.ftemp = dict()
		self.ttemp = dict()
	def isReady(self):
		# True once a tracer type has been set and a Data object attached.
		# Fix: the original read the bare names 'tracertype' and 'data',
		# raising NameError; they are instance attributes.
		if(self.tracertype == '' or not self.data):
			return False
		return True
	def setTracerType(self, tracer):
		# Select the line-parsing regex for the given tracer type
		# ('function_graph' or 'nop'); any other value is a fatal error.
		self.tracertype = tracer
		if(tracer == 'function_graph'):
			self.cgformat = True
			self.ftrace_line_fmt = self.ftrace_line_fmt_fg
		elif(tracer == 'nop'):
			self.ftrace_line_fmt = self.ftrace_line_fmt_nop
		else:
			doError('Invalid tracer format: [%s]' % tracer, False)
# ----------------- FUNCTIONS --------------------
# Function: vprint
# Description:
# verbose print (prints only with -verbose option)
# Arguments:
# msg: the debug/log message to print
def vprint(msg):
	# Verbose-only logging: emit msg only when -verbose was requested.
	global sysvals
	if not sysvals.verbose:
		return
	print(msg)
# Function: initFtrace
# Description:
# Configure ftrace to use trace events and/or a callgraph
def initFtrace():
	"""
	Configure ftrace for the upcoming test run: enable the function_graph
	callgraph tracer and/or the suspend/resume trace events, depending on
	the sysvals.usecallgraph/usetraceevents flags. All configuration is
	done by shelling out echo/cat writes to the tracefs files under
	sysvals.tpath.
	"""
	global sysvals
	tp = sysvals.tpath
	cf = 'dpm_run_callback'
	if(sysvals.usetraceeventsonly):
		cf = '-e dpm_prepare -e dpm_complete -e dpm_run_callback'
	if(sysvals.usecallgraph or sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system('echo 0 > '+tp+'tracing_on')
		# set the trace clock to global
		os.system('echo global > '+tp+'trace_clock')
		# set trace buffer to a huge value
		os.system('echo nop > '+tp+'current_tracer')
		os.system('echo 100000 > '+tp+'buffer_size_kb')
		# initialize the callgraph trace, unless this is an x2 run
		if(sysvals.usecallgraph and sysvals.execcount == 1):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | grep '+\
				cf+' > '+tp+'set_graph_function')
		if(sysvals.usetraceevents):
			# turn trace events on
			events = iter(sysvals.traceevents)
			for e in events:
				os.system('echo 1 > '+sysvals.epath+e+'/enable')
		# clear the trace buffer
		os.system('echo "" > '+tp+'trace')
# Function: initFtraceAndroid
# Description:
# Configure ftrace to capture trace events
def initFtraceAndroid():
	"""
	Configure ftrace on an Android target to capture the suspend/resume
	trace events. Same tracefs writes as initFtrace, but issued through
	'adb shell' (sysvals.adb); no callgraph support on this path.
	"""
	global sysvals
	tp = sysvals.tpath
	if(sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
		# set the trace clock to global
		os.system(sysvals.adb+" shell 'echo global > "+tp+"trace_clock'")
		# set trace buffer to a huge value
		os.system(sysvals.adb+" shell 'echo nop > "+tp+"current_tracer'")
		os.system(sysvals.adb+" shell 'echo 10000 > "+tp+"buffer_size_kb'")
		# turn trace events on
		events = iter(sysvals.traceevents)
		for e in events:
			os.system(sysvals.adb+" shell 'echo 1 > "+\
				sysvals.epath+e+"/enable'")
		# clear the trace buffer
		os.system(sysvals.adb+" shell 'echo \"\" > "+tp+"trace'")
# Function: verifyFtrace
# Description:
# Check that ftrace is working on the system
# Output:
# True or False
def verifyFtrace():
	"""
	Check that the tracefs files this script needs exist, either locally
	or (when sysvals.android is set) on the adb target.
	Output:
		True if all required files are present, False otherwise
	"""
	global sysvals
	# files needed for any trace data
	files = ['buffer_size_kb', 'current_tracer', 'trace', 'trace_clock',
			 'trace_marker', 'trace_options', 'tracing_on']
	# files needed for callgraph trace data
	tp = sysvals.tpath
	if(sysvals.usecallgraph):
		files += [
			'available_filter_functions',
			'set_ftrace_filter',
			'set_graph_function'
		]
	for f in files:
		if(sysvals.android):
			# on android, 'adb shell ls <path>' echoes the path back
			# only when the file exists
			out = os.popen(sysvals.adb+' shell ls '+tp+f).read().strip()
			if(out != tp+f):
				return False
		else:
			if(os.path.exists(tp+f) == False):
				return False
	return True
# Function: parseStamp
# Description:
# Pull in the stamp comment line from the data file(s),
# create the stamp, and add it to the global sysvals object
# Arguments:
# m: the valid re.match output for the stamp line
def parseStamp(m, data):
	# Build data.stamp (time/host/mode/kernel) from the matched stamp
	# line and propagate the suspend mode into the global sysvals; the
	# first stamp seen also becomes sysvals.stamp.
	global sysvals
	dt = datetime(int(m.group('y'))+2000, int(m.group('m')),
		int(m.group('d')), int(m.group('H')), int(m.group('M')),
		int(m.group('S')))
	data.stamp = {
		'time': dt.strftime('%B %d %Y, %I:%M:%S %p'),
		'host': m.group('host'),
		'mode': m.group('mode'),
		'kernel': m.group('kernel'),
	}
	sysvals.suspendmode = data.stamp['mode']
	if not sysvals.stamp:
		sysvals.stamp = data.stamp
# Function: diffStamp
# Description:
# compare the host, kernel, and mode fields in 3 stamps
# Arguments:
# stamp1: string array with mode, kernel, and host
# stamp2: string array with mode, kernel, and host
# Return:
# True if stamps differ, False if they're the same
def diffStamp(stamp1, stamp2):
	# True if the two stamps disagree on host, kernel, or mode; a field
	# missing from either stamp is not compared.
	for field in ('host', 'kernel', 'mode'):
		if field in stamp1 and field in stamp2:
			if stamp1[field] != stamp2[field]:
				return True
	return False
# Function: doesTraceLogHaveTraceEvents
# Description:
# Quickly determine if the ftrace log has some or all of the trace events
# required for primary parsing. Set the usetraceevents and/or
# usetraceeventsonly flags in the global sysvals object
def doesTraceLogHaveTraceEvents():
	"""
	Grep sysvals.ftracefile for each required trace event and set:
	  sysvals.usetraceevents      - True if 'suspend_resume' is present
	  sysvals.usetraceeventsonly  - True only if every event is present
	"""
	global sysvals
	sysvals.usetraceeventsonly = True
	sysvals.usetraceevents = False
	for e in sysvals.traceevents:
		# shell out to grep for the event name in the log
		out = os.popen('cat '+sysvals.ftracefile+' | grep "'+e+': "').read()
		if(not out):
			sysvals.usetraceeventsonly = False
		if(e == 'suspend_resume' and out):
			sysvals.usetraceevents = True
# Function: appendIncompleteTraceLog
# Description:
# [deprecated for kernel 3.15 or newer]
# Legacy support of ftrace outputs that lack the device_pm_callback
# and/or suspend_resume trace events. The primary data should be
# taken from dmesg, and this ftrace is used only for callgraph data
# or custom actions in the timeline. The data is appended to the Data
# objects provided.
# Arguments:
# testruns: the array of Data objects obtained from parseKernelLog
def appendIncompleteTraceLog(testruns):
	"""
	[deprecated for kernel 3.15 or newer]
	Parse the ftrace log for callgraph and custom trace event data and
	append it to the Data objects already built from the dmesg log.
	Arguments:
		testruns: the array of Data objects obtained from parseKernelLog
	"""
	global sysvals
	# create TestRun vessels for ftrace parsing
	testcnt = len(testruns)
	testidx = -1
	testrun = []
	for data in testruns:
		testrun.append(TestRun(data))
	# extract the callgraph and traceevent data
	vprint('Analyzing the ftrace data...')
	tf = open(sysvals.ftracefile, 'r')
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# grab the time stamp first (signifies the start of the test run)
		m = re.match(sysvals.stampfmt, line)
		if(m):
			testidx += 1
			parseStamp(m, testrun[testidx].data)
			continue
		# pull out any firmware data
		if(re.match(sysvals.firmwarefmt, line)):
			continue
		# if we haven't found a test time stamp yet keep spinning til we do
		if(testidx < 0):
			continue
		# determine the trace data type (required for further parsing)
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun[testidx].setTracerType(tracer)
			continue
		# parse only valid lines, if this isn't one move on
		m = re.match(testrun[testidx].ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun[testidx].cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		data = testrun[testidx].data
		if(not testrun[testidx].inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun[testidx].inthepipe = True
					data.setStart(t.time)
				continue
		else:
			# trace event processing
			if(t.fevent):
				if(t.name == 'RESUME COMPLETE'):
					testrun[testidx].inthepipe = False
					data.setEnd(t.time)
					if(testidx == testcnt - 1):
						break
					continue
				# general trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				# split "name[val]" style event names; val 0 is dropped
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# special processing for trace events
				if re.match('dpm_prepare\[.*', name):
					continue
				elif re.match('machine_suspend.*', name):
					continue
				elif re.match('suspend_enter\[.*', name):
					if(not isbegin):
						data.dmesg['suspend_prepare']['end'] = t.time
					continue
				elif re.match('dpm_suspend\[.*', name):
					if(not isbegin):
						data.dmesg['suspend']['end'] = t.time
					continue
				elif re.match('dpm_suspend_late\[.*', name):
					if(isbegin):
						data.dmesg['suspend_late']['start'] = t.time
					else:
						data.dmesg['suspend_late']['end'] = t.time
					continue
				elif re.match('dpm_suspend_noirq\[.*', name):
					if(isbegin):
						data.dmesg['suspend_noirq']['start'] = t.time
					else:
						data.dmesg['suspend_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_noirq\[.*', name):
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
						data.dmesg['resume_noirq']['start'] = t.time
					else:
						data.dmesg['resume_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_early\[.*', name):
					if(isbegin):
						data.dmesg['resume_early']['start'] = t.time
					else:
						data.dmesg['resume_early']['end'] = t.time
					continue
				elif re.match('dpm_resume\[.*', name):
					if(isbegin):
						data.dmesg['resume']['start'] = t.time
					else:
						data.dmesg['resume']['end'] = t.time
					continue
				elif re.match('dpm_complete\[.*', name):
					if(isbegin):
						data.dmesg['resume_complete']['start'] = t.time
					else:
						data.dmesg['resume_complete']['end'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(isbegin):
						# store each trace event in ttemp
						if(name not in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name] = []
						testrun[testidx].ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						# finish off matching trace event in ttemp
						if(name in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name][-1]['end'] = t.time
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# call/return processing
			elif sysvals.usecallgraph:
				# create a callgraph object for the data
				if(pid not in testrun[testidx].ftemp):
					testrun[testidx].ftemp[pid] = []
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
				# when the call is finished, see which device matches it
				cg = testrun[testidx].ftemp[pid][-1]
				if(cg.addLine(t, m)):
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
	tf.close()
	for test in testrun:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)
		# add the callgraph data to the device hierarchy
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if(not cg.sanityCheck()):
					# NOTE(review): 'm' here is the last regex match left
					# over from the parse loop above; its 'cpu' group may
					# not correspond to this pid - verify
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				# attach the callgraph to the device whose callback
				# window fully contains it, within the matching phase
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
		if(sysvals.verbose):
			test.data.printDetails()
	# add the time in between the tests as a new phase so we can see it
	if(len(testruns) > 1):
		t1e = testruns[0].getEnd()
		t2s = testruns[-1].getStart()
		testruns[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
# Function: parseTraceLog
# Description:
# Analyze an ftrace log output file generated from this app during
# the execution phase. Used when the ftrace log is the primary data source
# and includes the suspend_resume and device_pm_callback trace events
# The ftrace filename is taken from sysvals
# Output:
# An array of Data objects
def parseTraceLog():
	"""
	Analyze an ftrace log output file generated from this app during the
	execution phase. Used when the ftrace log is the primary data source
	and includes the suspend_resume and device_pm_callback trace events.
	The ftrace filename is taken from sysvals.
	Output:
		An array of Data objects, one per test run found in the log
	"""
	global sysvals
	vprint('Analyzing the ftrace data...')
	if(os.path.exists(sysvals.ftracefile) == False):
		doError('%s doesnt exist' % sysvals.ftracefile, False)
	# extract the callgraph and traceevent data
	testruns = []
	testdata = []
	testrun = 0
	data = 0
	tf = open(sysvals.ftracefile, 'r')
	phase = 'suspend_prepare'
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# stamp line: each stamp means a new test run
		m = re.match(sysvals.stampfmt, line)
		if(m):
			data = Data(len(testdata))
			testdata.append(data)
			testrun = TestRun(data)
			testruns.append(testrun)
			parseStamp(m, data)
			continue
		if(not data):
			continue
		# firmware line: pull out any firmware data
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		# tracer type line: determine the trace data type
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun.setTracerType(tracer)
			continue
		# post resume time line: did this test run include post-resume data
		m = re.match(sysvals.postresumefmt, line)
		if(m):
			t = int(m.group('t'))
			if(t > 0):
				sysvals.postresumetime = t
			continue
		# ftrace line: parse only valid lines
		m = re.match(testrun.ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun.cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		if(not testrun.inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun.inthepipe = True
					data.setStart(t.time)
			continue
		# trace event processing
		if(t.fevent):
			if(t.name == 'RESUME COMPLETE'):
				# with post-resume data, keep collecting in a new phase
				if(sysvals.postresumetime > 0):
					phase = 'post_resume'
					data.newPhase(phase, t.time, t.time, '#FF9966', -1)
				else:
					testrun.inthepipe = False
				data.setEnd(t.time)
				continue
			if(phase == 'post_resume'):
				data.setEnd(t.time)
			if(t.type == 'suspend_resume'):
				# suspend_resume trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				# split "name[val]" style event names; val 0 is dropped
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# ignore these events
				if(re.match('acpi_suspend\[.*', t.name) or
					re.match('suspend_enter\[.*', name)):
					continue
				# -- phase changes --
				# suspend_prepare start
				if(re.match('dpm_prepare\[.*', t.name)):
					phase = 'suspend_prepare'
					if(not isbegin):
						data.dmesg[phase]['end'] = t.time
					continue
				# suspend start
				elif(re.match('dpm_suspend\[.*', t.name)):
					phase = 'suspend'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_late start
				elif(re.match('dpm_suspend_late\[.*', t.name)):
					phase = 'suspend_late'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_noirq start
				elif(re.match('dpm_suspend_noirq\[.*', t.name)):
					phase = 'suspend_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(not isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['start'] = t.time
					continue
				# suspend_machine/resume_machine
				elif(re.match('machine_suspend\[.*', t.name)):
					if(isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['end'] = t.time
						data.tSuspended = t.time
					else:
						# for mem/disk, the real suspend point is here
						if(sysvals.suspendmode in ['mem', 'disk']):
							data.dmesg['suspend_machine']['end'] = t.time
							data.tSuspended = t.time
						phase = 'resume_machine'
						data.dmesg[phase]['start'] = t.time
						data.tResumed = t.time
						data.tLow = data.tResumed - data.tSuspended
					continue
				# resume_noirq start
				elif(re.match('dpm_resume_noirq\[.*', t.name)):
					phase = 'resume_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
					continue
				# resume_early start
				elif(re.match('dpm_resume_early\[.*', t.name)):
					phase = 'resume_early'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume start
				elif(re.match('dpm_resume\[.*', t.name)):
					phase = 'resume'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume complete start
				elif(re.match('dpm_complete\[.*', t.name)):
					phase = 'resume_complete'
					if(isbegin):
						data.dmesg[phase]['start'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(name not in testrun.ttemp):
						testrun.ttemp[name] = []
					if(isbegin):
						# create a new list entry
						testrun.ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						if(len(testrun.ttemp[name]) > 0):
							# if an entry exists, assume this is its end
							testrun.ttemp[name][-1]['end'] = t.time
						elif(phase == 'post_resume'):
							# post resume events can just have ends
							testrun.ttemp[name].append({
								'begin': data.dmesg[phase]['start'],
								'end': t.time})
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# device callback start
			elif(t.type == 'device_pm_callback_start'):
				m = re.match('(?P<drv>.*) (?P<d>.*), parent: *(?P<p>.*), .*',\
					t.name);
				if(not m):
					continue
				drv = m.group('drv')
				n = m.group('d')
				p = m.group('p')
				if(n and p):
					data.newAction(phase, n, pid, p, t.time, -1, drv)
			# device callback finish
			elif(t.type == 'device_pm_callback_end'):
				m = re.match('(?P<drv>.*) (?P<d>.*), err.*', t.name);
				if(not m):
					continue
				n = m.group('d')
				list = data.dmesg[phase]['list']
				if(n in list):
					dev = list[n]
					dev['length'] = t.time - dev['start']
					dev['end'] = t.time
		# callgraph processing
		elif sysvals.usecallgraph:
			# this shouldn't happen, but JIC, ignore callgraph data post-res
			if(phase == 'post_resume'):
				continue
			# create a callgraph object for the data
			if(pid not in testrun.ftemp):
				testrun.ftemp[pid] = []
				testrun.ftemp[pid].append(FTraceCallGraph())
			# when the call is finished, see which device matches it
			cg = testrun.ftemp[pid][-1]
			if(cg.addLine(t, m)):
				testrun.ftemp[pid].append(FTraceCallGraph())
	tf.close()
	for test in testruns:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)
		# add the callgraph data to the device hierarchy
		borderphase = {
			'dpm_prepare': 'suspend_prepare',
			'dpm_complete': 'resume_complete'
		}
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if len(cg.list) < 2:
					continue
				if(not cg.sanityCheck()):
					# NOTE(review): 'm' here is the last regex match left
					# over from the parse loop above; its 'cpu' group may
					# not correspond to this pid - verify
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				# dpm_prepare/dpm_complete graphs are sliced per-device
				if(cg.list[0].name in borderphase):
					p = borderphase[cg.list[0].name]
					list = test.data.dmesg[p]['list']
					for devname in list:
						dev = list[devname]
						if(pid == dev['pid'] and
							callstart <= dev['start'] and
							callend >= dev['end']):
							dev['ftrace'] = cg.slice(dev['start'], dev['end'])
					continue
				if(cg.list[0].name != 'dpm_run_callback'):
					continue
				# attach dpm_run_callback graphs to the matching device
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
	# fill in any missing phases
	for data in testdata:
		lp = data.phases[0]
		for p in data.phases:
			if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
				print('WARNING: phase "%s" is missing!' % p)
			# missing boundaries inherit from the previous phase
			if(data.dmesg[p]['start'] < 0):
				data.dmesg[p]['start'] = data.dmesg[lp]['end']
				if(p == 'resume_machine'):
					data.tSuspended = data.dmesg[lp]['end']
					data.tResumed = data.dmesg[lp]['end']
					data.tLow = 0
			if(data.dmesg[p]['end'] < 0):
				data.dmesg[p]['end'] = data.dmesg[p]['start']
			lp = p
		if(len(sysvals.devicefilter) > 0):
			data.deviceFilter(sysvals.devicefilter)
		data.fixupInitcallsThatDidntReturn()
		if(sysvals.verbose):
			data.printDetails()
	# add the time in between the tests as a new phase so we can see it
	if(len(testdata) > 1):
		t1e = testdata[0].getEnd()
		t2s = testdata[-1].getStart()
		testdata[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
	return testdata
# Function: loadKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# load the dmesg file into memory and fix up any ordering issues
# The dmesg filename is taken from sysvals
# Output:
# An array of empty Data objects with only their dmesgtext attributes set
def loadKernelLog():
	"""
	[deprecated for kernel 3.15.0 or newer]
	Load sysvals.dmesgfile into memory as one Data object per test run
	(runs are delineated by stamp lines), storing only the raw dmesg
	text for later parsing, and fix call/return ordering issues.
	Output:
		An array of empty Data objects with only their dmesgtext set
	"""
	global sysvals
	vprint('Analyzing the dmesg data...')
	if(os.path.exists(sysvals.dmesgfile) == False):
		doError('%s doesnt exist' % sysvals.dmesgfile, False)
	# there can be multiple test runs in a single file delineated by stamps
	testruns = []
	data = 0
	lf = open(sysvals.dmesgfile, 'r')
	for line in lf:
		line = line.replace('\r\n', '')
		# strip any prefix before the leading '[' of the timestamp
		idx = line.find('[')
		if idx > 1:
			line = line[idx:]
		m = re.match(sysvals.stampfmt, line)
		if(m):
			if(data):
				testruns.append(data)
			data = Data(len(testruns))
			parseStamp(m, data)
			continue
		if(not data):
			continue
		# firmware suspend/resume timing line
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			data.dmesgtext.append(line)
			if(re.match('ACPI: resume from mwait', m.group('msg'))):
				print('NOTE: This suspend appears to be freeze rather than'+\
					' %s, it will be treated as such' % sysvals.suspendmode)
				sysvals.suspendmode = 'freeze'
		else:
			vprint('ignoring dmesg line: %s' % line.replace('\n', ''))
	# NOTE(review): if no stamp line was found, data is still 0 here and
	# gets appended before the error check below - verify intent
	testruns.append(data)
	lf.close()
	if(not data):
		print('ERROR: analyze_suspend header missing from dmesg log')
		sys.exit()
	# fix lines with same timestamp/function with the call and return swapped
	for data in testruns:
		last = ''
		for line in data.dmesgtext:
			mc = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) calling '+\
				'(?P<f>.*)\+ @ .*, parent: .*', line)
			mr = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) call '+\
				'(?P<f>.*)\+ returned .* after (?P<dt>.*) usecs', last)
			if(mc and mr and (mc.group('t') == mr.group('t')) and
				(mc.group('f') == mr.group('f'))):
				# swap the adjacent call/return lines in place
				i = data.dmesgtext.index(last)
				j = data.dmesgtext.index(line)
				data.dmesgtext[i] = line
				data.dmesgtext[j] = last
			last = line
	return testruns
# Function: parseKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# Analyse a dmesg log output file generated from this app during
# the execution phase. Create a set of device structures in memory
# for subsequent formatting in the html output file
# This call is only for legacy support on kernels where the ftrace
# data lacks the suspend_resume or device_pm_callbacks trace events.
# Arguments:
# data: an empty Data object (with dmesgtext) obtained from loadKernelLog
# Output:
# The filled Data object
def parseKernelLog(data):
	"""Parse a dmesg log into a filled Data object (legacy path).

	[deprecated for kernel 3.15.0 or newer]
	Walks each timestamped line of data.dmesgtext, advancing a phase
	state machine driven by the regex tables below, and records device
	suspend/resume callback timings plus CPU on/off actions for later
	html formatting.
	Arguments:
		data: an empty Data object (with dmesgtext) obtained from loadKernelLog
	Output:
		True (the Data object is filled in place)
	"""
	global sysvals
	# start in a neutral phase until the suspend_prepare marker is seen
	phase = 'suspend_runtime'
	if(data.fwValid):
		vprint('Firmware Suspend = %u ns, Firmware Resume = %u ns' % \
			(data.fwSuspend, data.fwResume))
	# dmesg phase match table: phase name -> regex that marks its start
	dm = {
		'suspend_prepare': 'PM: Syncing filesystems.*',
		'suspend': 'PM: Entering [a-z]* sleep.*',
		'suspend_late': 'PM: suspend of devices complete after.*',
		'suspend_noirq': 'PM: late suspend of devices complete after.*',
		'suspend_machine': 'PM: noirq suspend of devices complete after.*',
		'resume_machine': 'ACPI: Low-level resume complete.*',
		'resume_noirq': 'ACPI: Waking up from system sleep state.*',
		'resume_early': 'PM: noirq resume of devices complete after.*',
		'resume': 'PM: early resume of devices complete after.*',
		'resume_complete': 'PM: resume of devices complete after.*',
		'post_resume': '.*Restarting tasks \.\.\..*',
	}
	# standby/disk/freeze print different kernel messages; patch the table
	if(sysvals.suspendmode == 'standby'):
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
	elif(sysvals.suspendmode == 'disk'):
		dm['suspend_late'] = 'PM: freeze of devices complete after.*'
		dm['suspend_noirq'] = 'PM: late freeze of devices complete after.*'
		dm['suspend_machine'] = 'PM: noirq freeze of devices complete after.*'
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
		dm['resume_early'] = 'PM: noirq restore of devices complete after.*'
		dm['resume'] = 'PM: early restore of devices complete after.*'
		dm['resume_complete'] = 'PM: restore of devices complete after.*'
	elif(sysvals.suspendmode == 'freeze'):
		dm['resume_machine'] = 'ACPI: resume from mwait'
	# action table (expected events that occur and show up in dmesg)
	# each action has a start-message and end-message regex pair
	at = {
		'sync_filesystems': {
			'smsg': 'PM: Syncing filesystems.*',
			'emsg': 'PM: Preparing system for mem sleep.*' },
		'freeze_user_processes': {
			'smsg': 'Freezing user space processes .*',
			'emsg': 'Freezing remaining freezable tasks.*' },
		'freeze_tasks': {
			'smsg': 'Freezing remaining freezable tasks.*',
			'emsg': 'PM: Entering (?P<mode>[a-z,A-Z]*) sleep.*' },
		'ACPI prepare': {
			'smsg': 'ACPI: Preparing to enter system sleep state.*',
			'emsg': 'PM: Saving platform NVS memory.*' },
		'PM vns': {
			'smsg': 'PM: Saving platform NVS memory.*',
			'emsg': 'Disabling non-boot CPUs .*' },
	}
	t0 = -1.0
	cpu_start = -1.0
	prevktime = -1.0
	actions = dict()
	for line in data.dmesgtext:
		# -- preprocessing --
		# parse each dmesg line into the time and message
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			val = m.group('ktime')
			try:
				ktime = float(val)
			except:
				doWarning('INVALID DMESG LINE: '+\
					line.replace('\n', ''), 'dmesg')
				continue
			msg = m.group('msg')
			# initialize data start to first line time
			if t0 < 0:
				data.setStart(ktime)
				t0 = ktime
		else:
			continue
		# hack for determining resume_machine end for freeze
		# (freeze has no "low-level resume" message; use the first
		# device callback as the phase boundary instead)
		if(not sysvals.usetraceevents and sysvals.suspendmode == 'freeze' \
			and phase == 'resume_machine' and \
			re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# -- phase changes --
		# suspend start
		if(re.match(dm['suspend_prepare'], msg)):
			phase = 'suspend_prepare'
			data.dmesg[phase]['start'] = ktime
			data.setStart(ktime)
		# suspend start
		elif(re.match(dm['suspend'], msg)):
			data.dmesg['suspend_prepare']['end'] = ktime
			phase = 'suspend'
			data.dmesg[phase]['start'] = ktime
		# suspend_late start
		elif(re.match(dm['suspend_late'], msg)):
			data.dmesg['suspend']['end'] = ktime
			phase = 'suspend_late'
			data.dmesg[phase]['start'] = ktime
		# suspend_noirq start
		elif(re.match(dm['suspend_noirq'], msg)):
			data.dmesg['suspend_late']['end'] = ktime
			phase = 'suspend_noirq'
			data.dmesg[phase]['start'] = ktime
		# suspend_machine start
		elif(re.match(dm['suspend_machine'], msg)):
			data.dmesg['suspend_noirq']['end'] = ktime
			phase = 'suspend_machine'
			data.dmesg[phase]['start'] = ktime
		# resume_machine start
		elif(re.match(dm['resume_machine'], msg)):
			# for freeze/standby the resume marker fires after the fact,
			# so the previous line's timestamp is the true suspend point
			if(sysvals.suspendmode in ['freeze', 'standby']):
				data.tSuspended = prevktime
				data.dmesg['suspend_machine']['end'] = prevktime
			else:
				data.tSuspended = ktime
				data.dmesg['suspend_machine']['end'] = ktime
			phase = 'resume_machine'
			data.tResumed = ktime
			data.tLow = data.tResumed - data.tSuspended
			data.dmesg[phase]['start'] = ktime
		# resume_noirq start
		elif(re.match(dm['resume_noirq'], msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# resume_early start
		elif(re.match(dm['resume_early'], msg)):
			data.dmesg['resume_noirq']['end'] = ktime
			phase = 'resume_early'
			data.dmesg[phase]['start'] = ktime
		# resume start
		elif(re.match(dm['resume'], msg)):
			data.dmesg['resume_early']['end'] = ktime
			phase = 'resume'
			data.dmesg[phase]['start'] = ktime
		# resume complete start
		elif(re.match(dm['resume_complete'], msg)):
			data.dmesg['resume']['end'] = ktime
			phase = 'resume_complete'
			data.dmesg[phase]['start'] = ktime
		# post resume start
		elif(re.match(dm['post_resume'], msg)):
			data.dmesg['resume_complete']['end'] = ktime
			data.setEnd(ktime)
			phase = 'post_resume'
			break
		# -- device callbacks --
		if(phase in data.phases):
			# device init call
			if(re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
				sm = re.match('calling (?P<f>.*)\+ @ '+\
					'(?P<n>.*), parent: (?P<p>.*)', msg);
				f = sm.group('f')
				n = sm.group('n')
				p = sm.group('p')
				if(f and n and p):
					data.newAction(phase, f, int(n), p, ktime, -1, '')
			# device init return
			elif(re.match('call (?P<f>.*)\+ returned .* after '+\
				'(?P<t>.*) usecs', msg)):
				sm = re.match('call (?P<f>.*)\+ returned .* after '+\
					'(?P<t>.*) usecs(?P<a>.*)', msg);
				f = sm.group('f')
				t = sm.group('t')
				list = data.dmesg[phase]['list']
				if(f in list):
					dev = list[f]
					dev['length'] = int(t)
					dev['end'] = ktime
		# -- non-devicecallback actions --
		# if trace events are not available, these are better than nothing
		if(not sysvals.usetraceevents):
			# look for known actions
			for a in at:
				if(re.match(at[a]['smsg'], msg)):
					if(a not in actions):
						actions[a] = []
					actions[a].append({'begin': ktime, 'end': ktime})
				if(re.match(at[a]['emsg'], msg)):
					actions[a][-1]['end'] = ktime
			# now look for CPU on/off events
			if(re.match('Disabling non-boot CPUs .*', msg)):
				# start of first cpu suspend
				cpu_start = ktime
			elif(re.match('Enabling non-boot CPUs .*', msg)):
				# start of first cpu resume
				cpu_start = ktime
			elif(re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)):
				# end of a cpu suspend, start of the next
				m = re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
			elif(re.match('CPU(?P<cpu>[0-9]*) is up', msg)):
				# end of a cpu resume, start of the next
				m = re.match('CPU(?P<cpu>[0-9]*) is up', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
		prevktime = ktime
	# fill in any missing phases
	lp = data.phases[0]
	for p in data.phases:
		if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
			print('WARNING: phase "%s" is missing, something went wrong!' % p)
			print('    In %s, this dmesg line denotes the start of %s:' % \
				(sysvals.suspendmode, p))
			print('        "%s"' % dm[p])
		if(data.dmesg[p]['start'] < 0):
			# inherit the previous phase's end so the timeline stays contiguous
			data.dmesg[p]['start'] = data.dmesg[lp]['end']
			if(p == 'resume_machine'):
				data.tSuspended = data.dmesg[lp]['end']
				data.tResumed = data.dmesg[lp]['end']
				data.tLow = 0
		if(data.dmesg[p]['end'] < 0):
			data.dmesg[p]['end'] = data.dmesg[p]['start']
		lp = p
	# fill in any actions we've found
	for name in actions:
		for event in actions[name]:
			begin = event['begin']
			end = event['end']
			# if event starts before timeline start, expand timeline
			if(begin < data.start):
				data.setStart(begin)
			# if event ends after timeline end, expand the timeline
			if(end > data.end):
				data.setEnd(end)
			data.newActionGlobal(name, begin, end)
	if(sysvals.verbose):
		data.printDetails()
	if(len(sysvals.devicefilter) > 0):
		data.deviceFilter(sysvals.devicefilter)
	data.fixupInitcallsThatDidntReturn()
	return True
# Function: setTimelineRows
# Description:
# Organize the timeline entries into the smallest
# number of rows possible, with no entry overlapping
# Arguments:
# list: the list of devices/actions for a single phase
# sortedkeys: chronologically sorted key list to use
# Output:
# The total number of rows needed to display this phase of the timeline
def setTimelineRows(list, sortedkeys):
	"""Pack the timeline entries of one phase into the fewest rows.

	Greedy first-fit: for each row, place every not-yet-placed entry
	(in chronological order) that does not overlap anything already in
	that row, then move to the next row.
	Arguments:
		list: dict of devices/actions for a single phase; each value
			must have 'start' and 'end' keys, and gets a 'row' key
			assigned here (note: parameter name shadows the builtin
			'list'; kept for caller compatibility)
		sortedkeys: chronologically sorted key list to use
	Output:
		The total number of rows needed to display this phase
	"""
	# clear all rows and set them to undefined
	remaining = len(list)
	rowdata = dict()
	row = 0
	for item in list:
		list[item]['row'] = -1
	# try to pack each row with as many ranges as possible
	while(remaining > 0):
		if(row not in rowdata):
			rowdata[row] = []
		for item in sortedkeys:
			if(list[item]['row'] < 0):
				s = list[item]['start']
				e = list[item]['end']
				valid = True
				for ritem in rowdata[row]:
					# renamed from 'rs'/'re': the old 're' local
					# shadowed the imported re module
					rstart = ritem['start']
					rend = ritem['end']
					# overlap unless entirely before or entirely after
					if(not (((s <= rstart) and (e <= rstart)) or
						((s >= rend) and (e >= rend)))):
						valid = False
						break
				if(valid):
					rowdata[row].append(list[item])
					list[item]['row'] = row
					remaining -= 1
		row += 1
	return row
# Function: createTimeScale
# Description:
# Create the timescale header for the html timeline
# Arguments:
# t0: start time (suspend begin)
# tMax: end time (resume end)
# tSuspend: time when suspend occurs, i.e. the zero time
# Output:
# The html code needed to display the time scale
def createTimeScale(t0, tMax, tSuspended):
	"""Build the html timescale header for the timeline.

	Arguments:
		t0: start time (suspend begin)
		tMax: end time (resume end)
		tSuspended: time when suspend occurs, i.e. the zero time
			(negative means no suspend point: simple left-anchored scale)
	Output:
		The html code needed to display the time scale
	"""
	timescale = '<div class="t" style="right:{0}%">{1}</div>\n'
	output = '<div id="timescale">\n'
	# set scale for timeline: 1s divisions for long runs, else 100ms
	tTotal = tMax - t0
	if(tTotal <= 0):
		return output
	tS = 1 if tTotal > 4 else 0.1
	marks = []
	if(tSuspended < 0):
		# no suspend point: labels count up from the left edge
		for idx in range(int(tTotal/tS)+1):
			offset = '%0.3f' % (100 - ((float(idx)*tS*100)/tTotal))
			label = '%0.fms' % (float(idx)*tS*1000) if idx > 0 else ''
			marks.append(timescale.format(offset, label))
	else:
		# anchor the scale on the suspend point, labeled "S/R"
		tSuspend = tSuspended - t0
		divTotal = int(tTotal/tS) + 1
		divSuspend = int(tSuspend/tS)
		s0 = (tSuspend - tS*divSuspend)*100/tTotal
		for idx in range(divTotal):
			offset = '%0.3f' % (100 - ((float(idx)*tS*100)/tTotal) - s0)
			if((idx == 0) and (s0 < 3)):
				# first label would crowd the edge; leave it blank
				label = ''
			elif(idx == divSuspend):
				label = 'S/R'
			else:
				label = '%0.fms' % (float(idx-divSuspend)*tS*1000)
			marks.append(timescale.format(offset, label))
	output += ''.join(marks)
	output += '</div>\n'
	return output
# Function: createHTMLSummarySimple
# Description:
# Create summary html file for a series of tests
# Arguments:
# testruns: array of Data objects from parseTraceLog
def createHTMLSummarySimple(testruns, htmlfile):
	"""Create a summary html file for a series of tests.

	Writes one table row per test (suspend/resume times and a link to
	that test's detail page), plus a final average row.
	Arguments:
		testruns: array of Data objects from parseTraceLog
		htmlfile: path of the summary html file to write
	"""
	global sysvals
	# print out the basic summary of all the tests
	hf = open(htmlfile, 'w')
	# write the html header first (html head, css code, up to body start)
	html = '<!DOCTYPE html>\n<html>\n<head>\n\
<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
<title>AnalyzeSuspend Summary</title>\n\
<style type=\'text/css\'>\n\
body {overflow-y: scroll;}\n\
.stamp {width: 100%;text-align:center;background-color:#495E09;line-height:30px;color:white;font: 25px Arial;}\n\
table {width:100%;border-collapse: collapse;}\n\
.summary {font: 22px Arial;border:1px solid;}\n\
th {border: 1px solid black;background-color:#A7C942;color:white;}\n\
td {text-align: center;}\n\
tr.alt td {background-color:#EAF2D3;}\n\
tr.avg td {background-color:#BDE34C;}\n\
a:link {color: #90B521;}\n\
a:visited {color: #495E09;}\n\
a:hover {color: #B1DF28;}\n\
a:active {color: #FFFFFF;}\n\
</style>\n</head>\n<body>\n'
	# group test header
	count = len(testruns)
	headline_stamp = '<div class="stamp">{0} {1} {2} {3} ({4} tests)</div>\n'
	html += headline_stamp.format(sysvals.stamp['host'],
		sysvals.stamp['kernel'], sysvals.stamp['mode'],
		sysvals.stamp['time'], count)
	# check to see if all the tests have the same value
	# (host/kernel/mode columns are only shown when runs differ)
	stampcolumns = False
	for data in testruns:
		if diffStamp(sysvals.stamp, data.stamp):
			stampcolumns = True
			break
	th = '\t<th>{0}</th>\n'
	td = '\t<td>{0}</td>\n'
	tdlink = '\t<td><a href="{0}">Click Here</a></td>\n'
	# table header
	html += '<table class="summary">\n<tr>\n'
	html += th.format("Test #")
	if stampcolumns:
		html += th.format("Hostname")
		html += th.format("Kernel Version")
		html += th.format("Suspend Mode")
	html += th.format("Test Time")
	html += th.format("Suspend Time")
	html += th.format("Resume Time")
	html += th.format("Detail")
	html += '</tr>\n'
	# test data, 1 row per test
	sTimeAvg = 0.0
	rTimeAvg = 0.0
	num = 1
	for data in testruns:
		# data.end is the end of post_resume
		resumeEnd = data.dmesg['resume_complete']['end']
		# alternate row shading
		if num % 2 == 1:
			html += '<tr class="alt">\n'
		else:
			html += '<tr>\n'
		# test num
		html += td.format("test %d" % num)
		num += 1
		if stampcolumns:
			# host name
			val = "unknown"
			if('host' in data.stamp):
				val = data.stamp['host']
			html += td.format(val)
			# host kernel
			val = "unknown"
			if('kernel' in data.stamp):
				val = data.stamp['kernel']
			html += td.format(val)
			# suspend mode
			val = "unknown"
			if('mode' in data.stamp):
				val = data.stamp['mode']
			html += td.format(val)
		# test time
		val = "unknown"
		if('time' in data.stamp):
			val = data.stamp['time']
		html += td.format(val)
		# suspend time
		sTime = (data.tSuspended - data.start)*1000
		sTimeAvg += sTime
		html += td.format("%3.3f ms" % sTime)
		# resume time
		rTime = (resumeEnd - data.tResumed)*1000
		rTimeAvg += rTime
		html += td.format("%3.3f ms" % rTime)
		# link to the output html
		html += tdlink.format(data.outfile)
		html += '</tr>\n'
	# last line: test average
	if(count > 0):
		sTimeAvg /= count
		rTimeAvg /= count
	html += '<tr class="avg">\n'
	html += td.format('Average')	# name
	if stampcolumns:
		html += td.format('')			# host
		html += td.format('')			# kernel
		html += td.format('')			# mode
	html += td.format('')			# time
	html += td.format("%3.3f ms" % sTimeAvg)	# suspend time
	html += td.format("%3.3f ms" % rTimeAvg)	# resume time
	html += td.format('')			# output link
	html += '</tr>\n'
	# flush the data to file
	hf.write(html+'</table>\n')
	hf.write('</body>\n</html>\n')
	hf.close()
# Function: createHTML
# Description:
# Create the output html file from the resident test data
# Arguments:
# testruns: array of Data objects from parseKernelLog or parseTraceLog
# Output:
# True if the html file was created, false if it failed
def createHTML(testruns):
	"""Create the output html file from the resident test data.

	Builds the device timeline (phases, device bars, trace events,
	legend), writes the html header/css, then optionally appends the
	ftrace callgraph section and the javascript from addScriptCode.
	Arguments:
		testruns: array of Data objects from parseKernelLog or parseTraceLog
	Output:
		True if the html file was created
	"""
	global sysvals
	# align all runs so the last run's suspend point is time zero
	for data in testruns:
		data.normalizeTime(testruns[-1].tSuspended)
	x2changes = ['', 'absolute']
	if len(testruns) > 1:
		x2changes = ['1', 'relative']
	# html function templates
	headline_stamp = '<div class="stamp">{0} {1} {2} {3}</div>\n'
	html_devlist1 = '<button id="devlist1" class="devlist" style="float:left;">Device Detail%s</button>' % x2changes[0]
	html_zoombox = '<center><button id="zoomin">ZOOM IN</button><button id="zoomout">ZOOM OUT</button><button id="zoomdef">ZOOM 1:1</button></center>\n'
	html_devlist2 = '<button id="devlist2" class="devlist" style="float:right;">Device Detail2</button>\n'
	html_timeline = '<div id="dmesgzoombox" class="zoombox">\n<div id="{0}" class="timeline" style="height:{1}px">\n'
	html_device = '<div id="{0}" title="{1}" class="thread" style="left:{2}%;top:{3}%;height:{4}%;width:{5}%;">{6}</div>\n'
	html_traceevent = '<div title="{0}" class="traceevent" style="left:{1}%;top:{2}%;height:{3}%;width:{4}%;border:1px solid {5};background-color:{5}">{6}</div>\n'
	html_phase = '<div class="phase" style="left:{0}%;width:{1}%;top:{2}%;height:{3}%;background-color:{4}">{5}</div>\n'
	html_phaselet = '<div id="{0}" class="phaselet" style="left:{1}%;width:{2}%;background-color:{3}"></div>\n'
	html_legend = '<div class="square" style="left:{0}%;background-color:{1}">&nbsp;{2}</div>\n'
	html_timetotal = '<table class="time1">\n<tr>'\
		'<td class="green">{2} Suspend Time: <b>{0} ms</b></td>'\
		'<td class="yellow">{2} Resume Time: <b>{1} ms</b></td>'\
		'</tr>\n</table>\n'
	html_timetotal2 = '<table class="time1">\n<tr>'\
		'<td class="green">{3} Suspend Time: <b>{0} ms</b></td>'\
		'<td class="gray">'+sysvals.suspendmode+' time: <b>{1} ms</b></td>'\
		'<td class="yellow">{3} Resume Time: <b>{2} ms</b></td>'\
		'</tr>\n</table>\n'
	html_timegroups = '<table class="time2">\n<tr>'\
		'<td class="green">{4}Kernel Suspend: {0} ms</td>'\
		'<td class="purple">{4}Firmware Suspend: {1} ms</td>'\
		'<td class="purple">{4}Firmware Resume: {2} ms</td>'\
		'<td class="yellow">{4}Kernel Resume: {3} ms</td>'\
		'</tr>\n</table>\n'
	# device timeline
	vprint('Creating Device Timeline...')
	devtl = Timeline()
	# Generate the header for this timeline
	textnum = ['First', 'Second']
	for data in testruns:
		tTotal = data.end - data.start
		tEnd = data.dmesg['resume_complete']['end']
		if(tTotal == 0):
			print('ERROR: No timeline data')
			sys.exit()
		if(data.tLow > 0):
			low_time = '%.0f'%(data.tLow*1000)
		if data.fwValid:
			# include firmware suspend/resume time in the totals
			suspend_time = '%.0f'%((data.tSuspended-data.start)*1000 + \
				(data.fwSuspend/1000000.0))
			resume_time = '%.0f'%((tEnd-data.tSuspended)*1000 + \
				(data.fwResume/1000000.0))
			testdesc1 = 'Total'
			testdesc2 = ''
			if(len(testruns) > 1):
				testdesc1 = testdesc2 = textnum[data.testnumber]
				testdesc2 += ' '
			if(data.tLow == 0):
				thtml = html_timetotal.format(suspend_time, \
					resume_time, testdesc1)
			else:
				thtml = html_timetotal2.format(suspend_time, low_time, \
					resume_time, testdesc1)
			devtl.html['timeline'] += thtml
			sktime = '%.3f'%((data.dmesg['suspend_machine']['end'] - \
				data.getStart())*1000)
			sftime = '%.3f'%(data.fwSuspend / 1000000.0)
			rftime = '%.3f'%(data.fwResume / 1000000.0)
			rktime = '%.3f'%((data.getEnd() - \
				data.dmesg['resume_machine']['start'])*1000)
			devtl.html['timeline'] += html_timegroups.format(sktime, \
				sftime, rftime, rktime, testdesc2)
		else:
			# kernel-only timings
			suspend_time = '%.0f'%((data.tSuspended-data.start)*1000)
			resume_time = '%.0f'%((tEnd-data.tSuspended)*1000)
			testdesc = 'Kernel'
			if(len(testruns) > 1):
				testdesc = textnum[data.testnumber]+' '+testdesc
			if(data.tLow == 0):
				thtml = html_timetotal.format(suspend_time, \
					resume_time, testdesc)
			else:
				thtml = html_timetotal2.format(suspend_time, low_time, \
					resume_time, testdesc)
			devtl.html['timeline'] += thtml
	# time scale for potentially multiple datasets
	t0 = testruns[0].start
	tMax = testruns[-1].end
	tSuspended = testruns[-1].tSuspended
	tTotal = tMax - t0
	# determine the maximum number of rows we need to draw
	timelinerows = 0
	for data in testruns:
		for phase in data.dmesg:
			list = data.dmesg[phase]['list']
			rows = setTimelineRows(list, list)
			data.dmesg[phase]['row'] = rows
			if(rows > timelinerows):
				timelinerows = rows
	# calculate the timeline height and create bounding box, add buttons
	devtl.setRows(timelinerows + 1)
	devtl.html['timeline'] += html_devlist1
	if len(testruns) > 1:
		devtl.html['timeline'] += html_devlist2
	devtl.html['timeline'] += html_zoombox
	devtl.html['timeline'] += html_timeline.format('dmesg', devtl.height)
	# draw the colored boxes for each of the phases
	for data in testruns:
		for b in data.dmesg:
			phase = data.dmesg[b]
			length = phase['end']-phase['start']
			left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
			width = '%.3f' % ((length*100.0)/tTotal)
			devtl.html['timeline'] += html_phase.format(left, width, \
				'%.3f'%devtl.scaleH, '%.3f'%(100-devtl.scaleH), \
				data.dmesg[b]['color'], '')
	# draw the time scale, try to make the number of labels readable
	devtl.html['scale'] = createTimeScale(t0, tMax, tSuspended)
	devtl.html['timeline'] += devtl.html['scale']
	# draw one bar per device callback
	for data in testruns:
		for b in data.dmesg:
			phaselist = data.dmesg[b]['list']
			for d in phaselist:
				name = d
				drv = ''
				dev = phaselist[d]
				if(d in sysvals.altdevname):
					name = sysvals.altdevname[d]
				if('drv' in dev and dev['drv']):
					drv = ' {%s}' % dev['drv']
				height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
				top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
				left = '%.3f' % (((dev['start']-t0)*100)/tTotal)
				width = '%.3f' % (((dev['end']-dev['start'])*100)/tTotal)
				length = ' (%0.3f ms) ' % ((dev['end']-dev['start'])*1000)
				color = 'rgba(204,204,204,0.5)'
				devtl.html['timeline'] += html_device.format(dev['id'], \
					d+drv+length+b, left, top, '%.3f'%height, width, name+drv)
	# draw any trace events found
	for data in testruns:
		for b in data.dmesg:
			phaselist = data.dmesg[b]['list']
			for name in phaselist:
				dev = phaselist[name]
				if('traceevents' in dev):
					vprint('Debug trace events found for device %s' % name)
					vprint('%20s %20s %10s %8s' % ('action', \
						'name', 'time(ms)', 'length(ms)'))
					for e in dev['traceevents']:
						vprint('%20s %20s %10.3f %8.3f' % (e.action, \
							e.name, e.time*1000, e.length*1000))
						height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
						top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
						left = '%.3f' % (((e.time-t0)*100)/tTotal)
						width = '%.3f' % (e.length*100/tTotal)
						color = 'rgba(204,204,204,0.5)'
						devtl.html['timeline'] += \
							html_traceevent.format(e.action+' '+e.name, \
								left, top, '%.3f'%height, \
								width, e.color, '')
	# timeline is finished
	devtl.html['timeline'] += '</div>\n</div>\n'
	# draw a legend which describes the phases by color
	data = testruns[-1]
	devtl.html['legend'] = '<div class="legend">\n'
	pdelta = 100.0/len(data.phases)
	pmargin = pdelta / 4.0
	for phase in data.phases:
		order = '%.2f' % ((data.dmesg[phase]['order'] * pdelta) + pmargin)
		# Python 2 string-module API; str.replace equivalent
		name = string.replace(phase, '_', ' ')
		devtl.html['legend'] += html_legend.format(order, \
			data.dmesg[phase]['color'], name)
	devtl.html['legend'] += '</div>\n'
	hf = open(sysvals.htmlfile, 'w')
	thread_height = 0
	# write the html header first (html head, css code, up to body start)
	html_header = '<!DOCTYPE html>\n<html>\n<head>\n\
<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
<title>AnalyzeSuspend</title>\n\
<style type=\'text/css\'>\n\
body {overflow-y: scroll;}\n\
.stamp {width: 100%;text-align:center;background-color:gray;line-height:30px;color:white;font: 25px Arial;}\n\
.callgraph {margin-top: 30px;box-shadow: 5px 5px 20px black;}\n\
.callgraph article * {padding-left: 28px;}\n\
h1 {color:black;font: bold 30px Times;}\n\
t0 {color:black;font: bold 30px Times;}\n\
t1 {color:black;font: 30px Times;}\n\
t2 {color:black;font: 25px Times;}\n\
t3 {color:black;font: 20px Times;white-space:nowrap;}\n\
t4 {color:black;font: bold 30px Times;line-height:60px;white-space:nowrap;}\n\
table {width:100%;}\n\
.gray {background-color:rgba(80,80,80,0.1);}\n\
.green {background-color:rgba(204,255,204,0.4);}\n\
.purple {background-color:rgba(128,0,128,0.2);}\n\
.yellow {background-color:rgba(255,255,204,0.4);}\n\
.time1 {font: 22px Arial;border:1px solid;}\n\
.time2 {font: 15px Arial;border-bottom:1px solid;border-left:1px solid;border-right:1px solid;}\n\
td {text-align: center;}\n\
r {color:#500000;font:15px Tahoma;}\n\
n {color:#505050;font:15px Tahoma;}\n\
.tdhl {color: red;}\n\
.hide {display: none;}\n\
.pf {display: none;}\n\
.pf:checked + label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/><rect x="8" y="4" width="2" height="10" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:not(:checked) ~ label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
.pf:checked ~ *:not(:nth-child(2)) {display: none;}\n\
.zoombox {position: relative; width: 100%; overflow-x: scroll;}\n\
.timeline {position: relative; font-size: 14px;cursor: pointer;width: 100%; overflow: hidden; background-color:#dddddd;}\n\
.thread {position: absolute; height: '+'%.3f'%thread_height+'%; overflow: hidden; line-height: 30px; border:1px solid;text-align:center;white-space:nowrap;background-color:rgba(204,204,204,0.5);}\n\
.thread:hover {background-color:white;border:1px solid red;z-index:10;}\n\
.hover {background-color:white;border:1px solid red;z-index:10;}\n\
.traceevent {position: absolute;opacity: 0.3;height: '+'%.3f'%thread_height+'%;width:0;overflow:hidden;line-height:30px;text-align:center;white-space:nowrap;}\n\
.phase {position: absolute;overflow: hidden;border:0px;text-align:center;}\n\
.phaselet {position:absolute;overflow:hidden;border:0px;text-align:center;height:100px;font-size:24px;}\n\
.t {position:absolute;top:0%;height:100%;border-right:1px solid black;}\n\
.legend {position: relative; width: 100%; height: 40px; text-align: center;margin-bottom:20px}\n\
.legend .square {position:absolute;top:10px; width: 0px;height: 20px;border:1px solid;padding-left:20px;}\n\
button {height:40px;width:200px;margin-bottom:20px;margin-top:20px;font-size:24px;}\n\
.devlist {position:'+x2changes[1]+';width:190px;}\n\
#devicedetail {height:100px;box-shadow: 5px 5px 20px black;}\n\
</style>\n</head>\n<body>\n'
	hf.write(html_header)
	# write the test title and general info header
	if(sysvals.stamp['time'] != ""):
		hf.write(headline_stamp.format(sysvals.stamp['host'],
			sysvals.stamp['kernel'], sysvals.stamp['mode'], \
			sysvals.stamp['time']))
	# write the device timeline
	hf.write(devtl.html['timeline'])
	hf.write(devtl.html['legend'])
	hf.write('<div id="devicedetailtitle"></div>\n')
	hf.write('<div id="devicedetail" style="display:none;">\n')
	# draw the colored boxes for the device detail section
	for data in testruns:
		hf.write('<div id="devicedetail%d">\n' % data.testnumber)
		for b in data.phases:
			phase = data.dmesg[b]
			length = phase['end']-phase['start']
			left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
			width = '%.3f' % ((length*100.0)/tTotal)
			hf.write(html_phaselet.format(b, left, width, \
				data.dmesg[b]['color']))
		hf.write('</div>\n')
	hf.write('</div>\n')
	# write the ftrace data (callgraph)
	data = testruns[-1]
	if(sysvals.usecallgraph):
		hf.write('<section id="callgraphs" class="callgraph">\n')
		# write out the ftrace data converted to html
		html_func_top = '<article id="{0}" class="atop" style="background-color:{1}">\n<input type="checkbox" class="pf" id="f{2}" checked/><label for="f{2}">{3} {4}</label>\n'
		html_func_start = '<article>\n<input type="checkbox" class="pf" id="f{0}" checked/><label for="f{0}">{1} {2}</label>\n'
		html_func_end = '</article>\n'
		html_func_leaf = '<article>{0} {1}</article>\n'
		num = 0
		for p in data.phases:
			list = data.dmesg[p]['list']
			for devname in data.sortedDevices(p):
				if('ftrace' not in list[devname]):
					continue
				name = devname
				if(devname in sysvals.altdevname):
					name = sysvals.altdevname[devname]
				devid = list[devname]['id']
				cg = list[devname]['ftrace']
				flen = '<r>(%.3f ms @ %.3f to %.3f)</r>' % \
					((cg.end - cg.start)*1000, cg.start*1000, cg.end*1000)
				hf.write(html_func_top.format(devid, data.dmesg[p]['color'], \
					num, name+' '+p, flen))
				num += 1
				for line in cg.list:
					if(line.length < 0.000000001):
						flen = ''
					else:
						flen = '<n>(%.3f ms @ %.3f)</n>' % (line.length*1000, \
							line.time*1000)
					if(line.freturn and line.fcall):
						hf.write(html_func_leaf.format(line.name, flen))
					elif(line.freturn):
						hf.write(html_func_end)
					else:
						hf.write(html_func_start.format(num, line.name, flen))
						num += 1
				hf.write(html_func_end)
		hf.write('\n\n    </section>\n')
	# write the footer and close
	addScriptCode(hf, testruns)
	hf.write('</body>\n</html>\n')
	hf.close()
	return True
# Function: addScriptCode
# Description:
# Adds the javascript code to the output html
# Arguments:
# hf: the open html file pointer
# testruns: array of Data objects from parseKernelLog or parseTraceLog
def addScriptCode(hf, testruns):
	"""Add the javascript code to the output html.

	Arguments:
		hf: the open html file pointer
		testruns: array of Data objects from parseKernelLog or parseTraceLog
	"""
	# timeline bounds in ms, relative to the last run's suspend point
	t0 = (testruns[0].start - testruns[-1].tSuspended) * 1000
	tMax = (testruns[-1].end - testruns[-1].tSuspended) * 1000
	# create an array in javascript memory with the device details
	detail = '	var devtable = [];\n'
	for data in testruns:
		topo = data.deviceTopology()
		detail += '	devtable[%d] = "%s";\n' % (data.testnumber, topo)
	detail += '	var bounds = [%f,%f];\n' % (t0, tMax)
	# add the code which will manipulate the data in the browser
	# (one large concatenated string; cannot be commented inline)
	script_code = \
	'<script type="text/javascript">\n'+detail+\
	'	function zoomTimeline() {\n'\
	'		var timescale = document.getElementById("timescale");\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var zoombox = document.getElementById("dmesgzoombox");\n'\
	'		var val = parseFloat(dmesg.style.width);\n'\
	'		var newval = 100;\n'\
	'		var sh = window.outerWidth / 2;\n'\
	'		if(this.id == "zoomin") {\n'\
	'			newval = val * 1.2;\n'\
	'			if(newval > 40000) newval = 40000;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else if (this.id == "zoomout") {\n'\
	'			newval = val / 1.2;\n'\
	'			if(newval < 100) newval = 100;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else {\n'\
	'			zoombox.scrollLeft = 0;\n'\
	'			dmesg.style.width = "100%";\n'\
	'		}\n'\
	'		var html = "";\n'\
	'		var t0 = bounds[0];\n'\
	'		var tMax = bounds[1];\n'\
	'		var tTotal = tMax - t0;\n'\
	'		var wTotal = tTotal * 100.0 / newval;\n'\
	'		for(var tS = 1000; (wTotal / tS) < 3; tS /= 10);\n'\
	'		if(tS < 1) tS = 1;\n'\
	'		for(var s = ((t0 / tS)|0) * tS; s < tMax; s += tS) {\n'\
	'			var pos = (tMax - s) * 100.0 / tTotal;\n'\
	'			var name = (s == 0)?"S/R":(s+"ms");\n'\
	'			html += "<div class=\\"t\\" style=\\"right:"+pos+"%\\">"+name+"</div>";\n'\
	'		}\n'\
	'		timescale.innerHTML = html;\n'\
	'	}\n'\
	'	function deviceHover() {\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				dev[i].className = "thread hover";\n'\
	'			} else {\n'\
	'				dev[i].className = "thread";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceUnhover() {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].className = "thread";\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceTitle(title, total, cpu) {\n'\
	'		var prefix = "Total";\n'\
	'		if(total.length > 3) {\n'\
	'			prefix = "Average";\n'\
	'			total[1] = (total[1]+total[3])/2;\n'\
	'			total[2] = (total[2]+total[4])/2;\n'\
	'		}\n'\
	'		var devtitle = document.getElementById("devicedetailtitle");\n'\
	'		var name = title.slice(0, title.indexOf(" "));\n'\
	'		if(cpu >= 0) name = "CPU"+cpu;\n'\
	'		var driver = "";\n'\
	'		var tS = "<t2>(</t2>";\n'\
	'		var tR = "<t2>)</t2>";\n'\
	'		if(total[1] > 0)\n'\
	'			tS = "<t2>("+prefix+" Suspend:</t2><t0> "+total[1].toFixed(3)+" ms</t0> ";\n'\
	'		if(total[2] > 0)\n'\
	'			tR = " <t2>"+prefix+" Resume:</t2><t0> "+total[2].toFixed(3)+" ms<t2>)</t2></t0>";\n'\
	'		var s = title.indexOf("{");\n'\
	'		var e = title.indexOf("}");\n'\
	'		if((s >= 0) && (e >= 0))\n'\
	'			driver = title.slice(s+1, e) + " <t1>@</t1> ";\n'\
	'		if(total[1] > 0 && total[2] > 0)\n'\
	'			devtitle.innerHTML = "<t0>"+driver+name+"</t0> "+tS+tR;\n'\
	'		else\n'\
	'			devtitle.innerHTML = "<t0>"+title+"</t0>";\n'\
	'		return name;\n'\
	'	}\n'\
	'	function deviceDetail() {\n'\
	'		var devinfo = document.getElementById("devicedetail");\n'\
	'		devinfo.style.display = "block";\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var idlist = [];\n'\
	'		var pdata = [[]];\n'\
	'		var pd = pdata[0];\n'\
	'		var total = [0.0, 0.0, 0.0];\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				idlist[idlist.length] = dev[i].id;\n'\
	'				var tidx = 1;\n'\
	'				if(dev[i].id[0] == "a") {\n'\
	'					pd = pdata[0];\n'\
	'				} else {\n'\
	'					if(pdata.length == 1) pdata[1] = [];\n'\
	'					if(total.length == 3) total[3]=total[4]=0.0;\n'\
	'					pd = pdata[1];\n'\
	'					tidx = 3;\n'\
	'				}\n'\
	'				var info = dev[i].title.split(" ");\n'\
	'				var pname = info[info.length-1];\n'\
	'				pd[pname] = parseFloat(info[info.length-3].slice(1));\n'\
	'				total[0] += pd[pname];\n'\
	'				if(pname.indexOf("suspend") >= 0)\n'\
	'					total[tidx] += pd[pname];\n'\
	'				else\n'\
	'					total[tidx+1] += pd[pname];\n'\
	'			}\n'\
	'		}\n'\
	'		var devname = deviceTitle(this.title, total, cpu);\n'\
	'		var left = 0.0;\n'\
	'		for (var t = 0; t < pdata.length; t++) {\n'\
	'			pd = pdata[t];\n'\
	'			devinfo = document.getElementById("devicedetail"+t);\n'\
	'			var phases = devinfo.getElementsByClassName("phaselet");\n'\
	'			for (var i = 0; i < phases.length; i++) {\n'\
	'				if(phases[i].id in pd) {\n'\
	'					var w = 100.0*pd[phases[i].id]/total[0];\n'\
	'					var fs = 32;\n'\
	'					if(w < 8) fs = 4*w | 0;\n'\
	'					var fs2 = fs*3/4;\n'\
	'					phases[i].style.width = w+"%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'					phases[i].title = phases[i].id+" "+pd[phases[i].id]+" ms";\n'\
	'					left += w;\n'\
	'					var time = "<t4 style=\\"font-size:"+fs+"px\\">"+pd[phases[i].id]+" ms<br></t4>";\n'\
	'					var pname = "<t3 style=\\"font-size:"+fs2+"px\\">"+phases[i].id.replace("_", " ")+"</t3>";\n'\
	'					phases[i].innerHTML = time+pname;\n'\
	'				} else {\n'\
	'					phases[i].style.width = "0%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'				}\n'\
	'			}\n'\
	'		}\n'\
	'		var cglist = document.getElementById("callgraphs");\n'\
	'		if(!cglist) return;\n'\
	'		var cg = cglist.getElementsByClassName("atop");\n'\
	'		for (var i = 0; i < cg.length; i++) {\n'\
	'			if(idlist.indexOf(cg[i].id) >= 0) {\n'\
	'				cg[i].style.display = "block";\n'\
	'			} else {\n'\
	'				cg[i].style.display = "none";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function devListWindow(e) {\n'\
	'		var sx = e.clientX;\n'\
	'		if(sx > window.innerWidth - 440)\n'\
	'			sx = window.innerWidth - 440;\n'\
	'		var cfg="top="+e.screenY+", left="+sx+", width=440, height=720, scrollbars=yes";\n'\
	'		var win = window.open("", "_blank", cfg);\n'\
	'		if(window.chrome) win.moveBy(sx, 0);\n'\
	'		var html = "<title>"+e.target.innerHTML+"</title>"+\n'\
	'			"<style type=\\"text/css\\">"+\n'\
	'			"   ul {list-style-type:circle;padding-left:10px;margin-left:10px;}"+\n'\
	'			"</style>"\n'\
	'		var dt = devtable[0];\n'\
	'		if(e.target.id != "devlist1")\n'\
	'			dt = devtable[1];\n'\
	'		win.document.write(html+dt);\n'\
	'	}\n'\
	'	window.addEventListener("load", function () {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		dmesg.style.width = "100%"\n'\
	'		document.getElementById("zoomin").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomout").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomdef").onclick = zoomTimeline;\n'\
	'		var devlist = document.getElementsByClassName("devlist");\n'\
	'		for (var i = 0; i < devlist.length; i++)\n'\
	'			devlist[i].onclick = devListWindow;\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].onclick = deviceDetail;\n'\
	'			dev[i].onmouseover = deviceHover;\n'\
	'			dev[i].onmouseout = deviceUnhover;\n'\
	'		}\n'\
	'		zoomTimeline();\n'\
	'	});\n'\
	'</script>\n'
	hf.write(script_code);
# Function: executeSuspend
# Description:
# Execute system suspend through the sysfs interface, then copy the output
# dmesg and ftrace files to the test output directory.
def executeSuspend():
	"""Execute one or more suspend/resume cycles through the sysfs interface.

	For each requested run: clears the dmesg ring buffer, optionally
	configures and starts ftrace, writes the suspend mode to the power file
	(which blocks until resume), then appends the captured ftrace and dmesg
	logs to the test output files.
	"""
	global sysvals
	detectUSB(False)
	t0 = time.time()*1000
	tp = sysvals.tpath
	# execute however many s/r runs requested
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system('dmesg -C')
		# enable callgraph ftrace only for the second run
		if(sysvals.usecallgraph and count == 2):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | '+\
				'grep dpm_run_callback > '+tp+'set_graph_function')
		# if this is test2 and there's a delay, start here
		if(count > 1 and sysvals.x2delay > 0):
			# busy-wait (1ms granularity) until x2delay ms since resume
			tN = time.time()*1000
			while (tN - t0) < sysvals.x2delay:
				tN = time.time()*1000
				time.sleep(0.001)
		# start ftrace
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			print('START TRACING')
			os.system('echo 1 > '+tp+'tracing_on')
		# initiate suspend
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo SUSPEND START > '+tp+'trace_marker')
		if(sysvals.rtcwake):
			print('SUSPEND START')
			print('will autoresume in %d seconds' % sysvals.rtcwaketime)
			sysvals.rtcWakeAlarm()
		else:
			print('SUSPEND START (press a key to resume)')
		# writing the mode to the power file initiates the suspend
		pf = open(sysvals.powerfile, 'w')
		pf.write(sysvals.suspendmode)
		# execution will pause here
		pf.close()
		t0 = time.time()*1000
		# return from suspend
		print('RESUME COMPLETE')
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo RESUME COMPLETE > '+tp+'trace_marker')
		# see if there's firmware timing data to be had
		t = sysvals.postresumetime
		if(t > 0):
			print('Waiting %d seconds for POST-RESUME trace events...' % t)
			time.sleep(t)
		# stop ftrace
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo 0 > '+tp+'tracing_on')
			print('CAPTURING TRACE')
			writeDatafileHeader(sysvals.ftracefile)
			os.system('cat '+tp+'trace >> '+sysvals.ftracefile)
			os.system('echo "" > '+tp+'trace')
		# grab a copy of the dmesg output
		print('CAPTURING DMESG')
		writeDatafileHeader(sysvals.dmesgfile)
		os.system('dmesg -c >> '+sysvals.dmesgfile)
def writeDatafileHeader(filename):
	"""Append the test stamp header (plus optional firmware timing and
	post-resume-time lines) to the given output data file."""
	global sysvals
	firmware = getFPDT(False)
	post_resume = sysvals.postresumetime
	# assemble the header lines first, then append them in one shot
	header = [sysvals.teststamp+'\n']
	if firmware:
		header.append('# fwsuspend %u fwresume %u\n' % (firmware[0], firmware[1]))
	if post_resume > 0:
		header.append('# post resume time %u\n' % post_resume)
	out = open(filename, 'a')
	out.writelines(header)
	out.close()
# Function: executeAndroidSuspend
# Description:
# Execute system suspend through the sysfs interface
# on a remote android device, then transfer the output
# dmesg and ftrace files to the local output directory.
def executeAndroidSuspend():
	"""Execute suspend/resume cycles on a remote android device via adb.

	Wakes the device if its screen is off, then for each requested test run
	clears dmesg, starts ftrace (if enabled), writes the suspend mode to
	the power file over adb, and polls adb until the device resumes.
	Afterwards the ftrace and dmesg logs are copied to the local output
	files.
	"""
	global sysvals
	# check to see if the display is currently off
	tp = sysvals.tpath
	out = os.popen(sysvals.adb+\
		' shell dumpsys power | grep mScreenOn').read().strip()
	# if so we need to turn it on so we can issue a new suspend
	if(out.endswith('false')):
		print('Waking the device up for the test...')
		# send the KEYPAD_POWER keyevent to wake it up
		os.system(sysvals.adb+' shell input keyevent 26')
		# wait a few seconds so the user can see the device wake up
		time.sleep(3)
	# execute however many s/r runs requested
	# BUGFIX: the original nested a second identical "for count in
	# range(1,sysvals.execcount+1)" loop inside this one, which initiated
	# execcount suspends per outer iteration (execcount^2 total) and
	# shadowed the loop variable; a single loop is correct and matches
	# the structure of executeSuspend()
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system(sysvals.adb+' shell dmesg -c > /dev/null 2>&1')
		# start ftrace
		if(sysvals.usetraceevents):
			print('START TRACING')
			os.system(sysvals.adb+" shell 'echo 1 > "+tp+"tracing_on'")
		# initiate suspend
		if(sysvals.usetraceevents):
			os.system(sysvals.adb+\
				" shell 'echo SUSPEND START > "+tp+"trace_marker'")
		print('SUSPEND START (press a key on the device to resume)')
		os.system(sysvals.adb+" shell 'echo "+sysvals.suspendmode+\
			" > "+sysvals.powerfile+"'")
		# execution will pause here, then adb will exit
		while(True):
			check = os.popen(sysvals.adb+\
				' shell pwd 2>/dev/null').read().strip()
			if(len(check) > 0):
				break
			time.sleep(1)
		if(sysvals.usetraceevents):
			os.system(sysvals.adb+" shell 'echo RESUME COMPLETE > "+tp+\
				"trace_marker'")
		# return from suspend
		print('RESUME COMPLETE')
	# stop ftrace
	if(sysvals.usetraceevents):
		os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
		print('CAPTURING TRACE')
		os.system('echo "'+sysvals.teststamp+'" > '+sysvals.ftracefile)
		os.system(sysvals.adb+' shell cat '+tp+\
			'trace >> '+sysvals.ftracefile)
	# grab a copy of the dmesg output
	print('CAPTURING DMESG')
	os.system('echo "'+sysvals.teststamp+'" > '+sysvals.dmesgfile)
	os.system(sysvals.adb+' shell dmesg >> '+sysvals.dmesgfile)
# Function: setUSBDevicesAuto
# Description:
# Set the autosuspend control parameter of all USB devices to auto
# This can be dangerous, so use at your own risk, most devices are set
# to always-on since the kernel cant determine if the device can
# properly autosuspend
def setUSBDevicesAuto():
	"""Set the power/control attribute to 'auto' for every USB device.

	Walks /sys/devices for USB nodes (those exposing both idVendor and
	idProduct), enables runtime autosuspend on each, and prints the
	resulting control state. Requires root.
	"""
	global sysvals
	rootCheck()
	for devpath, subdirs, files in os.walk('/sys/devices'):
		# only USB device nodes carry both vendor and product ids
		if(not re.match('.*/usb[0-9]*.*', devpath) or
			'idVendor' not in files or 'idProduct' not in files):
			continue
		os.system('echo auto > %s/power/control' % devpath)
		devname = devpath.split('/')[-1]
		product = os.popen('cat %s/product 2>/dev/null' % \
			devpath).read().replace('\n', '')
		control = os.popen('cat %s/power/control 2>/dev/null' % \
			devpath).read().replace('\n', '')
		print('control is %s for %6s: %s' % (control, devname, product))
# Function: yesno
# Description:
# Print out an equivalent Y or N for a set of known parameter values
# Output:
# 'Y', 'N', or ' ' if the value is unknown
def yesno(val):
	"""Map a sysfs power attribute value onto a single status letter.

	Returns 'Y' for affirmative values, 'N' for negative ones, and a
	single space when the value is unrecognized.
	"""
	if val in ('auto', 'enabled', 'active', '1'):
		return 'Y'
	if val in ('on', 'disabled', 'suspended', 'forbidden', 'unsupported'):
		return 'N'
	return ' '
# Function: ms2nice
# Description:
# Print out a very concise time string in minutes and seconds
# Output:
# The time string, e.g. "1901m16s"
def ms2nice(val):
	"""Render a millisecond count as a terse minutes/seconds string.

	Returns e.g. '  2m 0s' for '120000'; returns 0.0 when val cannot be
	parsed as an integer (legacy fallback kept for compatibility).
	"""
	try:
		millis = int(val)
	except:
		return 0.0
	minutes = millis / 60000
	seconds = (millis / 1000) - (minutes * 60)
	return '%3dm%2ds' % (minutes, seconds)
# Function: detectUSB
# Description:
# Detect all the USB hosts and devices currently connected and add
# a list of USB device names to sysvals for better timeline readability
# Arguments:
# output: True to output the info to stdout, False otherwise
def detectUSB(output):
	"""Detect connected USB hosts/devices and record readable names.

	Populates sysvals.altdevname with 'product [name]' (or
	'vendor:product [name]') strings for better timeline readability.

	Arguments:
		output: True to also print a per-device power-info table to stdout
	"""
	global sysvals
	# per-device identity attributes read from sysfs
	field = {'idVendor':'', 'idProduct':'', 'product':'', 'speed':''}
	# per-device runtime-power attributes read from sysfs power/
	power = {'async':'', 'autosuspend':'', 'autosuspend_delay_ms':'',
		 'control':'', 'persist':'', 'runtime_enabled':'',
		 'runtime_status':'', 'runtime_usage':'',
		'runtime_active_time':'',
		'runtime_suspended_time':'',
		'active_duration':'',
		'connected_duration':''}
	if(output):
		print('LEGEND')
		print('---------------------------------------------------------------------------------------------')
		print(' A = async/sync PM queue Y/N D = autosuspend delay (seconds)')
		print(' S = autosuspend Y/N rACTIVE = runtime active (min/sec)')
		print(' P = persist across suspend Y/N rSUSPEN = runtime suspend (min/sec)')
		print(' E = runtime suspend enabled/forbidden Y/N ACTIVE = active duration (min/sec)')
		print(' R = runtime status active/suspended Y/N CONNECT = connected duration (min/sec)')
		print(' U = runtime usage count')
		print('---------------------------------------------------------------------------------------------')
		print(' NAME ID DESCRIPTION SPEED A S P E R U D rACTIVE rSUSPEN ACTIVE CONNECT')
		print('---------------------------------------------------------------------------------------------')
	for dirname, dirnames, filenames in os.walk('/sys/devices'):
		# USB device nodes expose both idVendor and idProduct
		if(re.match('.*/usb[0-9]*.*', dirname) and
			'idVendor' in filenames and 'idProduct' in filenames):
			for i in field:
				field[i] = os.popen('cat %s/%s 2>/dev/null' % \
					(dirname, i)).read().replace('\n', '')
			name = dirname.split('/')[-1]
			# prefer the human-readable product string when present
			if(len(field['product']) > 0):
				sysvals.altdevname[name] = \
					'%s [%s]' % (field['product'], name)
			else:
				sysvals.altdevname[name] = \
					'%s:%s [%s]' % (field['idVendor'], \
						field['idProduct'], name)
			if(output):
				for i in power:
					power[i] = os.popen('cat %s/power/%s 2>/dev/null' % \
						(dirname, i)).read().replace('\n', '')
				# left-align host controllers, right-align devices
				if(re.match('usb[0-9]*', name)):
					first = '%-8s' % name
				else:
					first = '%8s' % name
				print('%s [%s:%s] %-20s %-4s %1s %1s %1s %1s %1s %1s %1s %s %s %s %s' % \
					(first, field['idVendor'], field['idProduct'], \
					field['product'][0:20], field['speed'], \
					yesno(power['async']), \
					yesno(power['control']), \
					yesno(power['persist']), \
					yesno(power['runtime_enabled']), \
					yesno(power['runtime_status']), \
					power['runtime_usage'], \
					power['autosuspend'], \
					ms2nice(power['runtime_active_time']), \
					ms2nice(power['runtime_suspended_time']), \
					ms2nice(power['active_duration']), \
					ms2nice(power['connected_duration'])))
# Function: getModes
# Description:
# Determine the supported power modes on this system
# Output:
# A string list of the available modes
def getModes():
	"""Return the list of suspend modes supported by the target system.

	Reads the kernel power state file (locally, or over adb when testing
	an android device) and splits it into a list of mode names,
	e.g. ['freeze', 'mem'].
	"""
	global sysvals
	modes = ''
	if(not sysvals.android):
		if(os.path.exists(sysvals.powerfile)):
			fp = open(sysvals.powerfile, 'r')
			# str.split() replaces the deprecated string.split() helper
			# (the string module's function form was removed in python 3)
			modes = fp.read().split()
			fp.close()
	else:
		line = os.popen(sysvals.adb+' shell cat '+\
			sysvals.powerfile).read().strip()
		modes = line.split()
	return modes
# Function: getFPDT
# Description:
# Read the acpi bios tables and pull out FPDT, the firmware data
# Arguments:
# output: True to output the info to stdout, False otherwise
def getFPDT(output):
	"""Read the ACPI FPDT table and extract firmware suspend/resume timing.

	Parses the FPDT table exposed by sysfs and follows its record pointers
	into /dev/mem to read the FBPT (boot) and S3PT (suspend) performance
	tables.

	Arguments:
		output: True to print the table contents to stdout
	Returns:
		[fwsuspend, fwresume] times in ns on success, False otherwise
	"""
	global sysvals
	rectype = {}
	rectype[0] = 'Firmware Basic Boot Performance Record'
	rectype[1] = 'S3 Performance Table Record'
	prectype = {}
	prectype[0] = 'Basic S3 Resume Performance Record'
	prectype[1] = 'Basic S3 Suspend Performance Record'
	rootCheck()
	if(not os.path.exists(sysvals.fpdtpath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.access(sysvals.fpdtpath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.path.exists(sysvals.mempath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.mempath, False)
		return False
	if(not os.access(sysvals.mempath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.mempath, False)
		return False
	fp = open(sysvals.fpdtpath, 'rb')
	buf = fp.read()
	fp.close()
	# 36 bytes is the fixed ACPI table header size
	if(len(buf) < 36):
		if(output):
			doError('Invalid FPDT table data, should '+\
				'be at least 36 bytes', False)
		return False
	table = struct.unpack('4sIBB6s8sI4sI', buf[0:36])
	if(output):
		print('')
		print('Firmware Performance Data Table (%s)' % table[0])
		print(' Signature : %s' % table[0])
		print(' Table Length : %u' % table[1])
		print(' Revision : %u' % table[2])
		print(' Checksum : 0x%x' % table[3])
		print(' OEM ID : %s' % table[4])
		print(' OEM Table ID : %s' % table[5])
		print(' OEM Revision : %u' % table[6])
		print(' Creator ID : %s' % table[7])
		print(' Creator Revision : 0x%x' % table[8])
		print('')
	if(table[0] != 'FPDT'):
		if(output):
			# BUGFIX: doError requires the 'help' argument; omitting it
			# raised a TypeError instead of reporting the real problem
			doError('Invalid FPDT table', False)
		return False
	if(len(buf) <= 36):
		return False
	i = 0
	fwData = [0, 0]
	records = buf[36:]
	fp = open(sysvals.mempath, 'rb')
	while(i < len(records)):
		header = struct.unpack('HBB', records[i:i+4])
		if(header[1] == 0):
			# zero-length record: stop rather than loop forever
			break
		if(header[0] not in rectype):
			# BUGFIX: skip unknown records by their length; the old code
			# hit 'continue' without advancing i (infinite loop)
			i += header[1]
			continue
		if(header[1] != 16):
			# BUGFIX: same non-advancing 'continue' as above
			i += header[1]
			continue
		addr = struct.unpack('Q', records[i+8:i+16])[0]
		try:
			fp.seek(addr)
			first = fp.read(8)
		except:
			doError('Bad address 0x%x in %s' % (addr, sysvals.mempath), False)
		rechead = struct.unpack('4sI', first)
		recdata = fp.read(rechead[1]-8)
		if(rechead[0] == 'FBPT'):
			record = struct.unpack('HBBIQQQQQ', recdata)
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
				print(' Reset END : %u ns' % record[4])
				print(' OS Loader LoadImage Start : %u ns' % record[5])
				print(' OS Loader StartImage Start : %u ns' % record[6])
				print(' ExitBootServices Entry : %u ns' % record[7])
				print(' ExitBootServices Exit : %u ns' % record[8])
		elif(rechead[0] == 'S3PT'):
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
			j = 0
			while(j < len(recdata)):
				prechead = struct.unpack('HBB', recdata[j:j+4])
				if(prechead[1] == 0):
					# zero-length sub-record: stop rather than loop forever
					break
				if(prechead[0] not in prectype):
					# BUGFIX: advance past unknown sub-records instead of
					# spinning on a non-advancing 'continue'
					j += prechead[1]
					continue
				if(prechead[0] == 0):
					record = struct.unpack('IIQQ', recdata[j:j+prechead[1]])
					fwData[1] = record[2]
					if(output):
						print(' %s' % prectype[prechead[0]])
						print(' Resume Count : %u' % \
							record[1])
						print(' FullResume : %u ns' % \
							record[2])
						print(' AverageResume : %u ns' % \
							record[3])
				elif(prechead[0] == 1):
					record = struct.unpack('QQ', recdata[j+4:j+prechead[1]])
					fwData[0] = record[1] - record[0]
					if(output):
						print(' %s' % prectype[prechead[0]])
						print(' SuspendStart : %u ns' % \
							record[0])
						print(' SuspendEnd : %u ns' % \
							record[1])
						print(' SuspendTime : %u ns' % \
							fwData[0])
				j += prechead[1]
		if(output):
			print('')
		i += header[1]
	fp.close()
	return fwData
# Function: statusCheck
# Description:
# Verify that the requested command and options will work, and
# print the results to the terminal
# Output:
# True if the test will work, False if not
def statusCheck():
	"""Verify that the requested command and options will work here.

	Probes adb connectivity (android), root access, sysfs, the chosen
	suspend mode, ftrace support and the usable timeline data sources,
	printing each result to the terminal.

	Returns:
		True if the test can run, False otherwise
	"""
	global sysvals
	status = True
	if(sysvals.android):
		print('Checking the android system ...')
	else:
		print('Checking this system (%s)...' % platform.node())
	# check if adb is connected to a device
	if(sysvals.android):
		res = 'NO'
		out = os.popen(sysvals.adb+' get-state').read().strip()
		if(out == 'device'):
			res = 'YES'
		print(' is android device connected: %s' % res)
		if(res != 'YES'):
			print(' Please connect the device before using this tool')
			return False
	# check we have root access
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell id').read().strip()
		if('root' in out):
			res = 'YES'
	else:
		if(os.environ['USER'] == 'root'):
			res = 'YES'
	print(' have root access: %s' % res)
	if(res != 'YES'):
		if(sysvals.android):
			print(' Try running "adb root" to restart the daemon as root')
		else:
			print(' Try running this script with sudo')
		return False
	# check sysfs is mounted
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell ls '+\
			sysvals.powerfile).read().strip()
		if(out == sysvals.powerfile):
			res = 'YES'
	else:
		if(os.path.exists(sysvals.powerfile)):
			res = 'YES'
	print(' is sysfs mounted: %s' % res)
	if(res != 'YES'):
		return False
	# check target mode is a valid mode
	res = 'NO'
	modes = getModes()
	if(sysvals.suspendmode in modes):
		res = 'YES'
	else:
		status = False
	print(' is "%s" a valid power mode: %s' % (sysvals.suspendmode, res))
	if(res == 'NO'):
		print(' valid power modes are: %s' % modes)
		print(' please choose one with -m')
	# check if the tool can unlock the device
	if(sysvals.android):
		res = 'YES'
		out1 = os.popen(sysvals.adb+\
			' shell dumpsys power | grep mScreenOn').read().strip()
		out2 = os.popen(sysvals.adb+\
			' shell input').read().strip()
		if(not out1.startswith('mScreenOn') or not out2.startswith('usage')):
			res = 'NO (wake the android device up before running the test)'
		print(' can I unlock the screen: %s' % res)
	# check if ftrace is available
	res = 'NO'
	ftgood = verifyFtrace()
	if(ftgood):
		res = 'YES'
	elif(sysvals.usecallgraph):
		# callgraph mode is a hard requirement on ftrace
		status = False
	print(' is ftrace supported: %s' % res)
	# what data source are we using
	res = 'DMESG'
	if(ftgood):
		# assume all trace events exist until one is found missing
		sysvals.usetraceeventsonly = True
		sysvals.usetraceevents = False
		for e in sysvals.traceevents:
			check = False
			if(sysvals.android):
				out = os.popen(sysvals.adb+' shell ls -d '+\
					sysvals.epath+e).read().strip()
				if(out == sysvals.epath+e):
					check = True
			else:
				if(os.path.exists(sysvals.epath+e)):
					check = True
			if(not check):
				sysvals.usetraceeventsonly = False
			if(e == 'suspend_resume' and check):
				sysvals.usetraceevents = True
		if(sysvals.usetraceevents and sysvals.usetraceeventsonly):
			res = 'FTRACE (all trace events found)'
		elif(sysvals.usetraceevents):
			res = 'DMESG and FTRACE (suspend_resume trace event found)'
	print(' timeline data source: %s' % res)
	# check if rtcwake
	res = 'NO'
	if(sysvals.rtcpath != ''):
		res = 'YES'
	elif(sysvals.rtcwake):
		# rtcwake was requested but is unsupported
		status = False
	print(' is rtcwake supported: %s' % res)
	return status
# Function: doError
# Description:
#	 generic error function for catastrophic failures
# Arguments:
# msg: the error message to print
# help: True if printHelp should be called after, False otherwise
def doError(msg, help):
	"""Print an error message and exit, optionally showing the help first.

	Arguments:
		msg: the error message to print
		help: True if printHelp should be called before exiting
	"""
	if(help == True):
		printHelp()
	# BUGFIX: keep the %-formatting inside the call; the old form
	# "print('ERROR: %s\n') % msg" only worked because python 2's print
	# is a statement, and raises TypeError with the print function
	print('ERROR: %s\n' % msg)
	sys.exit()
# Function: doWarning
# Description:
# generic warning function for non-catastrophic anomalies
# Arguments:
# msg: the warning message to print
# file: If not empty, a filename to request be sent to the owner for debug
def doWarning(msg, file):
	"""Print a non-fatal warning, optionally asking for a debug file.

	Arguments:
		msg: the warning message to print
		file: if not empty, a filename to request be sent to the owner
	"""
	# BUGFIX: apply the %-formatting inside print(); the old
	# "print('/* %s */') % msg" form relied on the python 2 print
	# statement and would raise TypeError with the print function
	print('/* %s */' % msg)
	if(file):
		print('/* For a fix, please send this'+\
			' %s file to <todd.e.brandt@intel.com> */' % file)
# Function: rootCheck
# Description:
# quick check to see if we have root access
def rootCheck():
	"""Exit with an error unless the script is running as root.

	Uses os.environ.get so a missing USER variable (e.g. under cron or a
	stripped sudo environment) is treated as non-root instead of raising
	KeyError.
	"""
	if(os.environ.get('USER') != 'root'):
		doError('This script must be run as root', False)
# Function: getArgInt
# Description:
# pull out an integer argument from the command line with checks
def getArgInt(name, args, min, max):
	"""Pull the next command line argument and validate it as an integer.

	Arguments:
		name: the option name, used in error messages
		args: the argument iterator
		min/max: inclusive bounds for the accepted value
	Returns:
		The integer value of the argument (exits via doError on failure).
	"""
	try:
		# next(args) works on python 2.6+ and python 3, unlike the
		# iterator .next() method which python 3 removed
		arg = next(args)
	except:
		doError(name+': no argument supplied', True)
	try:
		val = int(arg)
	except:
		doError(name+': non-integer value given', True)
	if(val < min or val > max):
		doError(name+': value should be between %d and %d' % (min, max), True)
	return val
# Function: rerunTest
# Description:
# generate an output from an existing set of ftrace/dmesg logs
def rerunTest():
	"""Regenerate the HTML output from existing ftrace/dmesg log files.

	Chooses the parser based on whether the ftrace log contains all the
	required trace events; otherwise falls back to the dmesg log and
	augments it with any partial trace data.
	"""
	global sysvals
	if(sysvals.ftracefile != ''):
		doesTraceLogHaveTraceEvents()
	if(sysvals.dmesgfile == '' and not sysvals.usetraceeventsonly):
		doError('recreating this html output '+\
			'requires a dmesg file', False)
	sysvals.setOutputFile()
	vprint('Output file: %s' % sysvals.htmlfile)
	print('PROCESSING DATA')
	if(sysvals.usetraceeventsonly):
		# the ftrace log alone carries the whole timeline
		testruns = parseTraceLog()
	else:
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		if(sysvals.ftracefile != ''):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runTest
# Description:
# execute a suspend/resume, gather the logs, and generate the output
def runTest(subdir):
	"""Execute a suspend/resume, gather the logs, and generate the output.

	Arguments:
		subdir: directory in which to create the test output files
	"""
	global sysvals
	# prepare for the test
	if(not sysvals.android):
		initFtrace()
	else:
		initFtraceAndroid()
	sysvals.initTestOutput(subdir)
	vprint('Output files:\n %s' % sysvals.dmesgfile)
	if(sysvals.usecallgraph or
		sysvals.usetraceevents or
		sysvals.usetraceeventsonly):
		vprint(' %s' % sysvals.ftracefile)
	vprint(' %s' % sysvals.htmlfile)
	# execute the test
	if(not sysvals.android):
		executeSuspend()
	else:
		executeAndroidSuspend()
	# analyze the data and create the html output
	print('PROCESSING DATA')
	if(sysvals.usetraceeventsonly):
		# data for kernels 3.15 or newer is entirely in ftrace
		testruns = parseTraceLog()
	else:
		# data for kernels older than 3.15 is primarily in dmesg
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runSummary
# Description:
# create a summary of tests in a sub-directory
def runSummary(subdir, output):
	"""Create a summary page for all tests found in a sub-directory.

	Arguments:
		subdir: directory to scan recursively for *_ftrace.txt test logs
		output: True to print per-test progress to stdout
	"""
	global sysvals
	# get a list of ftrace output files
	files = []
	for dirname, dirnames, filenames in os.walk(subdir):
		for filename in filenames:
			if(re.match('.*_ftrace.txt', filename)):
				files.append("%s/%s" % (dirname, filename))
	# process the files in order and get an array of data objects
	testruns = []
	for file in sorted(files):
		if output:
			print("Test found in %s" % os.path.dirname(file))
		sysvals.ftracefile = file
		sysvals.dmesgfile = file.replace('_ftrace.txt', '_dmesg.txt')
		doesTraceLogHaveTraceEvents()
		sysvals.usecallgraph = False
		if not sysvals.usetraceeventsonly:
			# older-style logs need the matching dmesg file too
			if(not os.path.exists(sysvals.dmesgfile)):
				print("Skipping %s: not a valid test input" % file)
				continue
			else:
				if output:
					f = os.path.basename(sysvals.ftracefile)
					d = os.path.basename(sysvals.dmesgfile)
					print("\tInput files: %s and %s" % (f, d))
				testdata = loadKernelLog()
				data = testdata[0]
				parseKernelLog(data)
				testdata = [data]
				appendIncompleteTraceLog(testdata)
		else:
			if output:
				print("\tInput file: %s" % os.path.basename(sysvals.ftracefile))
			testdata = parseTraceLog()
			data = testdata[0]
		# align the timeline on the suspend point and link to its html
		data.normalizeTime(data.tSuspended)
		link = file.replace(subdir+'/', '').replace('_ftrace.txt', '.html')
		data.outfile = link
		testruns.append(data)
	createHTMLSummarySimple(testruns, subdir+'/summary.html')
# Function: printHelp
# Description:
# print out the help text
def printHelp():
	"""Print the tool usage/help text to stdout.

	Returns:
		True (always), so callers can use it in boolean expressions.
	"""
	global sysvals
	modes = getModes()
	print('')
	print('AnalyzeSuspend v%.1f' % sysvals.version)
	print('Usage: sudo analyze_suspend.py <options>')
	print('')
	print('Description:')
	print(' This tool is designed to assist kernel and OS developers in optimizing')
	print(' their linux stack\'s suspend/resume time. Using a kernel image built')
	print(' with a few extra options enabled, the tool will execute a suspend and')
	print(' capture dmesg and ftrace data until resume is complete. This data is')
	print(' transformed into a device timeline and an optional callgraph to give')
	print(' a detailed view of which devices/subsystems are taking the most')
	print(' time in suspend/resume.')
	print('')
	print(' Generates output files in subdirectory: suspend-mmddyy-HHMMSS')
	print(' HTML output: <hostname>_<mode>.html')
	print(' raw dmesg output: <hostname>_<mode>_dmesg.txt')
	print(' raw ftrace output: <hostname>_<mode>_ftrace.txt')
	print('')
	print('Options:')
	print(' [general]')
	print(' -h Print this help text')
	print(' -v Print the current tool version')
	print(' -verbose Print extra information during execution and analysis')
	print(' -status Test to see if the system is enabled to run this tool')
	print(' -modes List available suspend modes')
	# BUGFIX: the %-format must be applied inside print(); the old
	# "print('...')% (...)" form relied on the python 2 print statement
	# and raises TypeError with the print function
	print(' -m mode Mode to initiate for suspend %s (default: %s)' % (modes, sysvals.suspendmode))
	print(' -rtcwake t Use rtcwake to autoresume after <t> seconds (default: disabled)')
	print(' [advanced]')
	print(' -f Use ftrace to create device callgraphs (default: disabled)')
	print(' -filter "d1 d2 ..." Filter out all but this list of dev names')
	print(' -x2 Run two suspend/resumes back to back (default: disabled)')
	print(' -x2delay t Minimum millisecond delay <t> between the two test runs (default: 0 ms)')
	print(' -postres t Time after resume completion to wait for post-resume events (default: 0 S)')
	print(' -multi n d Execute <n> consecutive tests at <d> seconds intervals. The outputs will')
	print(' be created in a new subdirectory with a summary page.')
	print(' [utilities]')
	print(' -fpdt Print out the contents of the ACPI Firmware Performance Data Table')
	print(' -usbtopo Print out the current USB topology with power info')
	print(' -usbauto Enable autosuspend for all connected USB devices')
	print(' [android testing]')
	print(' -adb binary Use the given adb binary to run the test on an android device.')
	print(' The device should already be connected and with root access.')
	print(' Commands will be executed on the device using "adb shell"')
	print(' [re-analyze data from previous runs]')
	print(' -ftrace ftracefile Create HTML output using ftrace input')
	print(' -dmesg dmesgfile Create HTML output using dmesg (not needed for kernel >= 3.15)')
	print(' -summary directory Create a summary of all test in this dir')
	print('')
	return True
# ----------------- MAIN --------------------
# exec start (skipped if script is loaded as library)
if __name__ == '__main__':
	cmd = ''
	cmdarg = ''
	multitest = {'run': False, 'count': 0, 'delay': 0}
	# loop through the command line arguments
	# each option either sets a sysvals field or selects a utility command
	args = iter(sys.argv[1:])
	for arg in args:
		if(arg == '-m'):
			try:
				val = args.next()
			except:
				doError('No mode supplied', True)
			sysvals.suspendmode = val
		elif(arg == '-adb'):
			try:
				val = args.next()
			except:
				doError('No adb binary supplied', True)
			if(not os.path.exists(val)):
				doError('file doesnt exist: %s' % val, False)
			if(not os.access(val, os.X_OK)):
				doError('file isnt executable: %s' % val, False)
			try:
				check = os.popen(val+' version').read().strip()
			except:
				doError('adb version failed to execute', False)
			if(not re.match('Android Debug Bridge .*', check)):
				doError('adb version failed to execute', False)
			sysvals.adb = val
			sysvals.android = True
		elif(arg == '-x2'):
			if(sysvals.postresumetime > 0):
				doError('-x2 is not compatible with -postres', False)
			sysvals.execcount = 2
		elif(arg == '-x2delay'):
			sysvals.x2delay = getArgInt('-x2delay', args, 0, 60000)
		elif(arg == '-postres'):
			if(sysvals.execcount != 1):
				doError('-x2 is not compatible with -postres', False)
			sysvals.postresumetime = getArgInt('-postres', args, 0, 3600)
		elif(arg == '-f'):
			sysvals.usecallgraph = True
		elif(arg == '-modes'):
			cmd = 'modes'
		elif(arg == '-fpdt'):
			cmd = 'fpdt'
		elif(arg == '-usbtopo'):
			cmd = 'usbtopo'
		elif(arg == '-usbauto'):
			cmd = 'usbauto'
		elif(arg == '-status'):
			cmd = 'status'
		elif(arg == '-verbose'):
			sysvals.verbose = True
		elif(arg == '-v'):
			print("Version %.1f" % sysvals.version)
			sys.exit()
		elif(arg == '-rtcwake'):
			sysvals.rtcwake = True
			sysvals.rtcwaketime = getArgInt('-rtcwake', args, 0, 3600)
		elif(arg == '-multi'):
			multitest['run'] = True
			multitest['count'] = getArgInt('-multi n (exec count)', args, 2, 1000000)
			multitest['delay'] = getArgInt('-multi d (delay between tests)', args, 0, 3600)
		elif(arg == '-dmesg'):
			try:
				val = args.next()
			except:
				doError('No dmesg file supplied', True)
			sysvals.notestrun = True
			sysvals.dmesgfile = val
			if(os.path.exists(sysvals.dmesgfile) == False):
				doError('%s doesnt exist' % sysvals.dmesgfile, False)
		elif(arg == '-ftrace'):
			try:
				val = args.next()
			except:
				doError('No ftrace file supplied', True)
			sysvals.notestrun = True
			sysvals.usecallgraph = True
			sysvals.ftracefile = val
			if(os.path.exists(sysvals.ftracefile) == False):
				doError('%s doesnt exist' % sysvals.ftracefile, False)
		elif(arg == '-summary'):
			try:
				val = args.next()
			except:
				doError('No directory supplied', True)
			cmd = 'summary'
			cmdarg = val
			sysvals.notestrun = True
			if(os.path.isdir(val) == False):
				doError('%s isnt accesible' % val, False)
		elif(arg == '-filter'):
			try:
				val = args.next()
			except:
				doError('No devnames supplied', True)
			sysvals.setDeviceFilter(val)
		elif(arg == '-h'):
			printHelp()
			sys.exit()
		else:
			doError('Invalid argument: '+arg, True)
	# just run a utility command and exit
	if(cmd != ''):
		if(cmd == 'status'):
			statusCheck()
		elif(cmd == 'fpdt'):
			if(sysvals.android):
				doError('cannot read FPDT on android device', False)
			getFPDT(True)
		elif(cmd == 'usbtopo'):
			if(sysvals.android):
				doError('cannot read USB topology '+\
					'on an android device', False)
			detectUSB(True)
		elif(cmd == 'modes'):
			modes = getModes()
			print modes
		elif(cmd == 'usbauto'):
			setUSBDevicesAuto()
		elif(cmd == 'summary'):
			print("Generating a summary of folder \"%s\"" % cmdarg)
			runSummary(cmdarg, True)
		sys.exit()
	# run test on android device
	if(sysvals.android):
		if(sysvals.usecallgraph):
			doError('ftrace (-f) is not yet supported '+\
				'in the android kernel', False)
		if(sysvals.notestrun):
			doError('cannot analyze test files on the '+\
				'android device', False)
	# if instructed, re-analyze existing data files
	if(sysvals.notestrun):
		rerunTest()
		sys.exit()
	# verify that we can run a test
	if(not statusCheck()):
		print('Check FAILED, aborting the test run!')
		sys.exit()
	if multitest['run']:
		# run multiple tests in a separate subdirectory
		s = 'x%d' % multitest['count']
		subdir = datetime.now().strftime('suspend-'+s+'-%m%d%y-%H%M%S')
		os.mkdir(subdir)
		for i in range(multitest['count']):
			if(i != 0):
				print('Waiting %d seconds...' % (multitest['delay']))
				time.sleep(multitest['delay'])
			print('TEST (%d/%d) START' % (i+1, multitest['count']))
			runTest(subdir)
			print('TEST (%d/%d) COMPLETE' % (i+1, multitest['count']))
		runSummary(subdir, False)
	else:
		# run the test in the current directory
		runTest(".")
|
agry/NGECore2 | refs/heads/master | scripts/object/tangible/ship/components/armor/armor_subpro_light_plasteel.py | 85615 | import sys
def setup(core, object):
return |
agry/NGECore2 | refs/heads/master | scripts/object/tangible/food/generic/dish_bivoli_tempari.py | 85615 | import sys
def setup(core, object):
return |
patrickm/chromium.src | refs/heads/nw | chrome/test/functional/nacl_sdk.py | 66 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import ctypes
from distutils import version
import fnmatch
import glob
import hashlib
import logging
import os
import platform
import re
import shutil
import subprocess
import sys
import tempfile
import urllib2
import xml.dom.minidom
import zipfile
import pyauto_functional # Must be imported before pyauto.
import pyauto
import pyauto_utils
import test_utils
class NaClSDKTest(pyauto.PyUITest):
"""Tests for the NaCl SDK."""
_isExamplesTest = False
_extracted_sdk_path = None
_temp_dir = None
_updated_pepper_versions = []
_latest_updated_pepper_versions = []
_settings = {
'post_sdk_download_url': 'http://code.google.com/chrome/nativeclient/'
'docs/download.html',
'post_sdk_zip': 'http://storage.googleapis.com/'
'nativeclient-mirror/nacl/nacl_sdk/nacl_sdk.zip',
'min_required_chrome_build': 14,
}
  def tearDown(self):
    """Per-test cleanup; removes downloaded files unless running examples."""
    pyauto.PyUITest.tearDown(self)
    if not self._isExamplesTest:
      self._RemoveDownloadedTestFile()
  def testNaClSDK(self):
    """Verify that NaCl SDK is working properly.

    Downloads and extracts the SDK installer, then checks the download
    links, install and update flows, and finally serves and verifies the
    bundled examples for every updated pepper version.
    """
    if not self._HasAllSystemRequirements():
      logging.info('System does not meet the requirements.')
      return
    self._extracted_sdk_path = tempfile.mkdtemp()
    self._VerifyDownloadLinks()
    self._VerifyNaClSDKInstaller()
    self._VerifyInstall()
    self._VerifyUpdate()
    self._LaunchServerAndVerifyExamplesAllPepperVersions()
  def NaClSDKExamples(self):
    """Verify if NaCl SDK examples are working.

    Uses an externally supplied SDK: the NACL_SDK_ROOT and PEPPER_VER
    environment variables select the SDK path and pepper bundle to check.
    """
    self._isExamplesTest = True
    nacl_sdk_root = os.environ.get('NACL_SDK_ROOT', None)
    pepper_version = os.environ.get('PEPPER_VER', None)
    if nacl_sdk_root and pepper_version:
      self._LaunchServerAndVerifyExamples('pepper_' + pepper_version,
                                          nacl_sdk_root)
    else:
      self.fail(msg='Missing pepper version to be checked or SDK path.')
  def _VerifyDownloadLinks(self):
    """Verify the download links.

    Simply verify that NaCl download links exist in html page.
    """
    html = None
    # best-effort fetch: retry up to 3 times before failing the assert
    for i in xrange(3):
      try:
        html = urllib2.urlopen(self._settings['post_sdk_download_url']).read()
        break
      except:
        pass
    self.assertTrue(html,
                    msg='Cannot open URL: %s' %
                    self._settings['post_sdk_download_url'])
    sdk_url = self._settings['post_sdk_zip']
    self.assertTrue(sdk_url in html,
                    msg='Missing SDK download URL: %s' % sdk_url)
  def _VerifyNaClSDKInstaller(self):
    """Verify NaCl SDK installer.

    Downloads and extracts the installer zip, then checks that the
    expected files/directories are present for the current platform.
    """
    # entries expected on every platform
    search_list = [
        'sdk_cache/',
        'sdk_tools/',
    ]
    # platform-specific launcher scripts
    mac_lin_additional_search_items = [
        'naclsdk',
    ]
    win_additional_search_items = [
        'naclsdk.bat'
    ]
    self._DownloadNaClSDK()
    self._ExtractNaClSDK()
    if pyauto.PyUITest.IsWin():
      self._SearchNaClSDKFile(
          search_list + win_additional_search_items)
    elif pyauto.PyUITest.IsMac() or pyauto.PyUITest.IsLinux():
      self._SearchNaClSDKFile(
          search_list + mac_lin_additional_search_items)
    else:
      self.fail(msg='NaCl SDK does not support this OS.')
  def _VerifyInstall(self):
    """Install NACL sdk.

    Runs 'naclsdk list' (or naclsdk.bat on Windows) from the extracted
    SDK to exercise the installer entry point.
    """
    # Executing naclsdk(.bat) list
    if pyauto.PyUITest.IsWin():
      source_file = os.path.join(
          self._extracted_sdk_path, 'nacl_sdk', 'naclsdk.bat')
    elif pyauto.PyUITest.IsMac() or pyauto.PyUITest.IsLinux():
      source_file = os.path.join(
          self._extracted_sdk_path, 'nacl_sdk', 'naclsdk')
      # the zip extraction does not preserve the executable bits
      subprocess.call(['chmod', '-R', '755', self._extracted_sdk_path])
    else:
      self.fail(msg='NaCl SDK does not support this OS.')
    subprocess.Popen([source_file, 'list'],
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE).communicate()
def _VerifyUpdate(self):
"""Update NACL sdk"""
# Executing naclsdk(.bat) update
if pyauto.PyUITest.IsWin():
source_file = os.path.join(self._extracted_sdk_path, 'nacl_sdk',
'naclsdk.bat')
elif pyauto.PyUITest.IsMac() or pyauto.PyUITest.IsLinux():
source_file = os.path.join(self._extracted_sdk_path, 'nacl_sdk',
'naclsdk')
else:
self.fail(msg='NaCl SDK does not support this OS.')
# Executing nacl_sdk(.bat) update to get the latest version.
updated_output = subprocess.Popen([source_file, 'update'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
self._updated_pepper_versions.extend(
re.findall('Updating bundle (pepper_[0-9]{2})', updated_output))
self._updated_pepper_versions = list(set(self._updated_pepper_versions))
self._updated_pepper_versions.sort(key=str.lower)
updated_pepper_versions_len = len(self._updated_pepper_versions)
self._latest_updated_pepper_versions = filter(
lambda x: x >= 'pepper_18', self._updated_pepper_versions)
def _GetURLForExampleName(self, name, toolchain):
return 'http://localhost:5103/%s/index_%s.html' % (name, toolchain)
  def _GetExampleNamesAndURLs(self, examples_path):
    """Get a list of all examples as (name, url) tuples.

    Args:
      examples_path: The path to the examples directory in the NaCl SDK.

    Returns:
      A list of (example_name, example_url) tuples, one per toolchain
      variant whose index_<toolchain>.html page exists on disk.
    """
    toolchains = ['newlib', 'glibc', 'pnacl']
    examples = []
    for toolchain in toolchains:
      for example in os.listdir(examples_path):
        html_path = os.path.join(examples_path, example,
                                 'index_%s.html' % (toolchain,))
        # Only include toolchain variants that were actually built.
        if os.path.exists(html_path):
          example_url = self._GetURLForExampleName(example, toolchain)
          examples.append((example, example_url))
    return examples
  def _LaunchServerAndVerifyExamplesAllPepperVersions(self):
    """Run the example verification for every updated pepper bundle."""
    for pepper_version in self._latest_updated_pepper_versions:
      # NOTE(review): entries in _latest_updated_pepper_versions already
      # look like 'pepper_NN', so prepending 'pepper_' again would yield
      # 'pepper_pepper_NN' -- confirm the expected list contents.
      pepper_path = os.path.join(self._extracted_sdk_path,
          'nacl_sdk', 'pepper_' + str(pepper_version))
      self._LaunchServerAndVerifyExamples(pepper_version, pepper_path)
  def _LaunchServerAndVerifyExamples(self, pepper_version, pepper_path):
    """Start local HTTP server and verify examples.

    Args:
      pepper_version: Pepper bundle identifier being tested.
      pepper_path: Path to the extracted pepper bundle directory.
    """
    if self._ChromeAndPepperVersionMatch(pepper_version):
      # Close server if it's already open.
      if self._IsURLAlive('http://localhost:5103'):
        self._CloseHTTPServer()
      examples_path = os.path.join(pepper_path, 'examples')
      # Launch local http server.
      proc = subprocess.Popen(['make RUN'], shell=True, cwd=examples_path)
      self.WaitUntil(
          lambda: self._IsURLAlive('http://localhost:5103'),
          timeout=150, retry_sleep=1)
      examples = self._GetExampleNamesAndURLs(examples_path)
      try:
        self._OpenExamplesAndStartTest(examples)
      finally:
        # Always shut the server down, even when an example fails.
        self._CloseHTTPServer(proc)
    else:
      self.pprint('Pepper Version %s does not match the Chrome version %s.'
                  % (pepper_version,
                     self.GetBrowserInfo()['properties']['ChromeVersion']))
def _ChromeAndPepperVersionMatch(self, pepper_version='pepper_18'):
"""Determine if chrome and pepper version match"""
version_number = re.findall('pepper_([0-9]{2})', pepper_version)
browser_info = self.GetBrowserInfo()
chrome_version = browser_info['properties']['ChromeVersion']
chrome_build = int(chrome_version.split('.')[0])
return int(chrome_build) == int(version_number[0])
  def _RemoveDownloadedTestFile(self):
    """Delete downloaded files and dirs from downloads directory."""
    if self._extracted_sdk_path and os.path.exists(self._extracted_sdk_path):
      # Stop the example server first so no files are held open.
      self._CloseHTTPServer()

      def _RemoveFile():
        shutil.rmtree(self._extracted_sdk_path, ignore_errors=True)
        # Return whether anything is still left; WaitUntil retries
        # until this becomes False.
        return os.path.exists(self._extracted_sdk_path)

      success = self.WaitUntil(_RemoveFile, retry_sleep=2,
                               expect_retval=False)
      self.assertTrue(success,
                      msg='Cannot remove %s' % self._extracted_sdk_path)

    if self._temp_dir:
      pyauto_utils.RemovePath(self._temp_dir)
  def _OpenExamplesAndStartTest(self, examples):
    """Open each example and verify that it's working.

    Args:
      examples: A list of example (name, url) tuples.
    """
    # Map example name -> verification callback.
    example_verify_funcs = {
        'dlopen': self._VerifyDynamicLibraryOpen,
        'file_io': self._VerifyFileIoExample,
        'geturl': self._VerifyGetURLExample,
        'input_events': self._VerifyInputEventsExample,
        'load_progress': self._VerifyLoadProgressExample,
        'mt_input_events': self._VerifyMultithreadedInputEventsExample,
        'pi_generator': self._VerifyPiGeneratorExample,
        'sine_synth': self._VerifySineSynthExample,
        'websocket': self._VerifyWebSocketExample,
    }
    # Remove examples that we don't yet verify
    examples = [(name, url) for name, url in examples
                if name in example_verify_funcs]

    # Open all examples.
    for name, url in examples:
      self.AppendTab(pyauto.GURL(url))
      self._CheckForCrashes()

    # Verify all examples are working.
    for name, url in examples:
      self._VerifyAnExample(name, url, example_verify_funcs[name])
      self._CheckForCrashes()

    # Close each tab and check for crashes.  Tab 0 stays open.
    tab_count = self.GetTabCount()
    for index in xrange(tab_count - 1, 0, -1):
      self.CloseTab(tab_index=index)
      self._CheckForCrashes()
def _VerifyAnExample(self, name, url, verify_func):
"""Verify NaCl example is working.
Args:
name: A string name of the example.
url: A string url of the example.
verify_func: The function to verify the example.
Takes (tab_index, name, url) as parameters.
"""
if not verify_func:
self.fail(msg='No test available for %s.' % name)
info = self.GetBrowserInfo()
tabs = info['windows'][0]['tabs']
tab_index = None
for tab in tabs:
if url == tab['url']:
self.ActivateTab(tab['index'])
tab_index = tab['index']
break
if tab_index:
verify_func(tab_index, name, url)
  def _VerifyElementPresent(self, element_id, expected_value, tab_index, msg,
                            attribute='innerHTML', timeout=150):
    """Determine if dom element has the expected value.

    Args:
      element_id: Dom element's id.
      expected_value: String to be matched against the Dom element.
      tab_index: Tab index to work on.
      msg: Assertion message used when the value never appears.
      attribute: Attribute to match |expected_value| against, if
          given. Defaults to 'innerHTML'.
      timeout: The max timeout (in secs) for which to wait.
    """
    # NOTE(review): element_id/attribute/expected_value are interpolated
    # directly into the JS snippet; values containing quotes would break
    # it -- callers only pass literal identifiers today.
    js_code = """
        var output = document.getElementById('%s').%s;
        var result;
        if (output.indexOf('%s') != -1)
          result = 'pass';
        else
          result = 'fail';
        window.domAutomationController.send(result);
    """ % (element_id, attribute, expected_value)
    success = self.WaitUntil(
        lambda: self.ExecuteJavascript(js_code, tab_index),
        timeout=timeout, expect_retval='pass')
    self.assertTrue(success, msg=msg)
  def _CreateJSToSimulateMouseclick(self):
    """Create javascript to simulate mouse click event.

    Returns:
      A JS snippet that dispatches a right-button 'mousedown' event to
      the NaCl module and reports 'done' to the automation controller.
    """
    js_code = """
        var rightClick = document.createEvent('MouseEvents');
        rightClick.initMouseEvent(
          'mousedown', true, true, document,
          1, 32, 121, 10, 100,
          false, false, false, false,
          2, common.naclModule
        );
        common.naclModule.dispatchEvent(rightClick);
        window.domAutomationController.send('done');
    """
    return js_code
  def _VerifyInputEventsExample(self, tab_index, name, url):
    """Verify Input Events Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # _VerifyElementPresent asserts internally; the return value is unused.
    success = self._VerifyElementPresent('eventString', 'DidChangeView',
        tab_index, msg='Example %s failed. URL: %s' % (name, url))

    # Simulate mouse click on event module.
    js_code = self._CreateJSToSimulateMouseclick()
    self.ExecuteJavascript(js_code, tab_index)

    # Check if 'eventString' has handled above mouse click.
    # NOTE(review): re.search(...).group() raises AttributeError until the
    # text appears -- confirm WaitUntil treats that as a retry.
    success = self.WaitUntil(
        lambda: re.search('DidHandleInputEvent', self.GetDOMValue(
            'document.getElementById("eventString").innerHTML',
            tab_index)).group(), expect_retval='DidHandleInputEvent')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifyMultithreadedInputEventsExample(self, tab_index, name, url):
    """Verify the multithreaded Input Events example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Wait until the module reports DidChangeView (find()+1 is truthy
    # when the substring is present).
    success = self.WaitUntil(
        lambda: bool(self.GetDOMValue(
            'document.getElementById("eventString").innerHTML',
            tab_index).find('DidChangeView') + 1))
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))

    # Simulate mouse click on event module.
    js_code = self._CreateJSToSimulateMouseclick()
    self.ExecuteJavascript(js_code, tab_index)

    # Check if above mouse click is handled.
    success = self._VerifyElementPresent('eventString', 'Mouse event',
        tab_index, msg='Example %s failed. URL: %s' % (name, url))

    # Kill worker thread and queue
    js_code = """
        document.getElementsByTagName('button')[0].click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)

    # Check if main thread has cancelled queue.
    success = self._VerifyElementPresent('eventString', 'Received cancel',
        tab_index, msg='Example %s failed. URL: %s' % (name, url))

    # Simulate mouse click on event module.
    js_code = self._CreateJSToSimulateMouseclick()
    self.ExecuteJavascript(js_code, tab_index)

    # Check if above mouse click is not handled after killing worker thread.
    def _CheckMouseClickEventStatus():
      # Look for 'Mouse event' only AFTER the 'Received cancel' marker;
      # -1 means no further mouse event was handled.
      return self.GetDOMValue(
          'document.getElementById("eventString").innerHTML',
          tab_index).find('Mouse event', self.GetDOMValue(
          'document.getElementById("eventString").innerHTML', tab_index).find(
          'Received cancel'))
    success = self.WaitUntil(_CheckMouseClickEventStatus, expect_retval=-1)
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifyFileIoExample(self, tab_index, name, url):
    """Verify File IO Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    def _CheckStatus(substring_expected, fail_msg):
      # Poll the example's status field until it contains the expected
      # substring; fail with a descriptive reason otherwise.
      self.assertTrue(
          self.WaitUntil(
              lambda: self.GetDOMValue(
                  'document.getElementById("statusField").innerHTML', tab_index)\
                  .find(substring_expected) != -1, expect_retval=True),
          msg='Example %s failed. URL: %s. Reason: %s' % (name, url, fail_msg))

    # Give permission to use file system by clicking infobar OK
    infobar_index = test_utils.WaitForInfobarTypeAndGetIndex(self,
        'confirm_infobar', 0, tab_index)
    self.PerformActionOnInfobar('accept', infobar_index, 0, tab_index)
    _CheckStatus('Ready!', 'NaCl module load')

    # Check that deleting non-existing files gives file not found
    js_code = """
        document.getElementById('file_name').value = '/abc';
        document.getElementById('file_editor').value = 'test';
        document.getElementById('delete_but').click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    _CheckStatus('File not found', 'Delete non-existing')

    # Check that saving works
    js_code = """
        document.getElementById('save_but').click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    _CheckStatus('Save successful', 'Save test')

    # Check that we load what we saved
    js_code = """
        document.getElementById('file_editor').value = 'different';
        document.getElementById('load_but').click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    _CheckStatus('Load complete', 'Load test')
    self.assertTrue(
        self.GetDOMValue('document.getElementById("file_editor").value',
            tab_index).find('test') != -1, msg='Loaded wrong text or failed')

    # Check that we delete files successfully
    js_code = """
        document.getElementById('delete_but').click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    _CheckStatus('File deleted', 'Delete test')

    # Check that file is deleted and load produces not found
    js_code = """
        document.getElementById('load_but').click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    _CheckStatus('File not found', 'Load deleted test')
  def _VerifyWebSocketExample(self, tab_index, name, url):
    """Verify Web Socket Open Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Check if example is loaded.
    success = self.WaitUntil(
        lambda: self.GetDOMValue(
            'document.getElementById("statusField").innerHTML', tab_index),
        expect_retval='SUCCESS')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))

    # Simulate clicking on Connect button to establish a connection.
    js_code = """
        document.getElementsByTagName('input')[1].click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)

    # Check if connected
    success = self._VerifyElementPresent('log', 'connected', tab_index,
        msg='Example %s failed. URL: %s' % (name, url))

    # Simulate clicking on Send button to send text message in log.
    js_code = """
        document.getElementsByTagName('input')[3].click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)
    # The log should show an outbound 'send:' entry.
    success = self.WaitUntil(
        lambda: bool(re.search('send:', self.GetDOMValue(
            'document.getElementById("log").textContent', tab_index))))
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifyDynamicLibraryOpen(self, tab_index, name, url):
    """Verify Dynamic Library Open Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Check if example is loaded.
    success = self._VerifyElementPresent('log', 'Eightball loaded!',
        tab_index, msg='Example %s failed. URL: %s' % (name, url))

    # Simulate clicking on ASK button and check answer log for desired answer.
    js_code = """
        document.getElementsByTagName('input')[1].click();
        window.domAutomationController.send('done');
    """
    self.ExecuteJavascript(js_code, tab_index)

    def _CheckAnswerLog():
      # The eightball must answer with one of its fixed phrases.
      return bool(re.search(r'NO|YES|42|MAYBE NOT|DEFINITELY|'
                            'ASK ME TOMORROW|MAYBE|PARTLY CLOUDY',
                            self.GetDOMValue('document.getElementById("log").innerHTML',
                                             tab_index)))
    success = self.WaitUntil(_CheckAnswerLog)
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifyLoadProgressExample(self, tab_index, name, url):
    """Verify Load Progress Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Check if example loads and displays loading progress.
    success = self.WaitUntil(
        lambda: self.GetDOMValue(
            'document.getElementById("statusField").innerHTML', tab_index),
        timeout=150, expect_retval='SUCCESS')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))

    def _CheckLoadProgressStatus():
      # The log must show the full load-event sequence in order.
      return re.search(
          r'(loadstart).+(progress:).+(load).+(loadend).+(lastError:)',
          self.GetDOMValue(
              'document.getElementById("log").innerHTML', tab_index))
    success = self.WaitUntil(_CheckLoadProgressStatus)
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifyPiGeneratorExample(self, tab_index, name, url):
    """Verify Pi Generator Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # The estimate converges to pi, so its first three characters must
    # eventually read '3.1'.
    success = self.WaitUntil(
        lambda: self.GetDOMValue('document.getElementById("pi").value',
                                 tab_index)[0:3],
        expect_retval='3.1')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
  def _VerifySineSynthExample(self, tab_index, name, url):
    """Verify Sine Wave Synthesizer Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Wait for the module: the frequency field defaults to 440 Hz.
    success = self.WaitUntil(
        lambda: self.GetDOMValue(
            'document.getElementById("frequency_field").value',
            tab_index), timeout=150, expect_retval='440')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
    # Start playback; audible output is not verified, only that the
    # click executes without error.
    self.ExecuteJavascript(
        'document.body.getElementsByTagName("button")[0].click();'
        'window.domAutomationController.send("done")',
        tab_index)
  def _VerifyGetURLExample(self, tab_index, name, url):
    """Verify GetURL Example.

    Args:
      tab_index: Tab index integer that the example is on.
      name: A string name of the example.
      url: A string url of the example.
    """
    # Wait for the module to finish loading.
    success = self.WaitUntil(
        lambda: self.GetDOMValue(
            'document.getElementById("statusField").innerHTML',
            tab_index), timeout=150, expect_retval='SUCCESS')
    self.assertTrue(success, msg='Example %s failed. URL: %s' % (name, url))
    # Trigger the URL fetch and wait for the pass marker in the output.
    self.ExecuteJavascript(
        'document.getElementById("button").click();'
        'window.domAutomationController.send("done")',
        tab_index)
    success = self._VerifyElementPresent('general_output', 'test passed',
        tab_index, msg='Example %s failed. URL: %s' % (name, url))
  def _CheckForCrashes(self):
    """Check for any browser/tab crashes and hangs."""
    self.assertTrue(self.GetBrowserWindowCount(),
                    msg='Browser crashed, no window is open.')

    info = self.GetBrowserInfo()
    breakpad_folder = info['properties']['DIR_CRASH_DUMPS']
    old_dmp_files = glob.glob(os.path.join(breakpad_folder, '*.dmp'))

    # Verify there're no crash dump files.
    # NOTE(review): both globs run back to back, so this only catches a
    # dump created between the two calls -- confirm whether the baseline
    # was meant to be captured earlier (e.g. in setUp).
    for dmp_file in glob.glob(os.path.join(breakpad_folder, '*.dmp')):
      self.assertTrue(dmp_file in old_dmp_files,
                      msg='Crash dump %s found' % dmp_file)

    # Check for any crashed tabs.
    tabs = info['windows'][0]['tabs']
    for tab in tabs:
      if tab['url'] != 'about:blank':
        # An empty DOM body is taken as evidence the renderer died.
        if not self.GetDOMValue('document.body.innerHTML', tab['index']):
          self.fail(msg='Tab crashed on %s' % tab['url'])
  def _GetPlatformArchitecture(self):
    """Get platform architecture.

    Returns:
      A string representing the platform architecture
      ('32bit' or '64bit').
    """
    if pyauto.PyUITest.IsWin():
      # On 64-bit Windows a 32-bit Python sees the x86 Program Files.
      # NOTE(review): assumes PROGRAMFILES is always set -- a missing
      # variable would raise KeyError; confirm.
      if os.environ['PROGRAMFILES'] == 'C:\\Program Files (x86)':
        return '64bit'
      else:
        return '32bit'
    elif pyauto.PyUITest.IsMac() or pyauto.PyUITest.IsLinux():
      if platform.machine() == 'x86_64':
        return '64bit'
      else:
        return '32bit'
    # Unknown platform: default to 32-bit.
    return '32bit'
def _HasPathInTree(self, pattern, is_file, root=os.curdir):
"""Recursively checks if a file/directory matching a pattern exists.
Args:
pattern: Pattern of file or directory name.
is_file: True if looking for file, or False if looking for directory.
root: Directory to start looking.
Returns:
True, if root contains the directory name pattern, or
False otherwise.
"""
for path, dirs, files in os.walk(os.path.abspath(root)):
if is_file:
if len(fnmatch.filter(files, pattern)):
return True
else:
if len(fnmatch.filter(dirs, pattern)):
return True
return False
  def _HasAllSystemRequirements(self):
    """Verify NaCl SDK installation system requirements.

    Returns:
      True, if system passed requirements, or
      False otherwise.
    """
    # Check python version.
    if sys.version_info[0:2] < (2, 6):
      return False

    # Check OS requirements.
    if pyauto.PyUITest.IsMac():
      # OS X 10.6 (Snow Leopard) or newer is required.
      mac_min_version = version.StrictVersion('10.6')
      mac_version = version.StrictVersion(platform.mac_ver()[0])
      if mac_version < mac_min_version:
        return False
    elif pyauto.PyUITest.IsWin():
      if not (self.IsWin7() or self.IsWinVista() or self.IsWinXP()):
        return False
    elif pyauto.PyUITest.IsLinux():
      pass  # TODO(chrisphan): Check Lin requirements.
    else:
      return False

    # Check for Chrome version compatibility.
    # NaCl supports Chrome 10 and higher builds.
    min_required_chrome_build = self._settings['min_required_chrome_build']
    browser_info = self.GetBrowserInfo()
    chrome_version = browser_info['properties']['ChromeVersion']
    chrome_build = int(chrome_version.split('.')[0])
    return chrome_build >= min_required_chrome_build
def _DownloadNaClSDK(self):
"""Download NaCl SDK."""
self._temp_dir = tempfile.mkdtemp()
dl_file = urllib2.urlopen(self._settings['post_sdk_zip'])
file_path = os.path.join(self._temp_dir, 'nacl_sdk.zip')
try:
f = open(file_path, 'wb')
f.write(dl_file.read())
except IOError:
self.fail(msg='Cannot open %s.' % file_path)
finally:
f.close()
def _ExtractNaClSDK(self):
"""Extract NaCl SDK."""
source_file = os.path.join(self._temp_dir, 'nacl_sdk.zip')
if zipfile.is_zipfile(source_file):
zip = zipfile.ZipFile(source_file, 'r')
zip.extractall(self._extracted_sdk_path)
else:
self.fail(msg='%s is not a valid zip file' % source_file)
def _IsURLAlive(self, url):
"""Test if URL is alive."""
try:
urllib2.urlopen(url)
except:
return False
return True
  def _CloseHTTPServer(self, proc=None):
    """Close HTTP server.

    Args:
      proc: Process that opened the HTTP server.
            proc is None when there is no pointer to HTTP server process.
    """
    if not self._IsURLAlive('http://localhost:5103'):
      return
    response = urllib2.urlopen('http://localhost:5103')
    html = response.read()
    # Make sure whatever is on port 5103 really is the example server.
    if not 'Native Client' in html:
      self.fail(msg='Port 5103 is in use.')

    # The SDK's httpd honors ?quit=1 as a shutdown request.
    urllib2.urlopen('http://localhost:5103?quit=1')
    success = self.WaitUntil(
        lambda: self._IsURLAlive('http://localhost:5103'),
        retry_sleep=1, expect_retval=False)

    if not success:
      if not proc:
        self.fail(msg='Failed to close HTTP server.')
      else:
        # Fall back to killing the process if it is still running.
        if proc.poll() == None:
          try:
            proc.kill()
          except:
            self.fail(msg='Failed to close HTTP server.')
  def _SearchNaClSDKFile(self, search_list):
    """Assert that the extracted SDK contains the given files/directories.

    Args:
      search_list: A list of strings, representing file and
          directory names for which to search.  Names containing a '/'
          are treated as directory names (the '/' is stripped before
          matching).
    """
    missing_items = []
    for name in search_list:
      is_file = name.find('/') < 0
      if not is_file:
        # Strip the trailing '/' marker before matching directory names.
        name = name.replace('/', '')
      if not self._HasPathInTree(name, is_file, self._extracted_sdk_path):
        missing_items.append(name)
    self.assertEqual(len(missing_items), 0,
                     msg='Missing files or directories: %s' %
                         ', '.join(map(str, missing_items)))
def ExtraChromeFlags(self):
"""Ensures Nacl is enabled.
Returns:
A list of extra flags to pass to Chrome when it is launched.
"""
extra_chrome_flags = [
'--enable-nacl',
'--enable-nacl-exception-handling',
'--nacl-gdb',
]
return pyauto.PyUITest.ExtraChromeFlags(self) + extra_chrome_flags
# Standard pyauto test entry point.
if __name__ == '__main__':
  pyauto_functional.Main()
|
arielalmendral/ert | refs/heads/master | python/python/ert/test/test_area.py | 2 | # Copyright (C) 2013 Statoil ASA, Norway.
#
# The file 'test_work_area.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
import os.path
from cwrap import BaseCClass
from ert.util import UtilPrototype
class TestArea(BaseCClass):
    """Python wrapper around ert's C 'test_work_area'.

    A TestArea is a scratch working directory; helper methods copy files
    and directories from the original cwd into it.  All the copy/install
    helpers interpret relative inputs against the ORIGINAL cwd.
    """

    _test_area_alloc = UtilPrototype("void* test_work_area_alloc( char* )" , bind = False)
    _test_area_alloc_relative = UtilPrototype("void* test_work_area_alloc_relative( char* , char* )" , bind = False)
    _free = UtilPrototype("void test_work_area_free( test_area )")
    _install_file = UtilPrototype("void test_work_area_install_file( test_area , char* )")
    _copy_directory = UtilPrototype("void test_work_area_copy_directory( test_area , char* )")
    _copy_file = UtilPrototype("void test_work_area_copy_file( test_area , char* )")
    _copy_directory_content = UtilPrototype("void test_work_area_copy_directory_content( test_area , char* )")
    _copy_parent_directory = UtilPrototype("void test_work_area_copy_parent_directory( test_area , char* )")
    _copy_parent_content = UtilPrototype("void test_work_area_copy_parent_content( test_area , char* )")
    _get_cwd = UtilPrototype("char* test_work_area_get_cwd( test_area )")
    _get_original_cwd = UtilPrototype("char* test_work_area_get_original_cwd( test_area )")
    _set_store = UtilPrototype("void test_work_area_set_store( test_area , bool)")
    _sync = UtilPrototype("void test_work_area_sync( test_area )")

    def __init__(self, test_name, prefix = None , store_area=False , c_ptr = None):
        """Allocate a work area named test_name.

        If prefix is given it must be an existing directory and the area
        is created relative to it; raises IOError otherwise.  c_ptr is
        only used internally when wrapping an existing C object.
        """
        if c_ptr is None:
            if prefix:
                if os.path.exists( prefix ):
                    c_ptr = self._test_area_alloc_relative(prefix , test_name)
                else:
                    raise IOError("The prefix path:%s must exist" % prefix)
            else:
                c_ptr = self._test_area_alloc(test_name)

        super(TestArea, self).__init__(c_ptr)
        self.set_store( store_area )

    def get_original_cwd(self):
        """Return the cwd the process had before the area was created."""
        return self._get_original_cwd()

    def get_cwd(self):
        """Return the work area's own directory."""
        return self._get_cwd()

    def orgPath(self , path):
        # Absolute paths pass through; relative paths resolve against
        # the ORIGINAL cwd, not the work area.
        if os.path.isabs( path ):
            return path
        else:
            return os.path.abspath( os.path.join( self.get_original_cwd( ) , path ) )

    # All the methods install_file() , copy_directory(),
    # copy_parent_directory(), copy_parent_content(),
    # copy_directory_content() and copy_file() expect an input
    # argument which is relative to the original CWD - or absolute.
    def install_file( self, filename):
        """Copy a single file into the area; IOError if it is missing."""
        if os.path.isfile(self.orgPath(filename)):
            self._install_file(filename)
        else:
            raise IOError("No such file:%s" % filename)

    def copy_directory( self, directory):
        """Copy a whole directory into the area; IOError if missing."""
        if os.path.isdir( self.orgPath(directory) ):
            self._copy_directory(directory)
        else:
            raise IOError("No such directory: %s" % directory)

    def copy_parent_directory( self , path):
        """Copy the parent directory of path into the area."""
        if os.path.exists( self.orgPath(path) ):
            self._copy_parent_directory( path)
        else:
            raise IOError("No such file or directory: %s" % path)

    def copy_parent_content( self , path):
        """Copy the content of path's parent directory into the area."""
        if os.path.exists( self.orgPath(path) ):
            self._copy_parent_content(path)
        else:
            raise IOError("No such file or directory: %s" % path)

    def copy_directory_content( self, directory):
        """Copy the content of a directory (not the directory itself)."""
        if os.path.isdir( self.orgPath(directory) ):
            self._copy_directory_content(directory)
        else:
            raise IOError("No such directory: %s" % directory )

    def copy_file( self, filename):
        """Copy a single file into the area; IOError if it is missing."""
        if os.path.isfile( self.orgPath(filename) ):
            self._copy_file(filename)
        else:
            raise IOError("No such file:%s" % filename)

    def free(self):
        # Releases the underlying C object (invoked by BaseCClass
        # destruction machinery).
        self._free()

    def set_store(self, store):
        """Set whether the work area should be kept on disk when freed."""
        self._set_store(store)

    def getFullPath(self , path):
        """Return the absolute path of an existing, relative path inside
        the work area.  Raises IOError for missing or absolute input."""
        if not os.path.exists( path ):
            raise IOError("Path not found:%s" % path)

        if os.path.isabs( path ):
            raise IOError("Path:%s is already absolute" % path)

        return os.path.join( self.get_cwd() , path )

    def sync(self):
        """Flush the work area state to disk via the C layer."""
        return self._sync( )
class TestAreaContext(object):
    """Context manager that creates a TestArea on entry.

    Dropping the area on exit triggers cleanup of the scratch working
    directory through the TestArea machinery.
    """

    def __init__(self, test_name, prefix=None, store_area=False):
        self.test_name = test_name
        self.store_area = store_area
        self.prefix = prefix

    def __enter__(self):
        """
        @rtype: TestArea
        """
        area = TestArea(self.test_name,
                        prefix=self.prefix,
                        store_area=self.store_area)
        self.test_area = area
        return area

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Drop the reference so the area is released; returning False
        # propagates any in-flight exception.
        del self.test_area
        return False
|
tinchoss/Python_Android | refs/heads/master | python/src/Tools/scripts/xxci.py | 94 | #! /usr/bin/env python
# xxci
#
# check in files for which rcsdiff returns nonzero exit status
import sys
import os
from stat import *
import fnmatch
# Magic header bytes marking an executable file (such files are skipped).
EXECMAGIC = '\001\140\000\010'
MAXSIZE = 200*1024 # Files this big must be binaries and are skipped.
def getargs():
    """Return command-line file arguments, or almost all files in the
    current directory ordered most-recently-modified first."""
    args = sys.argv[1:]
    if args:
        return args
    print 'No arguments, checking almost *, in "ls -t" order'
    # NOTE: 'list' and 'file' shadow builtins; kept as-is (Python 2 era).
    list = []
    for file in os.listdir(os.curdir):
        if not skipfile(file):
            list.append((getmtime(file), file))
    list.sort()
    if not list:
        print 'Nothing to do -- exit 1'
        sys.exit(1)
    # Sort again (redundant with the sort above) and reverse so the most
    # recently modified files come first.
    list.sort()
    list.reverse()
    for mtime, file in list: args.append(file)
    return args
def getmtime(file):
    """Return the modification time of *file*, or -1 when stat fails."""
    try:
        return os.stat(file)[ST_MTIME]
    except os.error:
        return -1
# Exact names, name prefixes and name suffixes that mark files which
# should never be checked in (editor droppings, build artifacts, ...).
badnames = ['tags', 'TAGS', 'xyzzy', 'nohup.out', 'core']
badprefixes = ['.', ',', '@', '#', 'o.']
badsuffixes = \
    ['~', '.a', '.o', '.old', '.bak', '.orig', '.new', '.prev', '.not', \
     '.pyc', '.fdc', '.rgb', '.elc', ',v']
# Effective fnmatch ignore patterns; (re)built by setup().
ignore = []
def setup():
    """(Re)build the ignore pattern list from the bad-name tables,
    plus any extra patterns read from a local .xxcign file."""
    ignore[:] = badnames
    for p in badprefixes:
        ignore.append(p + '*')
    for p in badsuffixes:
        ignore.append('*' + p)
    try:
        f = open('.xxcign', 'r')
    except IOError:
        # No local ignore file -- keep just the defaults.
        return
    ignore[:] = ignore + f.read().split()
def skipfile(file):
    """Return 1 if *file* should not be considered for check-in."""
    for p in ignore:
        if fnmatch.fnmatch(file, p): return 1
    try:
        st = os.lstat(file)
    except os.error:
        return 1 # Doesn't exist -- skip it
    # Skip non-plain files.
    if not S_ISREG(st[ST_MODE]): return 1
    # Skip huge files -- probably binaries.
    if st[ST_SIZE] >= MAXSIZE: return 1
    # Skip executables
    try:
        data = open(file, 'r').read(len(EXECMAGIC))
        if data == EXECMAGIC: return 1
    except:
        # Best effort: unreadable files simply aren't treated as
        # executables.  (Bare except kept to preserve behavior.)
        pass
    return 0
def badprefix(file):
    """Return 1 if *file* starts with any known-bad prefix, else 0."""
    for bad in badprefixes:
        # startswith replaces the equivalent manual slice comparison.
        if file.startswith(bad): return 1
    return 0
def badsuffix(file):
    """Return 1 if *file* ends with any known-bad suffix, else 0."""
    for bad in badsuffixes:
        # endswith replaces the equivalent manual slice comparison.
        if file.endswith(bad): return 1
    return 0
def go(args):
    """Interactively check in each file that differs from its RCS head."""
    for file in args:
        print file + ':'
        if differing(file):
            showdiffs(file)
            if askyesno('Check in ' + file + ' ? '):
                # Lock, then check in keeping the lock; exit status ignored.
                sts = os.system('rcs -l ' + file) # ignored
                sts = os.system('ci -l ' + file)
def differing(file):
    """Return true if the working file differs from the RCS head revision."""
    # Compare the checked-out head (co -p) against the working copy.
    cmd = 'co -p ' + file + ' 2>/dev/null | cmp -s - ' + file
    sts = os.system(cmd)
    return sts != 0
def showdiffs(file):
    """Page through the rcsdiff output for *file*."""
    cmd = 'rcsdiff ' + file + ' 2>&1 | ${PAGER-more}'
    sts = os.system(cmd)
def askyesno(prompt):
    """Ask a yes/no question; only exactly 'y' or 'yes' count as yes."""
    s = raw_input(prompt)
    return s in ['y', 'yes']
if __name__ == '__main__':
    try:
        setup()
        go(getargs())
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop; report it briefly.
        print '[Intr]'
|
mahendra-r/edx-platform | refs/heads/master | lms/djangoapps/licenses/views.py | 147 | import logging
import json
import re
from urlparse import urlparse
from collections import namedtuple, defaultdict
from edxmako.shortcuts import render_to_string
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404
from django.views.decorators.csrf import requires_csrf_token
from licenses.models import CourseSoftware
from licenses.models import get_courses_licenses, get_or_create_license, get_license
log = logging.getLogger("edx.licenses")
License = namedtuple('License', 'software serial')
def get_licenses_by_course(user, courses):
    """Return rendered serial-number HTML for *user*, keyed by course id.

    Missing licenses are created on the fly via get_or_create_license.
    """
    licenses = get_courses_licenses(user, courses)
    licenses_by_course = defaultdict(list)

    # create missing licenses and group by course_id
    for software, license in licenses.iteritems():
        if license is None:
            licenses[software] = get_or_create_license(user, software)

        course_id = software.course_id
        # NOTE(review): when the license was just created above, 'license'
        # is still None here, so serial stays None -- confirm whether the
        # fresh serial should be used instead.
        serial = license.serial if license else None
        licenses_by_course[course_id].append(License(software, serial))

    # render elements
    data_by_course = {}
    for course_id, licenses in licenses_by_course.iteritems():
        context = {'licenses': licenses}
        template = 'licenses/serial_numbers.html'
        data_by_course[course_id] = render_to_string(template, context)

    return data_by_course
@login_required
@requires_csrf_token
def user_software_license(request):
    """AJAX endpoint returning (or generating) a user's software serial.

    Expects an AJAX POST with 'software' and optional 'generate'; the
    course id is recovered from the HTTP referer.  Raises Http404 on any
    lookup failure or non-AJAX/non-POST access.
    """
    if request.method != 'POST' or not request.is_ajax():
        raise Http404

    # get the course id from the referer
    url_path = urlparse(request.META.get('HTTP_REFERER', '')).path
    pattern = re.compile('^/courses/(?P<id>[^/]+/[^/]+/[^/]+)/.*/?$')
    match = re.match(pattern, url_path)
    if not match:
        raise Http404
    course_id = match.groupdict().get('id', '')
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    user_id = request.session.get('_auth_user_id')
    software_name = request.POST.get('software')
    # 'generate' arrives as the string 'true' when set by the client.
    generate = request.POST.get('generate', False) == 'true'

    try:
        software = CourseSoftware.objects.get(name=software_name,
                                              course_id=course_key)
    except CourseSoftware.DoesNotExist:
        raise Http404

    try:
        user = User.objects.get(id=user_id)
    except User.DoesNotExist:
        raise Http404

    if generate:
        software_license = get_or_create_license(user, software)
    else:
        software_license = get_license(user, software)

    if software_license:
        response = {'serial': software_license.serial}
    else:
        response = {'error': 'No serial number found'}

    # NOTE(review): 'mimetype' is the pre-Django-1.7 keyword for
    # 'content_type' -- consistent with this codebase's Django version.
    return HttpResponse(json.dumps(response), mimetype='application/json')
|
aavanian/bokeh | refs/heads/master | sphinx/source/docs/user_guide/examples/plotting_legends.py | 62 | import numpy as np
from bokeh.plotting import output_file, show, figure
x = np.linspace(0, 4*np.pi, 100)
y = np.sin(x)
output_file("legend.html")
p = figure()
p.circle(x, y, legend="sin(x)")
p.line(x, y, legend="sin(x)")
p.line(x, 2*y, legend="2*sin(x)",
line_dash=[4, 4], line_color="orange", line_width=2)
p.square(x, 3*y, legend="3*sin(x)", fill_color=None, line_color="green")
p.line(x, 3*y, legend="3*sin(x)", line_color="green")
show(p)
|
jessevig/bertviz | refs/heads/master | bertviz/head_view.py | 1 | import json
import os
import uuid
from IPython.core.display import display, HTML, Javascript
from .util import format_special_chars, format_attention
def head_view(
        attention=None,
        tokens=None,
        sentence_b_start=None,
        prettify_tokens=True,
        layer=None,
        heads=None,
        encoder_attention=None,
        decoder_attention=None,
        cross_attention=None,
        encoder_tokens=None,
        decoder_tokens=None,
):
    """Render head view
        Args:
            For self-attention models:
                attention: list of ``torch.FloatTensor``(one for each layer) of shape
                    ``(batch_size(must be 1), num_heads, sequence_length, sequence_length)``
                tokens: list of tokens
                sentence_b_start: index of first wordpiece in sentence B if input text is sentence pair (optional)
            For encoder-decoder models:
                encoder_attention: list of ``torch.FloatTensor``(one for each layer) of shape
                    ``(batch_size(must be 1), num_heads, encoder_sequence_length, encoder_sequence_length)``
                decoder_attention: list of ``torch.FloatTensor``(one for each layer) of shape
                    ``(batch_size(must be 1), num_heads, decoder_sequence_length, decoder_sequence_length)``
                cross_attention: list of ``torch.FloatTensor``(one for each layer) of shape
                    ``(batch_size(must be 1), num_heads, decoder_sequence_length, encoder_sequence_length)``
                encoder_tokens: list of tokens for encoder input
                decoder_tokens: list of tokens for decoder input
            For all models:
                prettify_tokens: indicates whether to remove special characters in wordpieces, e.g. Ġ
                layer: index of layer to show in visualization when first loads. If non specified, defaults to layer 0.
                heads: indices of heads to show in visualization when first loads. If non specified, defaults to all.
    """

    # Each entry of attn_data is one selectable view in the dropdown:
    # {'name', 'attn' (nested lists), 'left_text', 'right_text'}.
    attn_data = []
    if attention is not None:
        if tokens is None:
            raise ValueError("'tokens' is required")
        if encoder_attention is not None or decoder_attention is not None or cross_attention is not None \
                or encoder_tokens is not None or decoder_tokens is not None:
            raise ValueError("If you specify 'attention' you may not specify any encoder-decoder arguments. This"
                             " argument is only for self-attention models.")
        attention = format_attention(attention)
        if sentence_b_start is None:
            attn_data.append(
                {
                    'name': None,
                    'attn': attention.tolist(),
                    'left_text': tokens,
                    'right_text': tokens
                }
            )
        else:
            # Sentence-pair input: offer the full matrix plus each A/B quadrant.
            slice_a = slice(0, sentence_b_start)  # Positions corresponding to sentence A in input
            slice_b = slice(sentence_b_start, len(tokens))  # Position corresponding to sentence B in input
            attn_data.append(
                {
                    'name': 'All',
                    'attn': attention.tolist(),
                    'left_text': tokens,
                    'right_text': tokens
                }
            )
            attn_data.append(
                {
                    'name': 'Sentence A -> Sentence A',
                    'attn': attention[:, :, slice_a, slice_a].tolist(),
                    'left_text': tokens[slice_a],
                    'right_text': tokens[slice_a]
                }
            )
            attn_data.append(
                {
                    'name': 'Sentence B -> Sentence B',
                    'attn': attention[:, :, slice_b, slice_b].tolist(),
                    'left_text': tokens[slice_b],
                    'right_text': tokens[slice_b]
                }
            )
            attn_data.append(
                {
                    'name': 'Sentence A -> Sentence B',
                    'attn': attention[:, :, slice_a, slice_b].tolist(),
                    'left_text': tokens[slice_a],
                    'right_text': tokens[slice_b]
                }
            )
            attn_data.append(
                {
                    'name': 'Sentence B -> Sentence A',
                    'attn': attention[:, :, slice_b, slice_a].tolist(),
                    'left_text': tokens[slice_b],
                    'right_text': tokens[slice_a]
                }
            )
    elif encoder_attention is not None or decoder_attention is not None or cross_attention is not None:
        if encoder_attention is not None:
            if encoder_tokens is None:
                raise ValueError("'encoder_tokens' required if 'encoder_attention' is not None")
            encoder_attention = format_attention(encoder_attention)
            attn_data.append(
                {
                    'name': 'Encoder',
                    'attn': encoder_attention.tolist(),
                    'left_text': encoder_tokens,
                    'right_text': encoder_tokens
                }
            )
        if decoder_attention is not None:
            if decoder_tokens is None:
                raise ValueError("'decoder_tokens' required if 'decoder_attention' is not None")
            decoder_attention = format_attention(decoder_attention)
            attn_data.append(
                {
                    'name': 'Decoder',
                    'attn': decoder_attention.tolist(),
                    'left_text': decoder_tokens,
                    'right_text': decoder_tokens
                }
            )
        if cross_attention is not None:
            if encoder_tokens is None:
                raise ValueError("'encoder_tokens' required if 'cross_attention' is not None")
            if decoder_tokens is None:
                raise ValueError("'decoder_tokens' required if 'cross_attention' is not None")
            cross_attention = format_attention(cross_attention)
            attn_data.append(
                {
                    'name': 'Cross',
                    'attn': cross_attention.tolist(),
                    'left_text': decoder_tokens,
                    'right_text': encoder_tokens
                }
            )
    else:
        raise ValueError("You must specify at least one attention argument.")

    # Generate unique div id to enable multiple visualizations in one notebook
    vis_id = f'bertviz-{uuid.uuid4().hex}'

    # Compose html: a view-selection dropdown is only shown when there is
    # more than one view to choose from.
    if len(attn_data) > 1:
        options = '\n'.join(
            f'<option value="{i}">{d["name"]}</option>'
            for i, d in enumerate(attn_data)
        )
        select_html = f'Attention: <select id="filter">{options}</select>'
    else:
        select_html = ""
    vis_html = f"""
        <div id='{vis_id}'>
            <span style="user-select:none">
                Layer: <select id="layer"></select>
                {select_html}
            </span>
            <div id='vis'></div>
        </div>
    """

    # Sanity-check that each attention matrix agrees with its token lists
    # on both axes before shipping the data to the JS side.
    for d in attn_data:
        attn_seq_len_left = len(d['attn'][0][0])
        if attn_seq_len_left != len(d['left_text']):
            raise ValueError(
                f"Attention has {attn_seq_len_left} positions, while number of tokens is {len(d['left_text'])} "
                f"for tokens: {' '.join(d['left_text'])}"
            )
        attn_seq_len_right = len(d['attn'][0][0][0])
        if attn_seq_len_right != len(d['right_text']):
            raise ValueError(
                f"Attention has {attn_seq_len_right} positions, while number of tokens is {len(d['right_text'])} "
                f"for tokens: {' '.join(d['right_text'])}"
            )
        if prettify_tokens:
            d['left_text'] = format_special_chars(d['left_text'])
            d['right_text'] = format_special_chars(d['right_text'])

    # Parameters handed to head_view.js via the PYTHON_PARAMS placeholder.
    params = {
        'attention': attn_data,
        'default_filter': "0",
        'root_div_id': vis_id,
        'layer': layer,
        'heads': heads
    }

    # require.js must be imported for Colab or JupyterLab:
    display(HTML('<script src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js"></script>'))
    display(HTML(vis_html))
    __location__ = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))
    # Read the JS template inside a context manager so the file handle is
    # closed promptly instead of leaked (the original used a bare open()).
    with open(os.path.join(__location__, 'head_view.js')) as f:
        vis_js = f.read().replace("PYTHON_PARAMS", json.dumps(params))
    display(Javascript(vis_js))
OrkoHunter/networkx | refs/heads/master | examples/drawing/ego_graph.py | 4 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example using the NetworkX ego_graph() function to return the main egonet of
the largest hub in a Barabási-Albert network.
"""
__author__="""Drew Conway (drew.conway@nyu.edu)"""
from operator import itemgetter
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
    # Build a Barabasi-Albert preferential-attachment graph.
    node_count = 1000
    edges_per_new_node = 2
    G = nx.generators.barabasi_albert_graph(node_count, edges_per_new_node)

    # Locate the node with the largest degree (the main hub).
    degrees = G.degree()
    largest_hub, hub_degree = sorted(degrees, key=itemgetter(1))[-1]

    # Extract the hub's egonet: the hub together with its neighbours.
    hub_ego = nx.ego_graph(G, largest_hub)

    # Draw the egonet in blue, then overdraw the hub itself large and red.
    pos = nx.spring_layout(hub_ego)
    nx.draw(hub_ego, pos, node_color='b', node_size=50, with_labels=False)
    nx.draw_networkx_nodes(hub_ego, pos, nodelist=[largest_hub],
                           node_size=300, node_color='r')
    plt.savefig('ego_graph.png')
    plt.show()
|
kallewoof/bitcoin | refs/heads/master | test/functional/test_framework/script.py | 19 | #!/usr/bin/env python3
# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as signature hash functions.
This file is modified from python-bitcoinlib.
"""
from collections import namedtuple
import hashlib
import struct
import unittest
from typing import List, Dict
from .key import TaggedHash, tweak_add_pubkey
from .messages import (
CTransaction,
CTxOut,
hash256,
ser_string,
ser_uint256,
sha256,
uint256_from_str,
)
MAX_SCRIPT_ELEMENT_SIZE = 520
LOCKTIME_THRESHOLD = 500000000
ANNEX_TAG = 0x50
LEAF_VERSION_TAPSCRIPT = 0xc0
def hash160(s):
    """Return RIPEMD160(SHA256(s)), bitcoin's standard hash-160 of *s*."""
    inner = sha256(s)
    return hashlib.new('ripemd160', inner).digest()
def bn2vch(v):
    """Convert number to bitcoin-specific little endian format.

    The encoding is sign-magnitude: the magnitude is serialized little-endian
    and the most significant bit of the last byte carries the sign, so every
    nonzero number needs one bit beyond its magnitude.  Zero encodes as b''.
    """
    if v == 0:
        return b''
    # Magnitude bits plus one sign bit, rounded up to whole bytes.
    size = (v.bit_length() + 1 + 7) // 8
    encoded = abs(v)
    if v < 0:
        # Set the sign bit (top bit of the final byte).
        encoded |= 1 << (size * 8 - 1)
    return encoded.to_bytes(size, 'little')
class CScriptOp(int):
    """A single script opcode"""
    __slots__ = ()

    @staticmethod
    def encode_op_pushdata(d):
        """Encode a PUSHDATA op, returning bytes"""
        size = len(d)
        if size < 0x4c:
            return bytes([size]) + d  # OP_PUSHDATA
        if size <= 0xff:
            return b'\x4c' + bytes([size]) + d  # OP_PUSHDATA1
        if size <= 0xffff:
            return b'\x4d' + struct.pack(b'<H', size) + d  # OP_PUSHDATA2
        if size <= 0xffffffff:
            return b'\x4e' + struct.pack(b'<I', size) + d  # OP_PUSHDATA4
        raise ValueError("Data too long to encode in a PUSHDATA op")

    @staticmethod
    def encode_op_n(n):
        """Encode a small integer op, returning an opcode"""
        if not (0 <= n <= 16):
            raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
        return OP_0 if n == 0 else CScriptOp(OP_1 + n - 1)

    def decode_op_n(self):
        """Decode a small integer opcode, returning an integer"""
        if self == OP_0:
            return 0
        if not OP_1 <= self <= OP_16:
            raise ValueError('op %r is not an OP_N' % self)
        return int(self - OP_1 + 1)

    def is_small_int(self):
        """Return true if the op pushes a small integer to the stack"""
        return self == 0 or 0x51 <= self <= 0x60

    def __str__(self):
        return repr(self)

    def __repr__(self):
        # Named opcodes render by name, everything else as a hex constructor.
        return OPCODE_NAMES.get(self, 'CScriptOp(0x%x)' % self)

    def __new__(cls, n):
        # Opcode instances are interned in _opcode_instances; instance n is
        # created exactly once, and creation must happen in ascending order.
        if n < len(_opcode_instances):
            return _opcode_instances[n]
        assert len(_opcode_instances) == n
        instance = super().__new__(cls, n)
        _opcode_instances.append(instance)
        return instance
OPCODE_NAMES: Dict[CScriptOp, str] = {}
_opcode_instances: List[CScriptOp] = []
# Populate opcode instance table
for n in range(0xff + 1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE = OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# BIP 342 opcodes (Tapscript)
OP_CHECKSIGADD = CScriptOp(0xba)
OP_INVALIDOPCODE = CScriptOp(0xff)
OPCODE_NAMES.update({
OP_0: 'OP_0',
OP_PUSHDATA1: 'OP_PUSHDATA1',
OP_PUSHDATA2: 'OP_PUSHDATA2',
OP_PUSHDATA4: 'OP_PUSHDATA4',
OP_1NEGATE: 'OP_1NEGATE',
OP_RESERVED: 'OP_RESERVED',
OP_1: 'OP_1',
OP_2: 'OP_2',
OP_3: 'OP_3',
OP_4: 'OP_4',
OP_5: 'OP_5',
OP_6: 'OP_6',
OP_7: 'OP_7',
OP_8: 'OP_8',
OP_9: 'OP_9',
OP_10: 'OP_10',
OP_11: 'OP_11',
OP_12: 'OP_12',
OP_13: 'OP_13',
OP_14: 'OP_14',
OP_15: 'OP_15',
OP_16: 'OP_16',
OP_NOP: 'OP_NOP',
OP_VER: 'OP_VER',
OP_IF: 'OP_IF',
OP_NOTIF: 'OP_NOTIF',
OP_VERIF: 'OP_VERIF',
OP_VERNOTIF: 'OP_VERNOTIF',
OP_ELSE: 'OP_ELSE',
OP_ENDIF: 'OP_ENDIF',
OP_VERIFY: 'OP_VERIFY',
OP_RETURN: 'OP_RETURN',
OP_TOALTSTACK: 'OP_TOALTSTACK',
OP_FROMALTSTACK: 'OP_FROMALTSTACK',
OP_2DROP: 'OP_2DROP',
OP_2DUP: 'OP_2DUP',
OP_3DUP: 'OP_3DUP',
OP_2OVER: 'OP_2OVER',
OP_2ROT: 'OP_2ROT',
OP_2SWAP: 'OP_2SWAP',
OP_IFDUP: 'OP_IFDUP',
OP_DEPTH: 'OP_DEPTH',
OP_DROP: 'OP_DROP',
OP_DUP: 'OP_DUP',
OP_NIP: 'OP_NIP',
OP_OVER: 'OP_OVER',
OP_PICK: 'OP_PICK',
OP_ROLL: 'OP_ROLL',
OP_ROT: 'OP_ROT',
OP_SWAP: 'OP_SWAP',
OP_TUCK: 'OP_TUCK',
OP_CAT: 'OP_CAT',
OP_SUBSTR: 'OP_SUBSTR',
OP_LEFT: 'OP_LEFT',
OP_RIGHT: 'OP_RIGHT',
OP_SIZE: 'OP_SIZE',
OP_INVERT: 'OP_INVERT',
OP_AND: 'OP_AND',
OP_OR: 'OP_OR',
OP_XOR: 'OP_XOR',
OP_EQUAL: 'OP_EQUAL',
OP_EQUALVERIFY: 'OP_EQUALVERIFY',
OP_RESERVED1: 'OP_RESERVED1',
OP_RESERVED2: 'OP_RESERVED2',
OP_1ADD: 'OP_1ADD',
OP_1SUB: 'OP_1SUB',
OP_2MUL: 'OP_2MUL',
OP_2DIV: 'OP_2DIV',
OP_NEGATE: 'OP_NEGATE',
OP_ABS: 'OP_ABS',
OP_NOT: 'OP_NOT',
OP_0NOTEQUAL: 'OP_0NOTEQUAL',
OP_ADD: 'OP_ADD',
OP_SUB: 'OP_SUB',
OP_MUL: 'OP_MUL',
OP_DIV: 'OP_DIV',
OP_MOD: 'OP_MOD',
OP_LSHIFT: 'OP_LSHIFT',
OP_RSHIFT: 'OP_RSHIFT',
OP_BOOLAND: 'OP_BOOLAND',
OP_BOOLOR: 'OP_BOOLOR',
OP_NUMEQUAL: 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY: 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL: 'OP_NUMNOTEQUAL',
OP_LESSTHAN: 'OP_LESSTHAN',
OP_GREATERTHAN: 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL: 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL: 'OP_GREATERTHANOREQUAL',
OP_MIN: 'OP_MIN',
OP_MAX: 'OP_MAX',
OP_WITHIN: 'OP_WITHIN',
OP_RIPEMD160: 'OP_RIPEMD160',
OP_SHA1: 'OP_SHA1',
OP_SHA256: 'OP_SHA256',
OP_HASH160: 'OP_HASH160',
OP_HASH256: 'OP_HASH256',
OP_CODESEPARATOR: 'OP_CODESEPARATOR',
OP_CHECKSIG: 'OP_CHECKSIG',
OP_CHECKSIGVERIFY: 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG: 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY: 'OP_CHECKMULTISIGVERIFY',
OP_NOP1: 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY: 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY: 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4: 'OP_NOP4',
OP_NOP5: 'OP_NOP5',
OP_NOP6: 'OP_NOP6',
OP_NOP7: 'OP_NOP7',
OP_NOP8: 'OP_NOP8',
OP_NOP9: 'OP_NOP9',
OP_NOP10: 'OP_NOP10',
OP_CHECKSIGADD: 'OP_CHECKSIGADD',
OP_INVALIDOPCODE: 'OP_INVALIDOPCODE',
})
class CScriptInvalidError(Exception):
    """Base class for CScript exceptions

    Raised by script parsing (e.g. raw_iter) for structurally invalid
    scripts such as a PUSHDATA missing its length bytes.
    """
    pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
    """Invalid pushdata due to truncation"""
    def __init__(self, msg, data):
        # The bytes that were successfully read before the script ended.
        self.data = data
        super().__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum:
    """Script-style number: sign-magnitude, little-endian, minimally encoded."""
    __slots__ = ("value",)

    def __init__(self, d=0):
        self.value = d

    @staticmethod
    def encode(obj):
        """Serialize as a length-prefixed minimal script-number push."""
        v = obj.value
        if v == 0:
            return b''
        negative = v < 0
        magnitude = abs(v)
        payload = bytearray()
        while magnitude:
            payload.append(magnitude & 0xff)
            magnitude >>= 8
        if payload[-1] & 0x80:
            # Top bit already used by the magnitude: add a sign byte.
            payload.append(0x80 if negative else 0x00)
        elif negative:
            # Fold the sign into the top bit of the last byte.
            payload[-1] |= 0x80
        return bytes([len(payload)]) + payload

    @staticmethod
    def decode(vch):
        """Inverse of encode(); assumes a valid push size and minimal encoding."""
        payload = vch[1:]  # strip the push-size prefix
        if not payload:
            return 0
        result = 0
        for i, byte in enumerate(payload):
            result |= byte << (8 * i)
        if payload[-1] & 0x80:
            # Clear the sign bit and negate.
            result &= (1 << (len(payload) * 8 - 1)) - 1
            result = -result
        return result
class CScript(bytes):
    """Serialized script
    A bytes subclass, so you can use this directly whenever bytes are accepted.
    Note that this means that indexing does *not* work - you'll get an index by
    byte rather than opcode. This format was chosen for efficiency so that the
    general case would not require creating a lot of little CScriptOP objects.
    iter(script) however does iterate by opcode.
    """
    __slots__ = ()
    @classmethod
    def __coerce_instance(cls, other):
        # Coerce other into bytes:
        # - CScriptOp: a single opcode byte
        # - CScriptNum: minimal script-number push (OP_0 for zero)
        # - int: OP_0..OP_16 / OP_1NEGATE for small values, else a PUSHDATA
        # - bytes/bytearray: a PUSHDATA push of the raw data
        if isinstance(other, CScriptOp):
            other = bytes([other])
        elif isinstance(other, CScriptNum):
            if (other.value == 0):
                other = bytes([CScriptOp(OP_0)])
            else:
                other = CScriptNum.encode(other)
        elif isinstance(other, int):
            if 0 <= other <= 16:
                other = bytes([CScriptOp.encode_op_n(other)])
            elif other == -1:
                other = bytes([OP_1NEGATE])
            else:
                other = CScriptOp.encode_op_pushdata(bn2vch(other))
        elif isinstance(other, (bytes, bytearray)):
            other = CScriptOp.encode_op_pushdata(other)
        return other
    def __add__(self, other):
        # add makes no sense for a CScript()
        raise NotImplementedError
    def join(self, iterable):
        # join makes no sense for a CScript()
        raise NotImplementedError
    def __new__(cls, value=b''):
        # Bytes-like values are wrapped verbatim; any other iterable has each
        # element coerced (opcode / number / pushdata) and concatenated.
        if isinstance(value, bytes) or isinstance(value, bytearray):
            return super().__new__(cls, value)
        else:
            def coerce_iterable(iterable):
                for instance in iterable:
                    yield cls.__coerce_instance(instance)
            # Annoyingly on both python2 and python3 bytes.join() always
            # returns a bytes instance even when subclassed.
            return super().__new__(cls, b''.join(coerce_iterable(value)))
    def raw_iter(self):
        """Raw iteration
        Yields tuples of (opcode, data, sop_idx) so that the different possible
        PUSHDATA encodings can be accurately distinguished, as well as
        determining the exact opcode byte indexes. (sop_idx)
        """
        i = 0
        while i < len(self):
            sop_idx = i
            opcode = self[i]
            i += 1
            if opcode > OP_PUSHDATA4:
                # Not a push operation: no associated data.
                yield (opcode, None, sop_idx)
            else:
                # Push operation: determine how many data bytes follow.
                datasize = None
                pushdata_type = None
                if opcode < OP_PUSHDATA1:
                    # Direct push: the opcode byte itself is the data length.
                    pushdata_type = 'PUSHDATA(%d)' % opcode
                    datasize = opcode
                elif opcode == OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA1'
                    if i >= len(self):
                        raise CScriptInvalidError('PUSHDATA1: missing data length')
                    datasize = self[i]
                    i += 1
                elif opcode == OP_PUSHDATA2:
                    pushdata_type = 'PUSHDATA2'
                    if i + 1 >= len(self):
                        raise CScriptInvalidError('PUSHDATA2: missing data length')
                    # Two-byte little-endian length.
                    datasize = self[i] + (self[i + 1] << 8)
                    i += 2
                elif opcode == OP_PUSHDATA4:
                    pushdata_type = 'PUSHDATA4'
                    if i + 3 >= len(self):
                        raise CScriptInvalidError('PUSHDATA4: missing data length')
                    # Four-byte little-endian length.
                    datasize = self[i] + (self[i + 1] << 8) + (self[i + 2] << 16) + (self[i + 3] << 24)
                    i += 4
                else:
                    assert False # shouldn't happen
                data = bytes(self[i:i + datasize])
                # Check for truncation
                if len(data) < datasize:
                    raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
                i += datasize
                yield (opcode, data, sop_idx)
    def __iter__(self):
        """'Cooked' iteration
        Returns either a CScriptOP instance, an integer, or bytes, as
        appropriate.
        See raw_iter() if you need to distinguish the different possible
        PUSHDATA encodings.
        """
        for (opcode, data, sop_idx) in self.raw_iter():
            if data is not None:
                yield data
            else:
                opcode = CScriptOp(opcode)
                # Small-int opcodes are surfaced as plain Python ints.
                if opcode.is_small_int():
                    yield opcode.decode_op_n()
                else:
                    yield CScriptOp(opcode)
    def __repr__(self):
        def _repr(o):
            if isinstance(o, bytes):
                return "x('%s')" % o.hex()
            else:
                return repr(o)
        ops = []
        i = iter(self)
        while True:
            op = None
            try:
                op = _repr(next(i))
            except CScriptTruncatedPushDataError as err:
                # Show the partial data along with the error, then stop.
                op = '%s...<ERROR: %s>' % (_repr(err.data), err)
                break
            except CScriptInvalidError as err:
                op = '<ERROR: %s>' % err
                break
            except StopIteration:
                break
            finally:
                if op is not None:
                    ops.append(op)
        return "CScript([%s])" % ', '.join(ops)
    def GetSigOpCount(self, fAccurate):
        """Get the SigOp count.
        fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
        Note that this is consensus-critical.
        """
        n = 0
        lastOpcode = OP_INVALIDOPCODE
        for (opcode, data, sop_idx) in self.raw_iter():
            if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
                n += 1
            elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
                # With fAccurate, a preceding OP_1..OP_16 gives the exact
                # pubkey count; otherwise the worst case of 20 is assumed.
                if fAccurate and (OP_1 <= lastOpcode <= OP_16):
                    n += opcode.decode_op_n()
                else:
                    n += 20
            lastOpcode = opcode
        return n
SIGHASH_DEFAULT = 0 # Taproot-only default, semantics same as SIGHASH_ALL
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
    """Consensus critical, see FindAndDelete() in Satoshi codebase

    Returns a copy of *script* with every byte-wise occurrence of *sig*
    that begins at an opcode boundary removed.
    """
    r = b''
    last_sop_idx = sop_idx = 0
    # 'skip' is True while the bytes starting at the previous opcode boundary
    # matched sig; such segments are not copied into the output.
    skip = True
    for (opcode, data, sop_idx) in script.raw_iter():
        if not skip:
            r += script[last_sop_idx:sop_idx]
        last_sop_idx = sop_idx
        if script[sop_idx:sop_idx + len(sig)] == sig:
            skip = True
        else:
            skip = False
    # Flush the final segment unless it matched sig.
    if not skip:
        r += script[last_sop_idx:]
    return CScript(r)
def LegacySignatureHash(script, txTo, inIdx, hashtype):
    """Consensus-correct SignatureHash
    Returns (hash, err) to precisely match the consensus-critical behavior of
    the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
    """
    # The constant "one" hash returned (with an error string) in the cases
    # where the reference implementation does the same.
    HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    if inIdx >= len(txTo.vin):
        return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
    # Work on a copy: blank every scriptSig, then place the signed script
    # (with OP_CODESEPARATORs removed) on the input being signed.
    txtmp = CTransaction(txTo)
    for txin in txtmp.vin:
        txin.scriptSig = b''
    txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
    if (hashtype & 0x1f) == SIGHASH_NONE:
        # No outputs are signed; other inputs' sequence numbers are zeroed.
        txtmp.vout = []
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    elif (hashtype & 0x1f) == SIGHASH_SINGLE:
        # Only the output with the same index is signed; earlier outputs are
        # blanked to value -1 and later outputs are dropped.
        outIdx = inIdx
        if outIdx >= len(txtmp.vout):
            return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
        tmp = txtmp.vout[outIdx]
        txtmp.vout = []
        for _ in range(outIdx):
            txtmp.vout.append(CTxOut(-1))
        txtmp.vout.append(tmp)
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    if hashtype & SIGHASH_ANYONECANPAY:
        # Commit only to the input being signed.
        tmp = txtmp.vin[inIdx]
        txtmp.vin = []
        txtmp.vin.append(tmp)
    # Double-SHA256 of the modified transaction plus the 4-byte hash type.
    s = txtmp.serialize_without_witness()
    s += struct.pack(b"<I", hashtype)
    hash = hash256(s)
    return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitV0SignatureHash(script, txTo, inIdx, hashtype, amount):
    """Signature digest for version-0 witness programs.

    Commits to the spent amount and uses the hashPrevouts/hashSequence/
    hashOutputs intermediate hashes (zero when not committed to).
    """
    hashPrevouts = 0
    hashSequence = 0
    hashOutputs = 0
    # hashPrevouts: all input outpoints, unless ANYONECANPAY.
    if not (hashtype & SIGHASH_ANYONECANPAY):
        serialize_prevouts = bytes()
        for i in txTo.vin:
            serialize_prevouts += i.prevout.serialize()
        hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
    # hashSequence: all input sequence numbers, only for plain SIGHASH_ALL.
    if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_sequence = bytes()
        for i in txTo.vin:
            serialize_sequence += struct.pack("<I", i.nSequence)
        hashSequence = uint256_from_str(hash256(serialize_sequence))
    # hashOutputs: all outputs for ALL; just the matching output for SINGLE.
    if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        serialize_outputs = bytes()
        for o in txTo.vout:
            serialize_outputs += o.serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
        serialize_outputs = txTo.vout[inIdx].serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    # Assemble the preimage and double-SHA256 it.
    ss = bytes()
    ss += struct.pack("<i", txTo.nVersion)
    ss += ser_uint256(hashPrevouts)
    ss += ser_uint256(hashSequence)
    ss += txTo.vin[inIdx].prevout.serialize()
    ss += ser_string(script)
    ss += struct.pack("<q", amount)
    ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
    ss += ser_uint256(hashOutputs)
    ss += struct.pack("<i", txTo.nLockTime)
    ss += struct.pack("<I", hashtype)
    return hash256(ss)
class TestFrameworkScript(unittest.TestCase):
    """Unit tests for the numeric encoding helpers in this module."""
    def test_bn2vch(self):
        # Spot-check bn2vch against known encodings: little-endian magnitude
        # with the sign carried in the top bit of the final byte.
        self.assertEqual(bn2vch(0), bytes([]))
        self.assertEqual(bn2vch(1), bytes([0x01]))
        self.assertEqual(bn2vch(-1), bytes([0x81]))
        self.assertEqual(bn2vch(0x7F), bytes([0x7F]))
        self.assertEqual(bn2vch(-0x7F), bytes([0xFF]))
        self.assertEqual(bn2vch(0x80), bytes([0x80, 0x00]))
        self.assertEqual(bn2vch(-0x80), bytes([0x80, 0x80]))
        self.assertEqual(bn2vch(0xFF), bytes([0xFF, 0x00]))
        self.assertEqual(bn2vch(-0xFF), bytes([0xFF, 0x80]))
        self.assertEqual(bn2vch(0x100), bytes([0x00, 0x01]))
        self.assertEqual(bn2vch(-0x100), bytes([0x00, 0x81]))
        self.assertEqual(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
        self.assertEqual(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
        self.assertEqual(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
        self.assertEqual(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
        self.assertEqual(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
        self.assertEqual(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
        self.assertEqual(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
        self.assertEqual(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
    def test_cscriptnum_encoding(self):
        # round-trip negative and multi-byte CScriptNums
        values = [0, 1, -1, -2, 127, 128, -255, 256, (1 << 15) - 1, -(1 << 16), (1 << 24) - 1, (1 << 31), 1 - (1 << 32), 1 << 40, 1500, -1500]
        for value in values:
            self.assertEqual(CScriptNum.decode(CScriptNum.encode(CScriptNum(value))), value)
def TaprootSignatureHash(txTo, spent_utxos, hash_type, input_index = 0, scriptpath = False, script = CScript(), codeseparator_pos = -1, annex = None, leaf_ver = LEAF_VERSION_TAPSCRIPT):
    """Taproot signature digest: TaggedHash("TapSighash", ...) over the
    message assembled below.  spent_utxos must list the output spent by
    each input of txTo, in order.
    """
    assert (len(txTo.vin) == len(spent_utxos))
    assert (input_index < len(txTo.vin))
    # hash_type 0 (SIGHASH_DEFAULT) has SIGHASH_ALL output semantics.
    out_type = SIGHASH_ALL if hash_type == 0 else hash_type & 3
    in_type = hash_type & SIGHASH_ANYONECANPAY
    spk = spent_utxos[input_index].scriptPubKey
    ss = bytes([0, hash_type]) # epoch, hash_type
    ss += struct.pack("<i", txTo.nVersion)
    ss += struct.pack("<I", txTo.nLockTime)
    # Unless ANYONECANPAY, commit to all prevouts, amounts, scriptPubKeys
    # and sequence numbers via single-SHA256 intermediate hashes.
    if in_type != SIGHASH_ANYONECANPAY:
        ss += sha256(b"".join(i.prevout.serialize() for i in txTo.vin))
        ss += sha256(b"".join(struct.pack("<q", u.nValue) for u in spent_utxos))
        ss += sha256(b"".join(ser_string(u.scriptPubKey) for u in spent_utxos))
        ss += sha256(b"".join(struct.pack("<I", i.nSequence) for i in txTo.vin))
    if out_type == SIGHASH_ALL:
        ss += sha256(b"".join(o.serialize() for o in txTo.vout))
    # spend_type: bit 0 = annex present, bit 1 = script-path spend.
    spend_type = 0
    if annex is not None:
        spend_type |= 1
    if (scriptpath):
        spend_type |= 2
    ss += bytes([spend_type])
    if in_type == SIGHASH_ANYONECANPAY:
        # Commit to this input's data directly instead of the global hashes.
        ss += txTo.vin[input_index].prevout.serialize()
        ss += struct.pack("<q", spent_utxos[input_index].nValue)
        ss += ser_string(spk)
        ss += struct.pack("<I", txTo.vin[input_index].nSequence)
    else:
        ss += struct.pack("<I", input_index)
    if (spend_type & 1):
        ss += sha256(ser_string(annex))
    if out_type == SIGHASH_SINGLE:
        if input_index < len(txTo.vout):
            ss += sha256(txTo.vout[input_index].serialize())
        else:
            ss += bytes(0 for _ in range(32))
    if (scriptpath):
        # Script-path extension: leaf hash, key version byte, codesep position.
        ss += TaggedHash("TapLeaf", bytes([leaf_ver]) + ser_string(script))
        ss += bytes([0])
        ss += struct.pack("<i", codeseparator_pos)
    # Sanity-check the exact message length for every flag combination.
    assert len(ss) == 175 - (in_type == SIGHASH_ANYONECANPAY) * 49 - (out_type != SIGHASH_ALL and out_type != SIGHASH_SINGLE) * 32 + (annex is not None) * 32 + scriptpath * 37
    return TaggedHash("TapSighash", ss)
def taproot_tree_helper(scripts):
    """Recursively build a Merkle tree from a nested script list.

    Returns (leaf_info, hash): leaf_info is a list of
    (name, version, script, merklebranch) tuples and hash is the Merkle
    root of this subtree.  See taproot_construct for the input format.
    """
    if len(scripts) == 0:
        return ([], bytes())
    if len(scripts) == 1:
        # One entry: treat as a leaf
        script = scripts[0]
        assert(not callable(script))
        if isinstance(script, list):
            return taproot_tree_helper(script)
        assert(isinstance(script, tuple))
        version = LEAF_VERSION_TAPSCRIPT
        name = script[0]
        code = script[1]
        if len(script) == 3:
            version = script[2]
        # Leaf versions must have the low bit clear.
        assert version & 1 == 0
        assert isinstance(code, bytes)
        h = TaggedHash("TapLeaf", bytes([version]) + ser_string(code))
        if name is None:
            return ([], h)
        return ([(name, version, code, bytes())], h)
    elif len(scripts) == 2 and callable(scripts[1]):
        # Two entries, and the right one is a function
        left, left_h = taproot_tree_helper(scripts[0:1])
        right_h = scripts[1](left_h)
        left = [(name, version, script, control + right_h) for name, version, script, control in left]
        right = []
    else:
        # Two or more entries: descend into each side
        split_pos = len(scripts) // 2
        left, left_h = taproot_tree_helper(scripts[0:split_pos])
        right, right_h = taproot_tree_helper(scripts[split_pos:])
        # Extend each side's Merkle branches with the sibling subtree's hash.
        left = [(name, version, script, control + right_h) for name, version, script, control in left]
        right = [(name, version, script, control + left_h) for name, version, script, control in right]
    # Branch children are hashed in lexicographic order.
    if right_h < left_h:
        right_h, left_h = left_h, right_h
    h = TaggedHash("TapBranch", left_h + right_h)
    return (left + right, h)
# A TaprootInfo object has the following fields:
# - scriptPubKey: the scriptPubKey (witness v1 CScript)
# - internal_pubkey: the internal pubkey (32 bytes)
# - negflag: whether the pubkey in the scriptPubKey was negated from internal_pubkey+tweak*G (bool).
# - tweak: the tweak (32 bytes)
# - leaves: a dict of name -> TaprootLeafInfo objects for all known leaves
TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,internal_pubkey,negflag,tweak,leaves")
# A TaprootLeafInfo object has the following fields:
# - script: the leaf script (CScript or bytes)
# - version: the leaf version (0xc0 for BIP342 tapscript)
# - merklebranch: the merkle branch to use for this leaf (32*N bytes)
TaprootLeafInfo = namedtuple("TaprootLeafInfo", "script,version,merklebranch")
def taproot_construct(pubkey, scripts=None):
    """Construct a tree of Taproot spending conditions
    pubkey: a 32-byte xonly pubkey for the internal pubkey (bytes)
    scripts: a list of items; each item is either:
    - a (name, CScript or bytes, leaf version) tuple
    - a (name, CScript or bytes) tuple (defaulting to leaf version 0xc0)
    - another list of items (with the same structure)
    - a list of two items; the first of which is an item itself, and the
      second is a function. The function takes as input the Merkle root of the
      first item, and produces a (fictitious) partner to hash with.
    Returns: a TaprootInfo object
    """
    if scripts is None:
        scripts = []
    ret, h = taproot_tree_helper(scripts)
    # Tweak the internal key with TapTweak(pubkey || merkle_root).
    tweak = TaggedHash("TapTweak", pubkey + h)
    tweaked, negated = tweak_add_pubkey(pubkey, tweak)
    leaves = dict((name, TaprootLeafInfo(script, version, merklebranch)) for name, version, script, merklebranch in ret)
    # 'negated + 0' coerces the bool to an int for the negflag field.
    return TaprootInfo(CScript([OP_1, tweaked]), pubkey, negated + 0, tweak, leaves)
def is_op_success(o):
    """Return whether opcode value *o* falls in the OP_SUCCESSx set."""
    if o in (0x50, 0x62, 0x89, 0x8a, 0x8d, 0x8e):
        return True
    return (0x7e <= o <= 0x81
            or 0x83 <= o <= 0x86
            or 0x95 <= o <= 0x99
            or 0xbb <= o <= 0xfe)
|
Islandman93/reinforcepy | refs/heads/master | reinforcepy/learners/base_learner.py | 1 | from abc import ABCMeta
class BaseLearner(metaclass=ABCMeta):
    """Abstract base class that all learners derive from."""

    def run_episode(self, environment):
        """Run one episode against *environment*.

        The learner decides when the episode terminates and must return
        the total reward accumulated over the episode.
        """
        raise NotImplementedError("Subclasses must implement run_episode")
|
TidyHuang/floodlight | refs/heads/master | src/main/python/PythonClient.py | 152 | #!/usr/bin/env python
import sys
sys.path.append('../../../target/gen-py')
from packetstreamer import PacketStreamer
from packetstreamer.ttypes import *
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
# Poll the local packetstreamer Thrift service forever for captured packets,
# printing each one; exits via sys.exit() when the server reports a filter
# timeout for the session. (Python 2 syntax.)
try:
    # Make socket
    transport = TSocket.TSocket('localhost', 9090)

    # Buffering is critical. Raw sockets are very slow
    transport = TTransport.TFramedTransport(transport)

    # Wrap in a protocol
    protocol = TBinaryProtocol.TBinaryProtocol(transport)

    # Create a client to use the protocol encoder
    client = PacketStreamer.Client(protocol)

    # Connect!
    transport.open()

    while 1:
        # Blocking call: fetch the packets captured so far for this session id.
        packets = client.getPackets("session1")
        print 'session1 packets num: %d' % (len(packets))
        count = 1
        for packet in packets:
            print "Packet %d: %s"% (count, packet)
            # The server signals expiry of the capture filter with a packet
            # containing the sentinel string "FilterTimeout"; stop polling then.
            if "FilterTimeout" in packet:
                sys.exit()
            count += 1

    # Close!
    # NOTE(review): unreachable — the loop above only exits via sys.exit()
    # or an exception, so the transport is never closed cleanly.
    transport.close()

except Thrift.TException, tx:
    print '%s' % (tx.message)

except KeyboardInterrupt, e:
    print 'Bye-bye'
|
gorjuce/odoo | refs/heads/8.0 | addons/resource/resource.py | 81 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP SA (http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
from dateutil import rrule
from dateutil.relativedelta import relativedelta
from operator import itemgetter
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.float_utils import float_compare
from openerp.tools.translate import _
import pytz
class resource_calendar(osv.osv):
    """ Calendar model for a resource. It has
     - attendance_ids: list of resource.calendar.attendance that are a working
       interval in a given weekday.
     - leave_ids: list of leaves linked to this calendar. A leave can be general
       or linked to a specific resource, depending on its resource_id.

    All methods in this class use intervals. An interval is a tuple holding
    (begin_datetime, end_datetime). A list of intervals is therefore a list of
    tuples, holding several intervals of work or leaves. """
    _name = "resource.calendar"
    _description = "Resource Calendar"

    _columns = {
        'name': fields.char("Name", required=True),
        'company_id': fields.many2one('res.company', 'Company', required=False),
        # Weekly working periods (day of week + hour range); duplicated on copy.
        'attendance_ids': fields.one2many('resource.calendar.attendance', 'calendar_id', 'Working Time', copy=True),
        'manager': fields.many2one('res.users', 'Workgroup Manager'),
        # Leaves attached to this calendar; a leave with a resource_id is
        # specific to that resource, otherwise it is a general leave.
        'leave_ids': fields.one2many(
            'resource.calendar.leaves', 'calendar_id', 'Leaves',
            help=''
        ),
    }
    _defaults = {
        # Default the company to the current user's default company.
        'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'resource.calendar', context=context)
    }

    # --------------------------------------------------
    # Utility methods
    # --------------------------------------------------
def interval_clean(self, intervals):
    """ Utility method that sorts and removes overlapping inside datetime
    intervals. The intervals are sorted based on increasing starting datetime.
    Overlapping (or touching) intervals are merged into a single one.

    :param list intervals: list of intervals; each interval is a tuple
                           (datetime_from, datetime_to)
    :return list cleaned: list of sorted intervals without overlap """
    ordered = sorted(intervals, key=itemgetter(0))
    cleaned = []
    running = None
    for item in ordered:
        begin, end = item[0], item[1]
        if running is None:
            # first interval: start the running merge window
            running = [begin, end]
        elif begin > running[1]:
            # strictly disjoint: flush the running window and restart
            cleaned.append(tuple(running))
            running = [begin, end]
        elif end > running[1]:
            # overlapping or touching: extend the running window
            running[1] = end
    if running is not None:
        cleaned.append(tuple(running))
    return cleaned
def interval_remove_leaves(self, interval, leave_intervals):
    """ Utility method that remove leave intervals from a base interval:

    - clean the leave intervals, to have an ordered list of not-overlapping
      intervals
    - initiate the current interval to be the base interval
    - for each leave interval:

      - finishing before the current interval: skip, go to next
      - beginning after the current interval: skip and get out of the loop
        because we are outside range (leaves are ordered)
      - beginning within the current interval: close the current interval
        and begin a new current interval that begins at the end of the leave
        interval
      - ending within the current interval: update the current interval begin
        to match the leave interval ending

    :param tuple interval: a tuple (beginning datetime, ending datetime) that
                           is the base interval from which the leave intervals
                           will be removed
    :param list leave_intervals: a list of tuples (beginning datetime, ending datetime)
                                 that are intervals to remove from the base interval
    :return list intervals: a list of tuples (begin datetime, end datetime)
                            that are the remaining valid intervals """
    if not interval:
        return interval
    if leave_intervals is None:
        leave_intervals = []
    intervals = []
    # normalize leaves: sorted, non-overlapping
    leave_intervals = self.interval_clean(leave_intervals)
    current_interval = [interval[0], interval[1]]
    for leave in leave_intervals:
        # leave entirely before the remaining window: ignore
        if leave[1] <= current_interval[0]:
            continue
        # leave entirely after the remaining window: leaves are ordered, stop
        if leave[0] >= current_interval[1]:
            break
        # leave starts inside the window: emit the part before the leave
        # and continue with the part after it
        if current_interval[0] < leave[0] < current_interval[1]:
            current_interval[1] = leave[0]
            intervals.append((current_interval[0], current_interval[1]))
            current_interval = [leave[1], interval[1]]
        # if current_interval[0] <= leave[1] <= current_interval[1]:
        # leave covers the beginning of the window: shift the window start
        if current_interval[0] <= leave[1]:
            current_interval[0] = leave[1]
    if current_interval and current_interval[0] < interval[1]: # remove intervals moved outside base interval due to leaves
        intervals.append((current_interval[0], current_interval[1]))
    return intervals
def interval_schedule_hours(self, intervals, hour, remove_at_end=True):
    """ Schedule hours in intervals. The last matching interval is truncated
    to match the specified hours.

    It is possible to truncate the last interval at its beginning or ending.
    However this does nothing on the given interval order that should be
    submitted accordingly.

    :param list intervals: a list of tuples (beginning datetime, ending datetime)
    :param int/float hour: number of hours to schedule. It will be converted
                           into a timedelta, but should be submitted as an
                           int or float.
    :param boolean remove_at_end: remove extra hours at the end of the last
                                  matching interval. Otherwise, do it at the
                                  beginning.

    :return list results: a list of intervals. If the number of hours to schedule
    is greater than the possible scheduling in the intervals, no extra-scheduling
    is done, and results == intervals. """
    results = []
    res = datetime.timedelta()
    limit = datetime.timedelta(hours=hour)
    for interval in intervals:
        res += interval[1] - interval[0]
        # NOTE(review): `seconds` converts a timedelta to float seconds —
        # presumably a module-level helper defined elsewhere in this file; confirm.
        if res > limit and remove_at_end:
            # too much accumulated: shrink this interval's end (limit-res < 0)
            interval = (interval[0], interval[1] + relativedelta(seconds=seconds(limit-res)))
        elif res > limit:
            # shrink this interval's beginning instead
            interval = (interval[0] + relativedelta(seconds=seconds(res-limit)), interval[1])
        results.append(interval)
        if res > limit:
            break
    return results

# --------------------------------------------------
# Date and hours computation
# --------------------------------------------------
def get_attendances_for_weekdays(self, cr, uid, id, weekdays, context=None):
    """ Given a list of weekdays, return matching resource.calendar.attendance"""
    wanted = set(weekdays)
    record = self.browse(cr, uid, id, context=None)
    matching = []
    for attendance in record.attendance_ids:
        if int(attendance.dayofweek) in wanted:
            matching.append(attendance)
    return matching
def get_weekdays(self, cr, uid, id, default_weekdays=None, context=None):
    """ Return the list of weekdays that contain at least one working interval.
    If no id is given (no calendar), return default weekdays. """
    if id is None:
        if default_weekdays is not None:
            return default_weekdays
        return [0, 1, 2, 3, 4]
    record = self.browse(cr, uid, id, context=None)
    # deduplicate: several attendances may share the same weekday
    return list({int(attendance.dayofweek) for attendance in record.attendance_ids})
def get_next_day(self, cr, uid, id, day_date, context=None):
    """ Get following date of day_date, based on resource.calendar. If no
    calendar is provided, just return the next day.

    :param int id: id of a resource.calendar. If not given, simply add one day
                   to the submitted date.
    :param date day_date: current day as a date

    :return date: next day of calendar, or just next day """
    if not id:
        return day_date + relativedelta(days=1)
    weekdays = self.get_weekdays(cr, uid, id, context)
    # NOTE(review): assumes `weekdays` is in ascending order — confirm
    # get_weekdays guarantees that.
    # base_index: index of the last working weekday <= today's weekday
    base_index = -1
    for weekday in weekdays:
        if weekday > day_date.weekday():
            break
        base_index += 1
    # the next working weekday, wrapping around to the first one
    new_index = (base_index + 1) % len(weekdays)
    days = (weekdays[new_index] - day_date.weekday())
    # wrapped into next week: shift forward by a full week
    if days < 0:
        days = 7 + days
    return day_date + relativedelta(days=days)
def get_previous_day(self, cr, uid, id, day_date, context=None):
    """ Get previous date of day_date, based on resource.calendar. If no
    calendar is provided, just return the previous day.

    :param int id: id of a resource.calendar. If not given, simply remove
                   one day from the submitted date.
    :param date day_date: current day as a date

    :return date: previous day of calendar, or just previous day """
    if not id:
        return day_date + relativedelta(days=-1)
    weekdays = self.get_weekdays(cr, uid, id, context)
    # walk the weekdays in descending order (mirror of get_next_day)
    weekdays.reverse()
    # base_index: index of the first working weekday >= today's weekday
    base_index = -1
    for weekday in weekdays:
        if weekday < day_date.weekday():
            break
        base_index += 1
    new_index = (base_index + 1) % len(weekdays)
    days = (weekdays[new_index] - day_date.weekday())
    # wrapped into the previous week: shift back by a full week
    if days > 0:
        days = days - 7
    return day_date + relativedelta(days=days)
def get_leave_intervals(self, cr, uid, id, resource_id=None,
                        start_datetime=None, end_datetime=None,
                        context=None):
    """Get the leaves of the calendar. Leaves can be filtered on the resource,
    the start datetime or the end datetime.

    :param int resource_id: the id of the resource to take into account when
                            computing the leaves. If not set, only general
                            leaves are computed. If set, generic and
                            specific leaves are computed.
    :param datetime start_datetime: if provided, do not take into account leaves
                                    ending before this date.
    :param datetime end_datetime: if provided, do not take into account leaves
                                  beginning after this date.

    :return list leaves: list of tuples (start_datetime, end_datetime) of
                         leave intervals
    """
    calendar = self.browse(cr, uid, id, context=context)
    result = []
    for leave in calendar.leave_ids:
        # leaves bound to another resource do not apply; unbound leaves
        # are general and always considered
        if leave.resource_id and resource_id != leave.resource_id.id:
            continue
        begin = datetime.datetime.strptime(leave.date_from, tools.DEFAULT_SERVER_DATETIME_FORMAT)
        if end_datetime and begin > end_datetime:
            continue
        finish = datetime.datetime.strptime(leave.date_to, tools.DEFAULT_SERVER_DATETIME_FORMAT)
        if start_datetime and finish < start_datetime:
            continue
        result.append((begin, finish))
    return result
def get_working_intervals_of_day(self, cr, uid, id, start_dt=None, end_dt=None,
                                 leaves=None, compute_leaves=False, resource_id=None,
                                 default_interval=None, context=None):
    """ Get the working intervals of the day based on calendar. This method
    handle leaves that come directly from the leaves parameter or can be computed.

    :param int id: resource.calendar id; take the first one if is a list
    :param datetime start_dt: datetime object that is the beginning hours
                              for the working intervals computation; any
                              working interval beginning before start_dt
                              will be truncated. If not set, set to end_dt
                              or today() if no end_dt at 00.00.00.
    :param datetime end_dt: datetime object that is the ending hour
                            for the working intervals computation; any
                            working interval ending after end_dt
                            will be truncated. If not set, set to start_dt()
                            at 23.59.59.
    :param list leaves: a list of tuples(start_datetime, end_datetime) that
                        represent leaves.
    :param boolean compute_leaves: if set and if leaves is None, compute the
                                   leaves based on calendar and resource.
                                   If leaves is None and compute_leaves false
                                   no leaves are taken into account.
    :param int resource_id: the id of the resource to take into account when
                            computing the leaves. If not set, only general
                            leaves are computed. If set, generic and
                            specific leaves are computed.
    :param tuple default_interval: if no id, try to return a default working
                                   day using default_interval[0] as beginning
                                   hour, and default_interval[1] as ending hour.
                                   Example: default_interval = (8, 16).
                                   Otherwise, a void list of working intervals
                                   is returned when id is None.

    :return list intervals: a list of tuples (start_datetime, end_datetime)
                            of work intervals """
    if isinstance(id, (list, tuple)):
        id = id[0]

    # Computes start_dt, end_dt (with default values if not set) + off-interval work limits
    # work_limits holds the out-of-range parts of the day ([midnight, start_dt]
    # and [end_dt, 23:59:59]); they are later subtracted like leaves.
    work_limits = []
    if start_dt is None and end_dt is not None:
        start_dt = end_dt.replace(hour=0, minute=0, second=0)
    elif start_dt is None:
        start_dt = datetime.datetime.now().replace(hour=0, minute=0, second=0)
    else:
        work_limits.append((start_dt.replace(hour=0, minute=0, second=0), start_dt))
    if end_dt is None:
        end_dt = start_dt.replace(hour=23, minute=59, second=59)
    else:
        work_limits.append((end_dt, end_dt.replace(hour=23, minute=59, second=59)))
    assert start_dt.date() == end_dt.date(), 'get_working_intervals_of_day is restricted to one day'

    intervals = []
    work_dt = start_dt.replace(hour=0, minute=0, second=0)

    # no calendar: try to use the default_interval, then return directly
    if id is None:
        working_interval = []
        if default_interval:
            working_interval = (start_dt.replace(hour=default_interval[0], minute=0, second=0), start_dt.replace(hour=default_interval[1], minute=0, second=0))
        intervals = self.interval_remove_leaves(working_interval, work_limits)
        return intervals

    working_intervals = []
    # timezone of the current user/context; attendance hours are interpreted
    # in that timezone and converted below to naive UTC datetimes
    tz_info = fields.datetime.context_timestamp(cr, uid, work_dt, context=context).tzinfo
    for calendar_working_day in self.get_attendances_for_weekdays(cr, uid, id, [start_dt.weekday()], context):
        # NOTE(review): int() truncates hour_from/hour_to, so fractional hours
        # (minutes) configured on the attendance are dropped here — confirm intended.
        x = work_dt.replace(hour=int(calendar_working_day.hour_from))
        y = work_dt.replace(hour=int(calendar_working_day.hour_to))
        x = x.replace(tzinfo=tz_info).astimezone(pytz.UTC).replace(tzinfo=None)
        y = y.replace(tzinfo=tz_info).astimezone(pytz.UTC).replace(tzinfo=None)
        working_interval = (x, y)
        # clip to [start_dt, end_dt] by removing the off-range parts
        working_intervals += self.interval_remove_leaves(working_interval, work_limits)

    # find leave intervals
    if leaves is None and compute_leaves:
        leaves = self.get_leave_intervals(cr, uid, id, resource_id=resource_id, context=None)

    # filter according to leaves
    for interval in working_intervals:
        work_intervals = self.interval_remove_leaves(interval, leaves)
        intervals += work_intervals

    return intervals
def get_working_hours_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
                              leaves=None, compute_leaves=False, resource_id=None,
                              default_interval=None, context=None):
    """ Get the working hours of the day based on calendar. This method uses
    get_working_intervals_of_day to have the work intervals of the day. It
    then calculates the number of hours contained in those intervals. """
    work_intervals = self.get_working_intervals_of_day(
        cr, uid, id,
        start_dt, end_dt, leaves,
        compute_leaves, resource_id,
        default_interval, context)
    total = datetime.timedelta()
    for interval in work_intervals:
        total += interval[1] - interval[0]
    # `seconds` converts the accumulated timedelta to float seconds
    return seconds(total) / 3600.0
def get_working_hours(self, cr, uid, id, start_dt, end_dt, compute_leaves=False,
                      resource_id=None, default_interval=None, context=None):
    """ Sum the working hours of the calendar between two datetimes.

    Iterates day by day over [start_dt, end_dt], restricted to the calendar's
    working weekdays, and accumulates get_working_hours_of_date for each day;
    the first and last days are truncated to start_dt / end_dt respectively.

    :return float hours: total number of working hours in the range """
    hours = 0.0
    for day in rrule.rrule(rrule.DAILY, dtstart=start_dt,
                           until=(end_dt + datetime.timedelta(days=1)).replace(hour=0, minute=0, second=0),
                           byweekday=self.get_weekdays(cr, uid, id, context=context)):
        day_start_dt = day.replace(hour=0, minute=0, second=0)
        if start_dt and day.date() == start_dt.date():
            day_start_dt = start_dt
        day_end_dt = day.replace(hour=23, minute=59, second=59)
        if end_dt and day.date() == end_dt.date():
            day_end_dt = end_dt
        hours += self.get_working_hours_of_date(
            cr, uid, id, start_dt=day_start_dt, end_dt=day_end_dt,
            compute_leaves=compute_leaves, resource_id=resource_id,
            default_interval=default_interval,
            context=context)
    return hours

# --------------------------------------------------
# Hours scheduling
# --------------------------------------------------
def _schedule_hours(self, cr, uid, id, hours, day_dt=None,
                    compute_leaves=False, resource_id=None,
                    default_interval=None, context=None):
    """ Schedule hours of work, using a calendar and an optional resource to
    compute working and leave days. This method can be used backwards, i.e.
    scheduling days before a deadline.

    :param int hours: number of hours to schedule. Use a negative number to
                      compute a backwards scheduling.
    :param datetime day_dt: reference date to compute working days. If days is
                            > 0 date is the starting date. If days is < 0
                            date is the ending date.
    :param boolean compute_leaves: if set, compute the leaves based on calendar
                                   and resource. Otherwise no leaves are taken
                                   into account.
    :param int resource_id: the id of the resource to take into account when
                            computing the leaves. If not set, only general
                            leaves are computed. If set, generic and
                            specific leaves are computed.
    :param tuple default_interval: if no id, try to return a default working
                                   day using default_interval[0] as beginning
                                   hour, and default_interval[1] as ending hour.
                                   Example: default_interval = (8, 16).
                                   Otherwise, a void list of working intervals
                                   is returned when id is None.

    :return list intervals: the working intervals of the scheduling.

    Note: Why not using rrule.rrule ? Because rrule does not seem to allow
    getting back in time.
    """
    if day_dt is None:
        day_dt = datetime.datetime.now()
    backwards = (hours < 0)
    hours = abs(hours)
    intervals = []
    remaining_hours = hours * 1.0
    iterations = 0
    current_datetime = day_dt

    call_args = dict(compute_leaves=compute_leaves, resource_id=resource_id, default_interval=default_interval, context=context)

    # walk day by day (forwards or backwards) until the requested hours are
    # consumed; iterations is a hard cap against infinite loops
    while float_compare(remaining_hours, 0.0, precision_digits=2) in (1, 0) and iterations < 1000:
        if backwards:
            call_args['end_dt'] = current_datetime
        else:
            call_args['start_dt'] = current_datetime

        working_intervals = self.get_working_intervals_of_day(cr, uid, id, **call_args)

        if id is None and not working_intervals:  # no calendar -> consider working 8 hours
            remaining_hours -= 8.0
        elif working_intervals:
            # truncate the day's intervals to the hours still to schedule;
            # reversed twice so truncation happens at the correct end
            if backwards:
                working_intervals.reverse()
            new_working_intervals = self.interval_schedule_hours(working_intervals, remaining_hours, not backwards)
            if backwards:
                new_working_intervals.reverse()

            # subtract the full day's available hours (not only the scheduled
            # part): once the day covers the remainder, the loop condition ends
            res = datetime.timedelta()
            for interval in working_intervals:
                res += interval[1] - interval[0]
            remaining_hours -= (seconds(res) / 3600.0)
            if backwards:
                intervals = new_working_intervals + intervals
            else:
                intervals = intervals + new_working_intervals
        # get next day
        if backwards:
            current_datetime = datetime.datetime.combine(self.get_previous_day(cr, uid, id, current_datetime, context), datetime.time(23, 59, 59))
        else:
            current_datetime = datetime.datetime.combine(self.get_next_day(cr, uid, id, current_datetime, context), datetime.time())
        # avoid infinite loops
        iterations += 1

    return intervals
def schedule_hours_get_date(self, cr, uid, id, hours, day_dt=None,
                            compute_leaves=False, resource_id=None,
                            default_interval=None, context=None):
    """ Wrapper on _schedule_hours: return the beginning/ending datetime of
    an hours scheduling, or False when nothing could be scheduled. """
    scheduled = self._schedule_hours(cr, uid, id, hours, day_dt, compute_leaves, resource_id, default_interval, context)
    if scheduled:
        return scheduled[0][0]
    return False
def schedule_hours(self, cr, uid, id, hours, day_dt=None,
                   compute_leaves=False, resource_id=None,
                   default_interval=None, context=None):
    """ Wrapper on _schedule_hours: return the working intervals of an hours
    scheduling. """
    return self._schedule_hours(
        cr, uid, id, hours,
        day_dt=day_dt, compute_leaves=compute_leaves, resource_id=resource_id,
        default_interval=default_interval, context=context)

# --------------------------------------------------
# Days scheduling
# --------------------------------------------------
def _schedule_days(self, cr, uid, id, days, day_date=None, compute_leaves=False,
                   resource_id=None, default_interval=None, context=None):
    """Schedule days of work, using a calendar and an optional resource to
    compute working and leave days. This method can be used backwards, i.e.
    scheduling days before a deadline.

    :param int days: number of days to schedule. Use a negative number to
                     compute a backwards scheduling.
    :param date day_date: reference date to compute working days. If days is > 0
                          date is the starting date. If days is < 0 date is the
                          ending date.
    :param boolean compute_leaves: if set, compute the leaves based on calendar
                                   and resource. Otherwise no leaves are taken
                                   into account.
    :param int resource_id: the id of the resource to take into account when
                            computing the leaves. If not set, only general
                            leaves are computed. If set, generic and
                            specific leaves are computed.
    :param tuple default_interval: if no id, try to return a default working
                                   day using default_interval[0] as beginning
                                   hour, and default_interval[1] as ending hour.
                                   Example: default_interval = (8, 16).
                                   Otherwise, a void list of working intervals
                                   is returned when id is None.

    :return list intervals: the working intervals of the scheduling.

    Implementation note: rrule.rrule is not used because rrule it des not seem
    to allow getting back in time.
    """
    if day_date is None:
        day_date = datetime.datetime.now()
    backwards = (days < 0)
    days = abs(days)
    intervals = []
    planned_days = 0
    iterations = 0
    # start from midnight of the reference date
    current_datetime = day_date.replace(hour=0, minute=0, second=0)

    # walk day by day until the requested number of worked days is reached;
    # iterations is a hard cap against infinite loops
    while planned_days < days and iterations < 1000:
        working_intervals = self.get_working_intervals_of_day(
            cr, uid, id, current_datetime,
            compute_leaves=compute_leaves, resource_id=resource_id,
            default_interval=default_interval,
            context=context)
        if id is None or working_intervals:  # no calendar -> no working hours, but day is considered as worked
            planned_days += 1
            intervals += working_intervals
        # get next day
        if backwards:
            current_datetime = self.get_previous_day(cr, uid, id, current_datetime, context)
        else:
            current_datetime = self.get_next_day(cr, uid, id, current_datetime, context)
        # avoid infinite loops
        iterations += 1

    return intervals
def schedule_days_get_date(self, cr, uid, id, days, day_date=None, compute_leaves=False,
                           resource_id=None, default_interval=None, context=None):
    """ Wrapper on _schedule_days: return the beginning/ending datetime of
    a days scheduling, or False when nothing could be scheduled. """
    scheduled = self._schedule_days(cr, uid, id, days, day_date, compute_leaves, resource_id, default_interval, context)
    if scheduled:
        return scheduled[-1][1]
    return False
def schedule_days(self, cr, uid, id, days, day_date=None, compute_leaves=False,
                  resource_id=None, default_interval=None, context=None):
    """ Wrapper on _schedule_days: return the working intervals of a days
    scheduling. """
    return self._schedule_days(
        cr, uid, id, days,
        day_date=day_date, compute_leaves=compute_leaves, resource_id=resource_id,
        default_interval=default_interval, context=context)

# --------------------------------------------------
# Compatibility / to clean / to remove
# --------------------------------------------------
def working_hours_on_day(self, cr, uid, resource_calendar_id, day, context=None):
    """ Used in hr_payroll/hr_payroll.py

    :param resource_calendar_id: a resource.calendar browse record (its
        ``.id`` attribute is used), despite the ``_id`` suffix.

    :deprecated: OpenERP saas-3. Use get_working_hours_of_date instead. Note:
    since saas-3, take hour/minutes into account, not just the whole day."""
    if isinstance(day, datetime.datetime):
        day = day.replace(hour=0, minute=0)
    # NOTE(review): the incoming `context` is discarded here (context=None) — confirm intended
    return self.get_working_hours_of_date(cr, uid, resource_calendar_id.id, start_dt=day, context=None)

def interval_min_get(self, cr, uid, id, dt_from, hours, resource=False):
    """ Schedule hours backwards. Used in mrp_operations/mrp_operations.py.

    :deprecated: OpenERP saas-3. Use schedule_hours instead. Note: since
    saas-3, counts leave hours instead of all-day leaves."""
    # negative hours -> backwards scheduling ending at dt_from
    return self.schedule_hours(
        cr, uid, id, hours * -1.0,
        day_dt=dt_from.replace(minute=0, second=0),
        compute_leaves=True, resource_id=resource,
        default_interval=(8, 16)
    )

def interval_get_multi(self, cr, uid, date_and_hours_by_cal, resource=False, byday=True):
    """ Used in mrp_operations/mrp_operations.py (default parameters) and in
    interval_get()

    :deprecated: OpenERP saas-3. Use schedule_hours instead. Note:
    Byday was not used. Since saas-3, counts Leave hours instead of all-day leaves."""
    res = {}
    for dt_str, hours, calendar_id in date_and_hours_by_cal:
        result = self.schedule_hours(
            cr, uid, calendar_id, hours,
            day_dt=datetime.datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S').replace(second=0),
            compute_leaves=True, resource_id=resource,
            default_interval=(8, 16)
        )
        # result is keyed by the original (date string, hours, calendar) triple
        res[(dt_str, hours, calendar_id)] = result
    return res

def interval_get(self, cr, uid, id, dt_from, hours, resource=False, byday=True):
    """ Unifier of interval_get_multi. Used in: mrp_operations/mrp_operations.py,
    crm/crm_lead.py (res given).

    :deprecated: OpenERP saas-3. Use get_working_hours instead."""
    res = self.interval_get_multi(
        cr, uid, [(dt_from.strftime('%Y-%m-%d %H:%M:%S'), hours, id)], resource, byday)[(dt_from.strftime('%Y-%m-%d %H:%M:%S'), hours, id)]
    return res

def interval_hours_get(self, cr, uid, id, dt_from, dt_to, resource=False):
    """ Unused wrapper.

    :deprecated: OpenERP saas-3. Use get_working_hours instead."""
    return self._interval_hours_get(cr, uid, id, dt_from, dt_to, resource_id=resource)

def _interval_hours_get(self, cr, uid, id, dt_from, dt_to, resource_id=False, timezone_from_uid=None, exclude_leaves=True, context=None):
    """ Computes working hours between two dates, taking always same hour/minuts.

    :deprecated: OpenERP saas-3. Use get_working_hours instead. Note: since saas-3,
    now resets hour/minuts. Now counts leave hours instead of all-day leaves."""
    return self.get_working_hours(
        cr, uid, id, dt_from, dt_to,
        compute_leaves=(not exclude_leaves), resource_id=resource_id,
        default_interval=(8, 16), context=context)
class resource_calendar_attendance(osv.osv):
    """ A single weekly working period of a resource.calendar: one day of
    week plus an hour range (hour_from - hour_to, as floats). """
    _name = "resource.calendar.attendance"
    _description = "Work Detail"

    _columns = {
        'name' : fields.char("Name", required=True),
        # stored as string '0'..'6', Monday first (matches datetime.weekday())
        'dayofweek': fields.selection([('0','Monday'),('1','Tuesday'),('2','Wednesday'),('3','Thursday'),('4','Friday'),('5','Saturday'),('6','Sunday')], 'Day of Week', required=True, select=True),
        'date_from' : fields.date('Starting Date'),
        # hours expressed as floats, e.g. 8.5 == 08:30
        'hour_from' : fields.float('Work from', required=True, help="Start and End time of working.", select=True),
        'hour_to' : fields.float("Work to", required=True),
        'calendar_id' : fields.many2one("resource.calendar", "Resource's Calendar", required=True),
    }

    _order = 'dayofweek, hour_from'

    _defaults = {
        'dayofweek' : '0'
    }
def hours_time_string(hours):
    """ convert a number of hours (float) into a string with format '%H:%M' """
    total_minutes = int(round(hours * 60))
    hh, mm = divmod(total_minutes, 60)
    return "%02d:%02d" % (hh, mm)
class resource_resource(osv.osv):
    """ A schedulable resource: either a human (linked to a res.users) or a
    material resource, with an efficiency factor and a working calendar. """
    _name = "resource.resource"
    _description = "Resource Detail"
    _columns = {
        'name': fields.char("Name", required=True),
        # not copied when duplicating a resource
        'code': fields.char('Code', size=16, copy=False),
        'active' : fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the resource record without removing it."),
        'company_id' : fields.many2one('res.company', 'Company'),
        'resource_type': fields.selection([('user','Human'),('material','Material')], 'Resource Type', required=True),
        'user_id' : fields.many2one('res.users', 'User', help='Related user name for the resource to manage its access.'),
        'time_efficiency' : fields.float('Efficiency Factor', size=8, required=True, help="This field depict the efficiency of the resource to complete tasks. e.g resource put alone on a phase of 5 days with 5 tasks assigned to him, will show a load of 100% for this phase by default, but if we put a efficiency of 200%, then his load will only be 50%."),
        'calendar_id' : fields.many2one("resource.calendar", "Working Time", help="Define the schedule of resource"),
    }

    _defaults = {
        'resource_type' : 'user',
        'time_efficiency' : 1,
        'active' : True,
        'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'resource.resource', context=context)
    }
def copy(self, cr, uid, id, default=None, context=None):
    """ Duplicate a resource, suffixing ' (copy)' to its name unless the
    caller already provided a name in `default`. """
    if default is None:
        default = {}
    if not default.get('name', False):
        # same dict is passed through (and mutated), as callers may expect
        default['name'] = _('%s (copy)') % (self.browse(cr, uid, id, context=context).name)
    return super(resource_resource, self).copy(cr, uid, id, default, context)
def generate_resources(self, cr, uid, user_ids, calendar_id, context=None):
    """
    Build the resource descriptors for the users allocated to a phase.

    :param list user_ids: ids of the res.users working on the phase
    :param int calendar_id: working calendar of the project
    :return dict: mapping user id -> {'name', 'vacation', 'efficiency'}
        (note: a dict, not a list, despite historical naming)

    NOTE: Used in project/project.py
    """
    resource_objs = {}
    user_pool = self.pool.get('res.users')
    for user in user_pool.browse(cr, uid, user_ids, context=context):
        # default descriptor: no vacation, nominal efficiency
        resource_objs[user.id] = {
            'name' : user.name,
            'vacation': [],
            'efficiency': 1.0,
        }
        # refine with the resource record(s) linked to this user, if any
        resource_ids = self.search(cr, uid, [('user_id', '=', user.id)], context=context)
        if resource_ids:
            for resource in self.browse(cr, uid, resource_ids, context=context):
                resource_objs[user.id]['efficiency'] = resource.time_efficiency
                resource_cal = resource.calendar_id.id
                if resource_cal:
                    leaves = self.compute_vacation(cr, uid, calendar_id, resource.id, resource_cal, context=context)
                    resource_objs[user.id]['vacation'] += list(leaves)
    return resource_objs
def compute_vacation(self, cr, uid, calendar_id, resource_id=False, resource_calendar=False, context=None):
    """
    Compute the vacation from the working calendar of the resource.

    @param calendar_id : working calendar of the project
    @param resource_id : resource working on phase/task
    @param resource_calendar : working calendar of the resource

    :return list: sorted list of 'YYYY-MM-DD' day strings, one entry per day
        covered by a leave (both leave bounds included).

    NOTE: used in project/project.py, and in generate_resources
    """
    resource_calendar_leaves_pool = self.pool.get('resource.calendar.leaves')
    leave_list = []
    if resource_id:
        # Leaves on the project calendar, plus leaves of this resource on
        # its own working calendar.
        leave_ids = resource_calendar_leaves_pool.search(cr, uid, ['|', ('calendar_id', '=', calendar_id),
                                                       ('calendar_id', '=', resource_calendar),
                                                       ('resource_id', '=', resource_id)
                                                      ], context=context)
    else:
        # General leaves only (not bound to any specific resource).
        leave_ids = resource_calendar_leaves_pool.search(cr, uid, [('calendar_id', '=', calendar_id),
                                                      ('resource_id', '=', False)
                                                      ], context=context)
    leaves = resource_calendar_leaves_pool.read(cr, uid, leave_ids, ['date_from', 'date_to'], context=context)
    for leave in leaves:
        dt_start = datetime.datetime.strptime(leave['date_from'], '%Y-%m-%d %H:%M:%S')
        dt_end = datetime.datetime.strptime(leave['date_to'], '%Y-%m-%d %H:%M:%S')
        no = dt_end - dt_start
        # Expand the leave period into one date string per covered day.
        # (Was a list comprehension used only for its side effects.)
        for x in range(int(no.days + 1)):
            leave_list.append((dt_start + datetime.timedelta(days=x)).strftime('%Y-%m-%d'))
    leave_list.sort()
    return leave_list
def compute_working_calendar(self, cr, uid, calendar_id=False, context=None):
    """
    Change the format of working calendar from 'Openerp' format to bring it into 'Faces' format.

    @param calendar_id : working calendar of the project
    :return list: tuples like ('mon', '8:00-12:00', '13:00-18:00'); a final
        tuple groups the non-working days with the zero-length range
        '8:00-8:00', e.g. ('sat,sun', '8:00-8:00').

    NOTE: used in project/project.py
    """
    if not calendar_id:
        # No calendar specified: default to Monday-Friday, 8-12 and 13-17
        # (the previous comment claiming "24/7" was wrong).
        return [('fri', '8:0-12:0','13:0-17:0'), ('thu', '8:0-12:0','13:0-17:0'), ('wed', '8:0-12:0','13:0-17:0'),
            ('mon', '8:0-12:0','13:0-17:0'), ('tue', '8:0-12:0','13:0-17:0')]
    resource_attendance_pool = self.pool.get('resource.calendar.attendance')
    time_range = "8:00-8:00"
    non_working = ""
    week_days = {"0": "mon", "1": "tue", "2": "wed", "3": "thu", "4": "fri", "5": "sat", "6": "sun"}
    wk_days = {}
    wk_time = {}
    wktime_list = []
    wktime_cal = []
    week_ids = resource_attendance_pool.search(cr, uid, [('calendar_id', '=', calendar_id)], context=context)
    weeks = resource_attendance_pool.read(cr, uid, week_ids, ['dayofweek', 'hour_from', 'hour_to'], context=context)
    # Convert time formats into appropriate format required
    # and create a list like [('mon', '8:00-12:00'), ('mon', '13:00-18:00')]
    for week in weeks:
        # 'in' instead of the Python-2-only dict.has_key / .get(..., False)
        if week['dayofweek'] not in week_days:
            raise osv.except_osv(_('Configuration Error!'),_('Make sure the Working time has been configured with proper week days!'))
        day = week_days[week['dayofweek']]
        wk_days[week['dayofweek']] = day
        res_str = hours_time_string(week['hour_from']) + '-' + hours_time_string(week['hour_to'])
        wktime_list.append((day, res_str))
    # Group per day into tuples like ('mon', '8:00-12:00', '13:00-18:00')
    for item in wktime_list:
        if item[0] in wk_time:
            wk_time[item[0]].append(item[1])
        else:
            # first entry for this day: seed with the day name itself
            wk_time[item[0]] = [item[0], item[1]]
    for k, v in wk_time.items():
        wktime_cal.append(tuple(v))
    # Whatever remains in week_days after removing the worked days is the
    # set of non-working days; emit them as one tuple with a dummy range.
    for k in wk_days:
        if k in week_days:
            week_days.pop(k)
    for v in week_days.values():
        non_working += v + ','
    if non_working:
        wktime_cal.append((non_working[:-1], time_range))
    return wktime_cal
class resource_calendar_leaves(osv.osv):
    """Leave (vacation/holiday) periods, optionally tied to one resource."""
    _name = "resource.calendar.leaves"
    _description = "Leave Detail"
    _columns = {
        'name' : fields.char("Name"),
        # Company is denormalized from the working-time calendar (stored,
        # read-only related field).
        'company_id' : fields.related('calendar_id','company_id',type='many2one',relation='res.company',string="Company", store=True, readonly=True),
        'calendar_id' : fields.many2one("resource.calendar", "Working Time"),
        'date_from' : fields.datetime('Start Date', required=True),
        'date_to' : fields.datetime('End Date', required=True),
        'resource_id' : fields.many2one("resource.resource", "Resource", help="If empty, this is a generic holiday for the company. If a resource is set, the holiday/leave is only for this resource"),
    }

    def check_dates(self, cr, uid, ids, context=None):
        """Constraint helper: every leave must start before it ends."""
        for leave in self.browse(cr, uid, ids, context=context):
            if leave.date_from and leave.date_to and leave.date_from > leave.date_to:
                return False
        return True

    _constraints = [
        # Fixed user-facing typo: "lower then" -> "lower than".
        (check_dates, 'Error! leave start-date must be lower than leave end-date.', ['date_from', 'date_to'])
    ]

    def onchange_resource(self, cr, uid, ids, resource, context=None):
        """On-change handler: default the calendar from the chosen resource."""
        result = {}
        if resource:
            resource_pool = self.pool.get('resource.resource')
            result['calendar_id'] = resource_pool.browse(cr, uid, resource, context=context).calendar_id.id
            return {'value': result}
        return {'value': {'calendar_id': []}}
def seconds(td):
    """Return the total duration of a datetime.timedelta in fractional seconds."""
    assert isinstance(td, datetime.timedelta)
    # Equivalent to the previous manual microsecond arithmetic
    # ((us + (s + days*86400) * 10**6) / 10.**6); total_seconds() is
    # available since Python 2.7 and computes exactly this value.
    return td.total_seconds()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
akretion/sale-workflow | refs/heads/8.0 | __unported__/product_special_type_invoice/__openerp__.py | 3 | # -*- coding: utf-8 -*-
#
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
    "name": "Product Special Type on Invoice",
    "version": "1.0",
    "author": "Camptocamp",
    "license": "AGPL-3",
    "category": "Hidden/Links",
    "description":
    """
According to the products special types (discount, advance, delivery),
compute totals on invoices.
""",
    "website": "http://camptocamp.com",
    # Needs the special-type flags on products plus core accounting.
    "depends": ['product_special_type',
                'account', ],
    # Legacy manifest keys (pre-"data"/"demo" naming); all empty here.
    "init_xml": [],
    "demo_xml": [],
    "update_xml": [],
    "active": False,
    # Module lives under __unported__, hence deliberately not installable.
    'installable': False
}
|
omni5cience/django-inlineformfield | refs/heads/master | .tox/py27/lib/python2.7/site-packages/django/db/backends/oracle/schema.py | 56 | import copy
import datetime
import binascii
from django.utils import six
from django.utils.text import force_text
from django.db.backends.schema import BaseDatabaseSchemaEditor
from django.db.utils import DatabaseError
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
    """Oracle schema editor: Oracle-flavoured DDL plus LOB-change workarounds."""

    # Oracle uses MODIFY rather than ALTER COLUMN, and needs CASCADE
    # CONSTRAINTS to drop a table that other tables reference.
    sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
    sql_alter_column_type = "MODIFY %(column)s %(type)s"
    sql_alter_column_null = "MODIFY %(column)s NULL"
    sql_alter_column_not_null = "MODIFY %(column)s NOT NULL"
    sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s"
    sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL"
    sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
    sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS"

    def quote_value(self, value):
        """Render *value* as an inline SQL literal suitable for Oracle DDL."""
        if isinstance(value, (datetime.date, datetime.time, datetime.datetime)):
            return "'%s'" % value
        elif isinstance(value, six.string_types):
            # Escape embedded single quotes by doubling them.
            return "'%s'" % six.text_type(value).replace("\'", "\'\'")
        elif isinstance(value, six.buffer_types):
            # Binary data is emitted as a hex string literal.
            return "'%s'" % force_text(binascii.hexlify(value))
        elif isinstance(value, bool):
            # Oracle has no boolean type; booleans are stored as 1/0.
            return "1" if value else "0"
        else:
            return str(value)

    def delete_model(self, model):
        # Run superclass action
        super(DatabaseSchemaEditor, self).delete_model(model)
        # Clean up any autoincrement trigger: drop the backing sequence if it
        # exists (PL/SQL block because DROP SEQUENCE has no IF EXISTS).
        self.execute("""
            DECLARE
                i INTEGER;
            BEGIN
                SELECT COUNT(*) INTO i FROM USER_CATALOG
                    WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
                IF i = 1 THEN
                    EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
                END IF;
            END;
        /""" % {'sq_name': self.connection.ops._get_sequence_name(model._meta.db_table)})

    def alter_field(self, model, old_field, new_field, strict=False):
        """Alter a field, falling back to the LOB workaround when Oracle
        refuses the direct type change."""
        try:
            # Run superclass action
            super(DatabaseSchemaEditor, self).alter_field(model, old_field, new_field, strict)
        except DatabaseError as e:
            description = str(e)
            # If we're changing to/from LOB fields, we need to do a
            # SQLite-ish workaround
            if 'ORA-22858' in description or 'ORA-22859' in description:
                self._alter_field_lob_workaround(model, old_field, new_field)
            else:
                raise

    def _alter_field_lob_workaround(self, model, old_field, new_field):
        """
        Oracle refuses to change a column type from/to LOB to/from a regular
        column. In Django, this shows up when the field is changed from/to
        a TextField.
        What we need to do instead is:
        - Add the desired field with a temporary name
        - Update the table to transfer values from old to new
        - Drop old column
        - Rename the new column
        """
        # Make a new field that's like the new one but with a temporary
        # column name.
        new_temp_field = copy.deepcopy(new_field)
        new_temp_field.column = self._generate_temp_name(new_field.column)
        # Add it
        self.add_field(model, new_temp_field)
        # Transfer values across
        self.execute("UPDATE %s set %s=%s" % (
            self.quote_name(model._meta.db_table),
            self.quote_name(new_temp_field.column),
            self.quote_name(old_field.column),
        ))
        # Drop the old field
        self.remove_field(model, old_field)
        # Rename the new field (recursive alter_field; this one is a plain
        # rename so it will not hit the LOB error again)
        self.alter_field(model, new_temp_field, new_field)
        # Close the connection to force cx_Oracle to get column types right
        # on a new cursor
        self.connection.close()

    def normalize_name(self, name):
        """
        Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
        """
        nn = self.quote_name(name)
        if nn[0] == '"' and nn[-1] == '"':
            nn = nn[1:-1]
        return nn

    def _generate_temp_name(self, for_name):
        """
        Generates temporary names for workarounds that need temp columns
        """
        # NOTE(review): hash() is not stable across processes (hash
        # randomization); presumably fine since the temp column only lives
        # within a single schema-editing operation -- confirm.
        suffix = hex(hash(for_name)).upper()[1:]
        return self.normalize_name(for_name + "_" + suffix)

    def prepare_default(self, value):
        """Return *value* quoted for use as a column DEFAULT clause."""
        return self.quote_value(value)
|
CourseTalk/edx-platform | refs/heads/master | openedx/core/djangolib/markup.py | 5 | """
Utilities for use in Mako markup.
"""
import markupsafe
# Text() can be used to declare a string as plain text, as HTML() is used
# for HTML. It simply wraps markupsafe's escape, which will HTML-escape if
# it isn't already escaped (markupsafe.Markup instances pass through as-is).
Text = markupsafe.escape  # pylint: disable=invalid-name
def HTML(html):  # pylint: disable=invalid-name
    """
    Mark a string as already HTML, so that it won't be escaped before output.

    Use this function when formatting HTML into other strings. It must be
    used in conjunction with ``Text()``, and both ``HTML()`` and ``Text()``
    must be closed before any calls to ``format()``::

        <%page expression_filter="h"/>
        <%!
        from django.utils.translation import ugettext as _
        from openedx.core.djangolib.markup import Text, HTML
        %>
        ${Text(_("Write & send {start}email{end}")).format(
            start=HTML("<a href='mailto:{}'>".format(user.email)),
            end=HTML("</a>"),
        )}
    """
    return markupsafe.Markup(html)
|
yangdongsheng/autotest | refs/heads/master | client/kernel.py | 3 | import os, copy, pickle, re, glob, time, logging
from autotest.client import kernel_config, os_dep, kernelexpand
from autotest.client import utils
from autotest.client.shared import log, error
def tee_output_logdir_mark(fn):
    """Decorator: tee the job's debug output into self.log_dir while fn runs.

    Emits "--- START/END Class.method ---" marker lines around the call so
    the per-step log is easy to delimit, and always restores the job's
    logging even if fn raises (the END marker is only logged on success,
    matching the original behaviour).
    """
    # Local import: keeps the module's file-level import block untouched.
    import functools

    # functools.wraps preserves __name__ (as before) and additionally
    # __doc__/__module__/__wrapped__, which the manual copy lost.
    @functools.wraps(fn)
    def tee_logdir_mark_wrapper(self, *args, **dargs):
        mark = self.__class__.__name__ + "." + fn.__name__
        logging.info("--- START %s ---", mark)
        self.job.logging.tee_redirect_debug_dir(self.log_dir)
        try:
            result = fn(self, *args, **dargs)
        finally:
            self.job.logging.restore()
        logging.info("--- END %s ---", mark)
        return result

    return tee_logdir_mark_wrapper
def _add_kernel_to_bootloader(bootloader, base_args, tag, args, image, initrd):
    """
    Add a kernel with the specified tag to the boot config using the given
    bootloader object, replacing any existing entry with the same tag.

    NOTE(review): an earlier version of this docstring claimed that all
    root= options are stripped and the last one handed to the bootloader as
    a root device; this implementation does NOT do that -- base_args and
    args are simply concatenated and passed through. Confirm against the
    bootloader's add_kernel() expectations.

    @param bootloader: bootloader object
    @param base_args: base cmdline kernel arguments
    @param tag: kernel tag
    @param args: kernel cmdline arguments that are merged with base_args; a
            root= option in "args" will override any from base_args
    @param image: kernel image file
    @param initrd: initrd file
    """
    # remove existing entry if present
    bootloader.remove_kernel(tag)
    if base_args:
        if args:
            # args comes last so its options win over base_args.
            args = '%s %s' % (base_args, args)
        else:
            args = base_args
    bootloader.add_kernel(path=image, title=tag, initrd=initrd, args=args)
class BootableKernel(object):
    """Shared behaviour for kernels that can be registered with the
    bootloader and booted (both source-built and binary-rpm kernels)."""

    def __init__(self, job):
        self.job = job
        self.installed_as = None    # kernel choice in bootloader menu
        self.image = None           # path of the installed kernel image
        self.initrd = ''            # path of the initrd ('' if none)

    def _boot_kernel(self, args, ident_check, expected_ident, subdir, notes):
        """
        Boot a kernel, with post-boot kernel id check

        @param args: kernel cmdline arguments
        @param ident_check: check kernel id after boot
        @param expected_ident: identity string the booted kernel must report
        @param subdir: job-step qualifier in status log
        @param notes: additional comment in status log
        """
        # If we can check the kernel identity do so.
        if ident_check:
            when = int(time.time())
            # IDENT is picked up after reboot to verify the right kernel ran.
            args += " IDENT=%d" % when
            self.job.next_step_prepend(["job.end_reboot_and_verify", when,
                                        expected_ident, subdir, notes])
        else:
            self.job.next_step_prepend(["job.end_reboot", subdir,
                                        expected_ident, notes])
        self.add_to_bootloader(args)
        # defer fsck for next reboot, to avoid reboots back to default kernel
        utils.system('touch /fastboot')  # this file is removed automatically
        # Boot it.
        self.job.start_reboot()
        self.job.reboot(tag=self.installed_as)

    def add_to_bootloader(self, args=''):
        # Point bootloader to the selected tag.
        _add_kernel_to_bootloader(self.job.bootloader,
                                  self.job.config_get('boot.default_args'),
                                  self.installed_as, args, self.image,
                                  self.initrd)
class kernel(BootableKernel):
""" Class for compiling kernels.
Data for the object includes the src files
used to create the kernel, patches applied, config (base + changes),
the build directory itself, and logged output
Properties:
job
Backpointer to the job object we're part of
autodir
Path to the top level autotest dir (see global_config.ini,
session COMMON/autotest_top_path)
src_dir
<tmp_dir>/src/
build_dir
<tmp_dir>/linux/
config_dir
<results_dir>/config/
log_dir
<results_dir>/debug/
results_dir
<results_dir>/results/
"""
autodir = ''
    def __init__(self, job, base_tree, subdir, tmp_dir, build_dir, leave=False):
        """Initialize the kernel build environment

        job
                which job this build is part of
        base_tree
                base kernel tree. Can be one of the following:
                        1. A local tarball
                        2. A URL to a tarball
                        3. A local directory (will symlink it)
                        4. A shorthand expandable (eg '2.6.11-git3')
        subdir
                subdir in the results directory (eg "build")
                (holds config/, debug/, results/)
        tmp_dir
                scratch area for src/ and the build tree
        leave
                Boolean, whether to leave existing tmpdir or not
        """
        super(kernel, self).__init__(job)
        self.autodir = job.autodir
        self.src_dir = os.path.join(tmp_dir, 'src')
        self.build_dir = os.path.join(tmp_dir, build_dir)
        # created by get_kernel_tree
        self.config_dir = os.path.join(subdir, 'config')
        self.log_dir = os.path.join(subdir, 'debug')
        self.results_dir = os.path.join(subdir, 'results')
        self.subdir = os.path.basename(subdir)
        if not leave:
            # Start from a clean slate: wipe any previous src/build trees.
            if os.path.isdir(self.src_dir):
                utils.system('rm -rf ' + self.src_dir)
            if os.path.isdir(self.build_dir):
                utils.system('rm -rf ' + self.build_dir)
        if not os.path.exists(self.src_dir):
            os.mkdir(self.src_dir)
        for path in [self.config_dir, self.log_dir, self.results_dir]:
            if os.path.exists(path):
                utils.system('rm -rf ' + path)
            os.mkdir(path)
        logpath = os.path.join(self.log_dir, 'build_log')
        self.logfile = open(logpath, 'w+')
        self.applied_patches = []
        self.target_arch = None
        self.build_target = 'bzImage'
        self.build_image = None
        # Architecture-specific make targets / image names.
        arch = utils.get_current_kernel_arch()
        if arch == 's390' or arch == 's390x':
            self.build_target = 'image'
        elif arch == 'ia64':
            self.build_target = 'all'
            self.build_image = 'vmlinux.gz'
        if not leave:
            self.logfile.write('BASE: %s\n' % base_tree)
            # Where we have direct version hint record that
            # for later configuration selection.
            shorthand = re.compile(r'^\d+\.\d+\.\d+')
            if shorthand.match(base_tree):
                self.base_tree_version = base_tree
            else:
                self.base_tree_version = None
            # Actually extract the tree. Make sure we know it occured
            self.extract(base_tree)
    def kernelexpand(self, kernel):
        """Expand a kernel shorthand (eg '2.6.11-git3') into a list of
        tarball/patch URLs using the configured mirrors.

        Anything containing a '/' is assumed to be an explicit path or URL
        and is returned unchanged as a single-element list.
        """
        # If we have something like a path, just use it as it is
        if '/' in kernel:
            return [kernel]
        # Find the configured mirror list.
        mirrors = self.job.config_get('mirror.mirrors')
        if not mirrors:
            # LEGACY: convert the kernel.org mirror
            mirror = self.job.config_get('mirror.ftp_kernel_org')
            if mirror:
                korg = 'http://www.kernel.org/pub/linux/kernel'
                mirrors = [
                    [ korg + '/v2.6', mirror + '/v2.6' ],
                    [ korg + '/people/akpm/patches/2.6', mirror + '/akpm' ],
                    [ korg + '/people/mbligh', mirror + '/mbligh' ],
                ]
        patches = kernelexpand.expand_classic(kernel, mirrors)
        print patches
        return patches
    @log.record
    @tee_output_logdir_mark
    def extract(self, base_tree):
        """Obtain the base kernel tree (local path, URL or shorthand) and
        apply any remaining component patches the shorthand expanded to."""
        if os.path.exists(base_tree):
            self.get_kernel_tree(base_tree)
        else:
            base_components = self.kernelexpand(base_tree)
            print 'kernelexpand: '
            print base_components
            # First component is the base tarball; the rest are patches.
            self.get_kernel_tree(base_components.pop(0))
            if base_components:     # apply remaining patches
                self.patch(*base_components)
    @log.record
    @tee_output_logdir_mark
    def patch(self, *patches):
        """Apply a list of patches (in order)"""
        if not patches:
            return
        print 'Applying patches: ', patches
        # Fetch each patch into src_dir first, then apply in sequence.
        self.apply_patches(self.get_patches(patches))
    @log.record
    @tee_output_logdir_mark
    def config(self, config_file='', config_list=None, defconfig=False,
               make=None):
        """Configure the kernel tree via the kernel_config helper,
        recording the resulting .config under config_dir."""
        self.set_cross_cc()
        kernel_config.kernel_config(self.job, self.build_dir, self.config_dir,
                                    config_file, config_list, defconfig,
                                    self.base_tree_version, make)
    def get_patches(self, patches):
        """fetch the patches to the local src_dir

        @return list of (spec, local_path, md5sum) tuples, in input order
        """
        local_patches = []
        for patch in patches:
            dest = os.path.join(self.src_dir, os.path.basename(patch))
            # FIXME: this isn't unique. Append something to it
            # like wget does if it's not there?
            print "get_file %s %s %s %s" % (patch, dest, self.src_dir,
                                            os.path.basename(patch))
            utils.get_file(patch, dest)
            # probably safer to use the command, not python library
            md5sum = utils.system_output('md5sum ' + dest).split()[0]
            local_patches.append((patch, dest, md5sum))
        return local_patches
    def apply_patches(self, local_patches):
        """apply the list of patches, in order

        Each patch is recorded in the build log and in applied_patches as
        "spec ref md5sum".
        """
        builddir = self.build_dir
        os.chdir(builddir)
        if not local_patches:
            return None
        for (spec, local, md5sum) in local_patches:
            if local.endswith('.bz2') or local.endswith('.gz'):
                # Compressed patch: reference it by its original spec.
                ref = spec
            else:
                ref = utils.force_copy(local, self.results_dir)
                ref = self.job.relative_path(ref)
            patch_id = "%s %s %s" % (spec, ref, md5sum)
            # NOTE(review): local name 'log' shadows the imported 'log'
            # module within this loop body.
            log = "PATCH: " + patch_id + "\n"
            print log
            # cat_file_to_cmd transparently decompresses .bz2/.gz patches.
            utils.cat_file_to_cmd(local, 'patch -p1 > /dev/null')
            self.logfile.write(log)
            self.applied_patches.append(patch_id)
    def get_kernel_tree(self, base_tree):
        """Extract/link base_tree to self.build_dir"""
        # if base_tree is a dir, assume uncompressed kernel
        if os.path.isdir(base_tree):
            print 'Symlinking existing kernel source'
            if os.path.islink(self.build_dir):
                os.remove(self.build_dir)
            os.symlink(base_tree, self.build_dir)
        # otherwise, extract tarball
        else:
            os.chdir(os.path.dirname(self.src_dir))
            # Figure out local destination for tarball
            # (split(';')[0] drops any trailing download hints in the spec)
            tarball = os.path.join(self.src_dir, os.path.basename(base_tree.split(';')[0]))
            utils.get_file(base_tree, tarball)
            print 'Extracting kernel tarball:', tarball, '...'
            utils.extract_tarball_to_dir(tarball, self.build_dir)
    def extraversion(self, tag, append=True):
        """Set (or append to) CONFIG_LOCALVERSION in .config so the built
        kernel identifies itself with *tag*, then re-run oldconfig."""
        os.chdir(self.build_dir)
        # sed script: rewrite the CONFIG_LOCALVERSION="..." line.
        extraversion_sub = r's/^CONFIG_LOCALVERSION=\s*"\(.*\)"/CONFIG_LOCALVERSION='
        cfg = self.build_dir + '/.config'
        if append:
            # Keep the existing value and append "-tag".
            p = extraversion_sub + '"\\1-%s"/' % tag
        else:
            p = extraversion_sub + '"-%s"/' % tag
        if os.path.exists(cfg):
            utils.system('mv %s %s.old' % (cfg, cfg))
            utils.system("sed '%s' < %s.old > %s" % (p, cfg, cfg))
            self.config(make='oldconfig')
        else:
            # No .config yet: run a full configure instead.
            self.config()
    @log.record
    @tee_output_logdir_mark
    def build(self, make_opts='', logfile ='', extraversion='autotest'):
        """build the kernel

        make_opts
                additional options to make, if any
        logfile
                destination for the build log (defaults to log_dir/kernel_build)
        extraversion
                tag appended to CONFIG_LOCALVERSION ('' to skip)
        """
        os_dep.commands('gcc', 'make')
        if logfile == '':
            logfile = os.path.join(self.log_dir, 'kernel_build')
        os.chdir(self.build_dir)
        if extraversion:
            self.extraversion(extraversion)
        self.set_cross_cc()
        # setup_config_file(config_file, config_overrides)
        # Not needed on 2.6, but hard to tell -- handle failure
        utils.system('make dep', ignore_status=True)
        threads = 2 * utils.count_cpus()
        build_string = 'make -j %d %s %s' % (threads, make_opts,
                                             self.build_target)
        # eg make bzImage, or make zImage
        print build_string
        utils.system(build_string)
        if kernel_config.modules_needed('.config'):
            utils.system('make -j %d modules' % (threads))
        kernel_version = self.get_kernel_build_ver()
        # Strip the autotest localversion tag before logging the version.
        kernel_version = re.sub('-autotest', '', kernel_version)
        self.logfile.write('BUILD VERSION: %s\n' % kernel_version)
        utils.force_copy(self.build_dir + '/System.map', self.results_dir)
    def build_timed(self, threads, timefile='/dev/null', make_opts='',
                    output='/dev/null'):
        """time the bulding of the kernel

        Runs 'make vmlinux' under /usr/bin/time; the timing goes to
        *timefile*, build output to *output*. Raises TestError if no
        vmlinux was produced.
        """
        os.chdir(self.build_dir)
        self.set_cross_cc()
        self.clean()
        build_string = ("/usr/bin/time -o %s make %s -j %s vmlinux" %
                        (timefile, make_opts, threads))
        build_string += ' > %s 2>&1' % output
        print build_string
        utils.system(build_string)
        if (not os.path.isfile('vmlinux')):
            errmsg = "no vmlinux found, kernel build failed"
            raise error.TestError(errmsg)
    @log.record
    @tee_output_logdir_mark
    def clean(self):
        """make clean in the kernel tree"""
        os.chdir(self.build_dir)
        print "make clean"
        # Discard make's own output; the job-level tee captures our marker.
        utils.system('make clean > /dev/null 2> /dev/null')
@log.record
@tee_output_logdir_mark
def mkinitrd(self, version, image, system_map, initrd):
"""Build kernel initrd image.
Try to use distro specific way to build initrd image.
Parameters:
version
new kernel version
image
new kernel image file
system_map
System.map file
initrd
initrd image file to build
"""
vendor = utils.get_os_vendor()
if os.path.isfile(initrd):
print "Existing %s file, will remove it." % initrd
os.remove(initrd)
args = self.job.config_get('kernel.mkinitrd_extra_args')
# don't leak 'None' into mkinitrd command
if not args:
args = ''
# It is important to match the version with a real directory inside
# /lib/modules
real_version_list = glob.glob('/lib/modules/%s*' % version)
rl = len(real_version_list)
if rl == 0:
logging.error("No directory %s found under /lib/modules. Initramfs"
"creation will most likely fail and your new kernel"
"will fail to build", version)
else:
if rl > 1:
logging.warning("Found more than one possible match for "
"kernel version %s under /lib/modules", version)
version = os.path.basename(real_version_list[0])
if vendor in ['Red Hat', 'Fedora']:
try:
cmd = os_dep.command('dracut')
full_cmd = '%s -f %s %s' % (cmd, initrd, version)
except ValueError:
cmd = os_dep.command('mkinitrd')
full_cmd = '%s %s %s %s' % (cmd, args, initrd, version)
utils.system(full_cmd)
elif vendor in ['SUSE']:
utils.system('mkinitrd %s -k %s -i %s -M %s' %
(args, image, initrd, system_map))
elif vendor in ['Debian', 'Ubuntu']:
if os.path.isfile('/usr/sbin/mkinitrd'):
cmd = '/usr/sbin/mkinitrd'
elif os.path.isfile('/usr/sbin/mkinitramfs'):
cmd = '/usr/sbin/mkinitramfs'
else:
raise error.TestError('No Debian initrd builder')
utils.system('%s %s -o %s %s' % (cmd, args, initrd, version))
else:
raise error.TestError('Unsupported vendor %s' % vendor)
    def set_build_image(self, image):
        # Override the image path (relative to the build tree) that install()
        # will copy, instead of auto-detecting it from the build target.
        self.build_image = image
    @log.record
    @tee_output_logdir_mark
    def install(self, tag='autotest', prefix='/', install_vmlinux=True):
        """make install in the kernel tree

        Copies the image, System.map and .config into <prefix>/boot with a
        '-<tag>' suffix, installs modules, and builds an initrd when
        installing to the live root.
        """
        # Record that we have installed the kernel, and
        # the tag under which we installed it.
        self.installed_as = tag
        os.chdir(self.build_dir)
        if not os.path.isdir(prefix):
            os.mkdir(prefix)
        self.boot_dir = os.path.join(prefix, 'boot')
        if not os.path.isdir(self.boot_dir):
            os.mkdir(self.boot_dir)
        if not self.build_image:
            # Auto-detect the produced image under arch/*/boot.
            images = glob.glob('arch/*/boot/' + self.build_target)
            if len(images):
                self.build_image = images[0]
            else:
                self.build_image = self.build_target
        # remember installed files
        self.vmlinux = self.boot_dir + '/vmlinux-' + tag
        if (self.build_image != 'vmlinux'):
            self.image = self.boot_dir + '/vmlinuz-' + tag
        else:
            self.image = self.vmlinux
            # vmlinux is the only image produced, so it must be copied.
            install_vmlinux = True
        self.system_map = self.boot_dir + '/System.map-' + tag
        self.config_file = self.boot_dir + '/config-' + tag
        self.initrd = ''
        # copy to boot dir
        if install_vmlinux:
            utils.force_copy('vmlinux', self.vmlinux)
        if (self.build_image != 'vmlinux'):
            utils.force_copy(self.build_image, self.image)
        utils.force_copy('System.map', self.system_map)
        utils.force_copy('.config', self.config_file)
        if not kernel_config.modules_needed('.config'):
            return
        utils.system('make modules_install INSTALL_MOD_PATH=%s' % prefix)
        if prefix == '/':
            # Only build an initrd when installing into the live root.
            self.initrd = self.boot_dir + '/initrd-' + tag
            self.mkinitrd(self.get_kernel_build_ver(), self.image,
                          self.system_map, self.initrd)
def get_kernel_build_arch(self, arch=None):
"""
Work out the current kernel architecture (as a kernel arch)
"""
if not arch:
arch = utils.get_current_kernel_arch()
if re.match('i.86', arch):
return 'i386'
elif re.match('sun4u', arch):
return 'sparc64'
elif re.match('arm.*', arch):
return 'arm'
elif re.match('sa110', arch):
return 'arm'
elif re.match('s390x', arch):
return 's390'
elif re.match('parisc64', arch):
return 'parisc'
elif re.match('ppc.*', arch):
return 'powerpc'
elif re.match('mips.*', arch):
return 'mips'
else:
return arch
def get_kernel_build_release(self):
releasem = re.compile(r'.*UTS_RELEASE\s+"([^"]+)".*');
versionm = re.compile(r'.*UTS_VERSION\s+"([^"]+)".*');
release = None
version = None
for f in [self.build_dir + "/include/linux/version.h",
self.build_dir + "/include/linux/utsrelease.h",
self.build_dir + "/include/linux/compile.h",
self.build_dir + "/include/generated/utsrelease.h",
self.build_dir + "/include/generated/compile.h"]:
if os.path.exists(f):
fd = open(f, 'r')
for line in fd.readlines():
m = releasem.match(line)
if m:
release = m.groups()[0]
m = versionm.match(line)
if m:
version = m.groups()[0]
fd.close()
return (release, version)
    def get_kernel_build_ident(self):
        """Return the 'release::version' identity string of the built tree.

        Raises JobError when either component cannot be determined.
        """
        (release, version) = self.get_kernel_build_release()
        if not release or not version:
            raise error.JobError('kernel has no identity')
        return release + '::' + version
    def boot(self, args='', ident=True):
        """ install and boot this kernel, do not care how
            just make it happen.
        """
        # If the kernel has not yet been installed,
        # install it now as default tag.
        if not self.installed_as:
            self.install()
        # Derive the identity the booted kernel must report back.
        expected_ident = self.get_kernel_build_ident()
        self._boot_kernel(args, ident, expected_ident,
                          self.subdir, self.applied_patches)
    def get_kernel_build_ver(self):
        """Check Makefile and .config to return kernel version

        Assembles VERSION.PATCHLEVEL.SUBLEVEL + EXTRAVERSION (Makefile)
        + CONFIG_LOCALVERSION (.config).
        NOTE(review): both files are opened without an explicit close;
        relies on CPython refcounting to release the descriptors.
        """
        version = patchlevel = sublevel = extraversion = localversion = ''
        for line in open(self.build_dir + '/Makefile', 'r').readlines():
            if line.startswith('VERSION'):
                version = line[line.index('=') + 1:].strip()
            if line.startswith('PATCHLEVEL'):
                patchlevel = line[line.index('=') + 1:].strip()
            if line.startswith('SUBLEVEL'):
                sublevel = line[line.index('=') + 1:].strip()
            if line.startswith('EXTRAVERSION'):
                extraversion = line[line.index('=') + 1:].strip()
        for line in open(self.build_dir + '/.config', 'r').readlines():
            if line.startswith('CONFIG_LOCALVERSION='):
                # Value is quoted: CONFIG_LOCALVERSION="-tag"
                localversion = line.rstrip().split('"')[1]
        return "%s.%s.%s%s%s" %(version, patchlevel, sublevel, extraversion, localversion)
    def set_build_target(self, build_target):
        # Explicitly choose the make target (eg 'bzImage', 'vmlinux');
        # falsy values leave the current target untouched.
        if build_target:
            self.build_target = build_target
            print 'BUILD TARGET: %s' % self.build_target
    def set_cross_cc(self, target_arch=None, cross_compile=None,
                     build_target='bzImage'):
        """Set up to cross-compile.
        This is broken. We need to work out what the default
        compile produces, and if not, THEN set the cross
        compiler.
        """
        if self.target_arch:
            return
        # if someone has set build_target, don't clobber in set_cross_cc
        # run set_build_target before calling set_cross_cc
        if not self.build_target:
            self.set_build_target(build_target)
        # If no 'target_arch' given assume native compilation
        if target_arch is None:
            target_arch = utils.get_current_kernel_arch()
            if target_arch == 'ppc64':
                # ppc64 has no bzImage target; build vmlinux instead.
                if self.build_target == 'bzImage':
                    self.build_target = 'vmlinux'
        if not cross_compile:
            cross_compile = self.job.config_get('kernel.cross_cc')
        if cross_compile:
            os.environ['CROSS_COMPILE'] = cross_compile
        else:
            if os.environ.has_key('CROSS_COMPILE'):  # has_key: Python 2 only
                del os.environ['CROSS_COMPILE']
        return                 # HACK. Crap out for now.
        # At this point I know what arch I *want* to build for
        # but have no way of working out what arch the default
        # compiler DOES build for.
        # NOTE(review): everything below is unreachable due to the return
        # above; kept as-is for historical context.
        def install_package(package):
            raise NotImplementedError("I don't exist yet!")
        if target_arch == 'ppc64':
            install_package('ppc64-cross')
            cross_compile = os.path.join(self.autodir, 'sources/ppc64-cross/bin')
        elif target_arch == 'x86_64':
            install_package('x86_64-cross')
            cross_compile = os.path.join(self.autodir, 'sources/x86_64-cross/bin')
        os.environ['ARCH'] = self.target_arch = target_arch
        self.cross_compile = cross_compile
        if self.cross_compile:
            os.environ['CROSS_COMPILE'] = self.cross_compile
def pickle_dump(self, filename):
"""dump a pickle of ourself out to the specified filename
we can't pickle the backreference to job (it contains fd's),
nor would we want to. Same for logfile (fd's).
"""
temp = copy.copy(self)
temp.job = None
temp.logfile = None
pickle.dump(temp, open(filename, 'w'))
class rpm_kernel(BootableKernel):
    """
    Class for installing a binary rpm kernel package
    """

    def __init__(self, job, rpm_package, subdir):
        # rpm_package is a list of .rpm paths (kernel + optional extras).
        super(rpm_kernel, self).__init__(job)
        self.rpm_package = rpm_package
        self.log_dir = os.path.join(subdir, 'debug')
        self.subdir = os.path.basename(subdir)
        if os.path.exists(self.log_dir):
            utils.system('rm -rf ' + self.log_dir)
        os.mkdir(self.log_dir)

    def build(self, *args, **dargs):
        """
        Dummy function, binary kernel so nothing to build.
        """
        pass

    @log.record
    @tee_output_logdir_mark
    def install(self, tag='autotest', install_vmlinux=True):
        """Install the rpm package(s) and record image/initrd/version info.

        Raises TestError when no /boot/vmlinuz is delivered, or when
        install_vmlinux is set but extracting vmlinux fails.
        """
        self.installed_as = tag
        self.image = None
        self.initrd = ''
        for rpm_pack in self.rpm_package:
            rpm_name = utils.system_output('rpm -qp ' + rpm_pack)
            # install
            utils.system('rpm -i --force ' + rpm_pack)
            # get file list
            files = utils.system_output('rpm -ql ' + rpm_name).splitlines()
            # search for vmlinuz
            for file in files:
                if file.startswith('/boot/vmlinuz'):
                    self.full_version = file[len('/boot/vmlinuz-'):]
                    self.image = file
                    self.rpm_flavour = rpm_name.split('-')[1]
                    # get version and release number
                    self.version, self.release = utils.system_output(
                            'rpm --queryformat="%{VERSION}\\n%{RELEASE}\\n" -q '
                            + rpm_name).splitlines()[0:2]
                    # prefer /boot/kernel-version before /boot/kernel
                    if self.full_version:
                        break
            # search for initrd
            for file in files:
                if file.startswith('/boot/init'):
                    self.initrd = file
                    # prefer /boot/initrd-version before /boot/initrd
                    if len(file) > len('/boot/initrd'):
                        break
        if self.image == None:
            errmsg = "specified rpm file(s) don't contain /boot/vmlinuz"
            raise error.TestError(errmsg)
        # install vmlinux
        if install_vmlinux:
            for rpm_pack in self.rpm_package:
                vmlinux = utils.system_output(
                        'rpm -q -l -p %s | grep /boot/vmlinux' % rpm_pack)
                # Extract vmlinux in place via rpm2cpio (rpm -i skips it).
                utils.system('cd /; rpm2cpio %s | cpio -imuv .%s 2>&1'
                             % (rpm_pack, vmlinux))
            if not os.path.exists(vmlinux):
                raise error.TestError('%s does not exist after installing %s'
                                      % (vmlinux, rpm_pack))

    def boot(self, args='', ident=True):
        """ install and boot this kernel
        """
        # If the kernel has not yet been installed,
        # install it now as default tag.
        if not self.installed_as:
            self.install()
        expected_ident = self.full_version
        if not expected_ident:
            # Fall back to version-flavour-release when the image carried
            # no version suffix.
            expected_ident = '-'.join([self.version,
                                       self.rpm_flavour,
                                       self.release])
        self._boot_kernel(args, ident, expected_ident,
                          None, 'rpm')
class rpm_kernel_suse(rpm_kernel):
    """ Class for installing openSUSE/SLE rpm kernel package
    """

    def install(self):
        # NOTE(review): narrows the parent's install(tag=..., install_vmlinux=...)
        # signature; callers passing arguments would break -- confirm intended.
        # do not set the new kernel as the default one
        os.environ['PBL_AUTOTEST'] = '1'
        rpm_kernel.install(self, 'dummy')
        # The real menu title comes from the bootloader, not our tag.
        self.installed_as = self.job.bootloader.get_title_for_kernel(self.image)
        if not self.installed_as:
            errmsg = "cannot find installed kernel in bootloader configuration"
            raise error.TestError(errmsg)

    def add_to_bootloader(self, tag='dummy', args=''):
        """ Set parameters of this kernel in bootloader
        """
        # pull the base argument set from the job config
        baseargs = self.job.config_get('boot.default_args')
        if baseargs:
            args = baseargs + ' ' + args
        self.job.bootloader.add_args(tag, args)
def rpm_kernel_vendor(job, rpm_package, subdir):
    """Pick the rpm_kernel implementation matching the installed distro."""
    vendor = utils.get_os_vendor()
    if vendor == "SUSE":
        kernel_class = rpm_kernel_suse
    else:
        kernel_class = rpm_kernel
    return kernel_class(job, rpm_package, subdir)
# just make the preprocessor a nop
def _preprocess_path_dummy(path):
return path.strip()
# pull in some optional site-specific path pre-processing; falls back to the
# whitespace-stripping dummy above when no site module provides one.
preprocess_path = utils.import_site_function(__file__,
    "autotest.client.site_kernel", "preprocess_path",
    _preprocess_path_dummy)
def auto_kernel(job, path, subdir, tmp_dir, build_dir, leave=False):
    """
    Create a kernel object, dynamically selecting the appropriate class to use
    based on the path provided.

    Args:
        job: the job this kernel belongs to.
        path: a kernel path; a '.list' file naming several rpm packages,
            one or more '.rpm' packages, or a single non-rpm (source) kernel.
        subdir: job results subdirectory for this kernel.
        tmp_dir: scratch directory used when fetching a '.list' file.
        build_dir: build directory (used by source kernels only).
        leave: passed through to the source-kernel constructor.

    Returns:
        A vendor-specific rpm_kernel for rpm packages, otherwise a kernel
        (source) object.

    Raises:
        error.TestError: if more than one non-rpm kernel file is given.
    """
    kernel_paths = [preprocess_path(path)]
    if kernel_paths[0].endswith('.list'):
        # Fetch the list of packages to install
        kernel_list = os.path.join(tmp_dir, 'kernel.list')
        utils.get_file(kernel_paths[0], kernel_list)
        # Close the list file deterministically (the original code leaked
        # the file handle returned by open()).
        list_file = open(kernel_list)
        try:
            kernel_paths = [p.strip() for p in list_file.readlines()]
        finally:
            list_file.close()
    if kernel_paths[0].endswith('.rpm'):
        rpm_paths = []
        for kernel_path in kernel_paths:
            if os.path.exists(kernel_path):
                rpm_paths.append(kernel_path)
            else:
                # Fetch the rpm into the job's packages directory and pass it to
                # rpm_kernel
                rpm_name = os.path.basename(kernel_path)
                # If the preprocessed path (kernel_path) is only a name then
                # search for the kernel in all the repositories, else fetch the
                # kernel from that specific path.
                job.pkgmgr.fetch_pkg(rpm_name, os.path.join(job.pkgdir, rpm_name),
                                     repo_url=os.path.dirname(kernel_path))
                rpm_paths.append(os.path.join(job.pkgdir, rpm_name))
        return rpm_kernel_vendor(job, rpm_paths, subdir)
    else:
        if len(kernel_paths) > 1:
            raise error.TestError(
                "don't know what to do with more than one non-rpm kernel file")
        return kernel(job, kernel_paths[0], subdir, tmp_dir, build_dir, leave)
|
bboreham/docker-py | refs/heads/master | docker/__init__.py | 64 | # Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .version import version, version_info
__version__ = version
__title__ = 'docker-py'
from .client import Client, AutoVersionClient # flake8: noqa
|
CTSRD-CHERI/u-boot | refs/heads/master | test/py/tests/test_mmc_rd.py | 8 | # SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
# Test U-Boot's "mmc read" command. The test reads data from the eMMC or SD
# card, and validates that no errors occurred, and that the expected data was
# read if the test configuration contains a CRC of the expected data.
import pytest
import time
import u_boot_utils
"""
This test relies on boardenv_* containing configuration values that define
which MMC devices should be tested. For example:
# Configuration data for test_mmc_dev, test_mmc_rescan, test_mmc_info; defines
# whole MMC devices that mmc dev/rescan/info commands may operate upon.
env__mmc_dev_configs = (
{
'fixture_id': 'emmc-boot0',
'is_emmc': True,
'devid': 0,
'partid': 1,
'info_device': ???,
'info_speed': ???,
'info_mode': ???,
        'info_buswidth': ???,
},
{
'fixture_id': 'emmc-boot1',
'is_emmc': True,
'devid': 0,
'partid': 2,
'info_device': ???,
'info_speed': ???,
'info_mode': ???,
        'info_buswidth': ???,
},
{
'fixture_id': 'emmc-data',
'is_emmc': True,
'devid': 0,
'partid': 0,
'info_device': ???,
'info_speed': ???,
'info_mode': ???,
        'info_buswidth': ???,
},
{
'fixture_id': 'sd',
'is_emmc': False,
'devid': 1,
'partid': None,
'info_device': ???,
'info_speed': ???,
'info_mode': ???,
        'info_buswidth': ???,
},
)
# Configuration data for test_mmc_rd; defines regions of the MMC (entire
# devices, or ranges of sectors) which can be read:
env__mmc_rd_configs = (
{
'fixture_id': 'emmc-boot0',
'is_emmc': True,
'devid': 0,
'partid': 1,
'sector': 0x10,
'count': 1,
},
{
'fixture_id': 'emmc-boot1',
'is_emmc': True,
'devid': 0,
'partid': 2,
'sector': 0x10,
'count': 1,
},
{
'fixture_id': 'emmc-data',
'is_emmc': True,
'devid': 0,
'partid': 0,
'sector': 0x10,
'count': 0x1000,
},
{
'fixture_id': 'sd-mbr',
'is_emmc': False,
'devid': 1,
'partid': None,
'sector': 0,
'count': 1,
'crc32': '8f6ecf0d',
},
{
'fixture_id': 'sd-large',
'is_emmc': False,
'devid': 1,
'partid': None,
'sector': 0x10,
'count': 0x1000,
},
)
"""
def mmc_dev(u_boot_console, is_emmc, devid, partid):
    """Select an MMC device with the "mmc dev" command and verify the result.

    Args:
        u_boot_console: A U-Boot console connection.
        is_emmc: Whether the device is eMMC
        devid: Device ID
        partid: Partition ID

    Returns:
        Nothing.
    """

    # eMMC devices additionally take a hardware partition ID.
    cmd = 'mmc dev %d' % devid
    if is_emmc:
        cmd += ' %d' % partid
    response = u_boot_console.run_command(cmd)
    assert 'no card present' not in response
    # The success banner mentions the partition only for eMMC devices.
    part_suffix = '(part %d)' % partid if is_emmc else ''
    assert 'mmc%d%s is current device' % (devid, part_suffix) in response
@pytest.mark.buildconfigspec('cmd_mmc')
def test_mmc_dev(u_boot_console, env__mmc_dev_config):
    """Test the "mmc dev" command.

    Args:
        u_boot_console: A U-Boot console connection.
        env__mmc_dev_config: The single MMC configuration on which
            to run the test. See the file-level comment above for details
            of the format.

    Returns:
        Nothing.
    """

    cfg = env__mmc_dev_config
    # Select MMC device; partid defaults to 0 for non-eMMC devices.
    mmc_dev(u_boot_console, cfg['is_emmc'], cfg['devid'], cfg.get('partid', 0))
@pytest.mark.buildconfigspec('cmd_mmc')
def test_mmc_rescan(u_boot_console, env__mmc_dev_config):
    """Test the "mmc rescan" command.

    Args:
        u_boot_console: A U-Boot console connection.
        env__mmc_dev_config: The single MMC configuration on which
            to run the test. See the file-level comment above for details
            of the format.

    Returns:
        Nothing.
    """

    cfg = env__mmc_dev_config
    # Select MMC device first, then rescan it.
    mmc_dev(u_boot_console, cfg['is_emmc'], cfg['devid'], cfg.get('partid', 0))
    # Rescan must still find the card.
    response = u_boot_console.run_command('mmc rescan')
    assert 'no card present' not in response
@pytest.mark.buildconfigspec('cmd_mmc')
def test_mmc_info(u_boot_console, env__mmc_dev_config):
    """Test the "mmc info" command.

    Args:
        u_boot_console: A U-Boot console connection.
        env__mmc_dev_config: The single MMC configuration on which
            to run the test. See the file-level comment above for details
            of the format.

    Returns:
        Nothing.
    """

    cfg = env__mmc_dev_config
    # Collect the expected "label: value" pairs up front.
    expected_fields = (
        ('Device', cfg['info_device']),
        ('Bus Speed', cfg['info_speed']),
        ('Mode', cfg['info_mode']),
        ('Bus Width', cfg['info_buswidth']),
    )
    # Select MMC device
    mmc_dev(u_boot_console, cfg['is_emmc'], cfg['devid'], cfg.get('partid', 0))
    # Read MMC device information and check every expected field is reported.
    response = u_boot_console.run_command('mmc info')
    for label, value in expected_fields:
        assert '%s: %s' % (label, value) in response
@pytest.mark.buildconfigspec('cmd_mmc')
def test_mmc_rd(u_boot_console, env__mmc_rd_config):
    """Test the "mmc read" command.

    Reads a range of sectors from the selected device into RAM and, when the
    configuration supplies a CRC, checksums the target buffer before and
    after the read to verify the data.

    Args:
        u_boot_console: A U-Boot console connection.
        env__mmc_rd_config: The single MMC configuration on which
            to run the test. See the file-level comment above for details
            of the format.

    Returns:
        Nothing.
    """
    is_emmc = env__mmc_rd_config['is_emmc']
    devid = env__mmc_rd_config['devid']
    partid = env__mmc_rd_config.get('partid', 0)
    sector = env__mmc_rd_config.get('sector', 0)
    count_sectors = env__mmc_rd_config.get('count', 1)
    expected_crc32 = env__mmc_rd_config.get('crc32', None)
    read_duration_max = env__mmc_rd_config.get('read_duration_max', 0)
    # MMC block size is 512 bytes.
    count_bytes = count_sectors * 512
    bcfg = u_boot_console.config.buildconfig
    has_cmd_memory = bcfg.get('config_cmd_memory', 'n') == 'y'
    has_cmd_crc32 = bcfg.get('config_cmd_crc32', 'n') == 'y'
    ram_base = u_boot_utils.find_ram_base(u_boot_console)
    addr = '0x%08x' % ram_base
    # Select MMC device
    mmc_dev(u_boot_console, is_emmc, devid, partid)
    # Clear target RAM so stale buffer contents cannot produce a false
    # CRC match after the read.
    if expected_crc32:
        if has_cmd_memory and has_cmd_crc32:
            cmd = 'mw.b %s 0 0x%x' % (addr, count_bytes)
            u_boot_console.run_command(cmd)
            cmd = 'crc32 %s 0x%x' % (addr, count_bytes)
            response = u_boot_console.run_command(cmd)
            assert expected_crc32 not in response
        else:
            u_boot_console.log.warning(
                'CONFIG_CMD_MEMORY or CONFIG_CMD_CRC32 != y: Skipping RAM clear')
    # Read data
    cmd = 'mmc read %s %x %x' % (addr, sector, count_sectors)
    tstart = time.time()
    response = u_boot_console.run_command(cmd)
    tend = time.time()
    good_response = 'MMC read: dev # %d, block # %d, count %d ... %d blocks read: OK' % (
        devid, sector, count_sectors, count_sectors)
    assert good_response in response
    # Check target RAM
    if expected_crc32:
        if has_cmd_crc32:
            cmd = 'crc32 %s 0x%x' % (addr, count_bytes)
            response = u_boot_console.run_command(cmd)
            assert expected_crc32 in response
        else:
            u_boot_console.log.warning('CONFIG_CMD_CRC32 != y: Skipping check')
    # Check if the command did not take too long
    if read_duration_max:
        elapsed = tend - tstart
        u_boot_console.log.info('Reading %d bytes took %f seconds' %
                                (count_bytes, elapsed))
        # NOTE(review): the 0.01 s subtraction makes the limit slightly
        # stricter than read_duration_max -- confirm this margin is intended.
        assert elapsed <= (read_duration_max - 0.01)
|
nopjmp/SickRage | refs/heads/master | lib/pyasn1/codec/__init__.py | 3653 | # This file is necessary to make this directory a package.
|
cg31/tensorflow | refs/heads/master | tensorflow/contrib/rnn/python/kernel_tests/fused_rnn_cell_test.py | 10 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.rnn.python.ops.fused_rnn_cell."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class FusedRnnCellTest(tf.test.TestCase):
  """Checks fused RNN cell adaptors against the unfused reference RNNs.

  Each test builds a reference (unfused) graph and one or more fused
  variants in separate variable scopes with identical initializers, then
  compares outputs, final states, input gradients and weight gradients.
  """

  def testBasicRNNFusedWrapper(self):
    """This test checks that using a wrapper for BasicRNN works as expected."""
    with self.test_session() as sess:
      # Fixed seed so the "basic" and "fused" scopes get identical weights.
      initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=19890212)
      cell = tf.nn.rnn_cell.BasicRNNCell(10)
      batch_size = 5
      input_size = 20
      timelen = 15
      # Time-major inputs: (time, batch, input).
      inputs = tf.constant(np.random.randn(timelen, batch_size, input_size))
      # Reference: unfused tf.nn.rnn over the unpacked time steps.
      with tf.variable_scope("basic", initializer=initializer):
        unpacked_inputs = tf.unpack(inputs)
        outputs, state = tf.nn.rnn(cell, unpacked_inputs, dtype=tf.float64)
        packed_outputs = tf.pack(outputs)
        basic_vars = [v for v in tf.trainable_variables()
                      if v.name.startswith("basic/")]
        sess.run([tf.initialize_all_variables()])
        basic_outputs, basic_state = sess.run([packed_outputs, state])
        basic_grads = sess.run(tf.gradients(packed_outputs, inputs))
        basic_wgrads = sess.run(tf.gradients(packed_outputs, basic_vars))
      # Fused adaptor driven by the static rnn implementation.
      with tf.variable_scope("fused_static", initializer=initializer):
        fused_cell = tf.contrib.rnn.FusedRNNCellAdaptor(cell)
        outputs, state = fused_cell(inputs, dtype=tf.float64)
        fused_static_vars = [v for v in tf.trainable_variables()
                             if v.name.startswith("fused_static/")]
        sess.run([tf.initialize_all_variables()])
        fused_static_outputs, fused_static_state = sess.run([outputs, state])
        fused_static_grads = sess.run(tf.gradients(outputs, inputs))
        fused_static_wgrads = sess.run(tf.gradients(outputs, fused_static_vars))
      self.assertAllClose(basic_outputs, fused_static_outputs)
      self.assertAllClose(basic_state, fused_static_state)
      self.assertAllClose(basic_grads, fused_static_grads)
      # NOTE(review): weight gradients are compared with a looser tolerance
      # (rtol/atol 1e-2) than the default -- presumably to absorb accumulated
      # numerical differences; confirm.
      for basic, fused in zip(basic_wgrads, fused_static_wgrads):
        self.assertAllClose(basic, fused, rtol=1e-2, atol=1e-2)
      # Fused adaptor driven by dynamic_rnn.
      with tf.variable_scope("fused_dynamic", initializer=initializer):
        fused_cell = tf.contrib.rnn.FusedRNNCellAdaptor(
            cell, use_dynamic_rnn=True)
        outputs, state = fused_cell(inputs, dtype=tf.float64)
        fused_dynamic_vars = [v for v in tf.trainable_variables()
                              if v.name.startswith("fused_dynamic/")]
        sess.run([tf.initialize_all_variables()])
        fused_dynamic_outputs, fused_dynamic_state = sess.run([outputs, state])
        fused_dynamic_grads = sess.run(tf.gradients(outputs, inputs))
        fused_dynamic_wgrads = sess.run(
            tf.gradients(outputs, fused_dynamic_vars))
      self.assertAllClose(basic_outputs, fused_dynamic_outputs)
      self.assertAllClose(basic_state, fused_dynamic_state)
      self.assertAllClose(basic_grads, fused_dynamic_grads)
      for basic, fused in zip(basic_wgrads, fused_dynamic_wgrads):
        self.assertAllClose(basic, fused, rtol=1e-2, atol=1e-2)

  def testTimeReversedFusedRNN(self):
    """Checks TimeReversedFusedRNN against the unfused bidirectional rnn."""
    with self.test_session() as sess:
      initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=19890213)
      cell = tf.nn.rnn_cell.BasicRNNCell(10)
      batch_size = 5
      input_size = 20
      timelen = 15
      inputs = tf.constant(np.random.randn(timelen, batch_size, input_size))
      # test bi-directional rnn
      with tf.variable_scope("basic", initializer=initializer):
        unpacked_inputs = tf.unpack(inputs)
        outputs, fw_state, bw_state = tf.nn.bidirectional_rnn(
            cell, cell, unpacked_inputs, dtype=tf.float64)
        packed_outputs = tf.pack(outputs)
        basic_vars = [v for v in tf.trainable_variables()
                      if v.name.startswith("basic/")]
        sess.run([tf.initialize_all_variables()])
        basic_outputs, basic_fw_state, basic_bw_state = sess.run(
            [packed_outputs, fw_state, bw_state])
        basic_grads = sess.run(tf.gradients(packed_outputs, inputs))
        basic_wgrads = sess.run(tf.gradients(packed_outputs, basic_vars))
      # Fused forward pass plus a time-reversed fused backward pass, then
      # concatenated along the feature axis like bidirectional_rnn does.
      with tf.variable_scope("fused", initializer=initializer):
        fused_cell = tf.contrib.rnn.FusedRNNCellAdaptor(cell)
        fused_bw_cell = tf.contrib.rnn.TimeReversedFusedRNN(fused_cell)
        fw_outputs, fw_state = fused_cell(inputs, dtype=tf.float64, scope="fw")
        bw_outputs, bw_state = fused_bw_cell(
            inputs, dtype=tf.float64, scope="bw")
        outputs = tf.concat(2, [fw_outputs, bw_outputs])
        fused_vars = [v for v in tf.trainable_variables()
                      if v.name.startswith("fused/")]
        sess.run([tf.initialize_all_variables()])
        fused_outputs, fused_fw_state, fused_bw_state = sess.run(
            [outputs, fw_state, bw_state])
        fused_grads = sess.run(tf.gradients(outputs, inputs))
        fused_wgrads = sess.run(tf.gradients(outputs, fused_vars))
      self.assertAllClose(basic_outputs, fused_outputs)
      self.assertAllClose(basic_fw_state, fused_fw_state)
      self.assertAllClose(basic_bw_state, fused_bw_state)
      self.assertAllClose(basic_grads, fused_grads)
      for basic, fused in zip(basic_wgrads, fused_wgrads):
        self.assertAllClose(basic, fused, rtol=1e-2, atol=1e-2)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  tf.test.main()
|
gohin/django | refs/heads/master | django/conf/locale/he/formats.py | 619 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# The ב character is literal text (the Hebrew preposition "in"), not a
# format specifier.
DATE_FORMAT = 'j בF Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j בF Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j בF'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
# NUMBER_GROUPING =
|
malaterre/ITK | refs/heads/master | Wrapping/Generators/Python/Tests/ModifiedTime.py | 2 | #==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
#
# Test basic properties of modified times
#
import itk

# image and transform are in the same module, but filters are in a different
# module; the test exercises Modified() across module boundaries.
imType = itk.Image[itk.F, 2]
imTypeB = itk.Image[itk.UC, 2]
im = imType.New()
transType = itk.Transform[itk.D, 3]
trans = transType.New()
filtType = itk.AndImageFilter[imTypeB, imTypeB, imTypeB]
filt = filtType.New()
metricType = itk.ImageToImageMetricv4[imType, imType]
met = metricType.New()
# We modify them in the order image, transform, metric, filter.
# NOTE(review): 3000 iterations presumably exercise the global timestamp
# counter across the wrapped modules -- confirm against ITK internals.
for _ in range(3000):
    im.Modified()
    trans.Modified()
    met.Modified()
    filt.Modified()
# and their Modified times should respect that order (the global MTime
# counter is strictly increasing).
assert im.GetMTime() < trans.GetMTime()
assert trans.GetMTime() < met.GetMTime()
assert met.GetMTime() < filt.GetMTime()
|
Nepherhotep/django | refs/heads/master | tests/annotations/models.py | 238 | # coding: utf-8
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
    """Author with a self-referential many-to-many 'friends' relation."""
    name = models.CharField(max_length=100)
    age = models.IntegerField()
    friends = models.ManyToManyField('self', blank=True)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Publisher(models.Model):
    name = models.CharField(max_length=255)
    num_awards = models.IntegerField()

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Book(models.Model):
    """Book linked to authors, a contact author and a publisher."""
    isbn = models.CharField(max_length=9)
    name = models.CharField(max_length=255)
    pages = models.IntegerField()
    rating = models.FloatField()
    price = models.DecimalField(decimal_places=2, max_digits=6)
    authors = models.ManyToManyField(Author)
    # Separate FK to Author, distinct from the M2M above.
    contact = models.ForeignKey(Author, models.CASCADE, related_name='book_contact_set')
    publisher = models.ForeignKey(Publisher, models.CASCADE)
    pubdate = models.DateField()

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class Store(models.Model):
    name = models.CharField(max_length=255)
    books = models.ManyToManyField(Book)
    original_opening = models.DateTimeField()
    friday_night_closing = models.TimeField()

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class DepartmentStore(Store):
    """Multi-table-inheritance child of Store."""
    chain = models.CharField(max_length=255)

    def __str__(self):
        return '%s - %s ' % (self.chain, self.name)


@python_2_unicode_compatible
class Employee(models.Model):
    # The order of these fields matter, do not change. Certain backends
    # rely on field ordering to perform database conversions, and this
    # model helps to test that.
    first_name = models.CharField(max_length=20)
    manager = models.BooleanField(default=False)
    last_name = models.CharField(max_length=20)
    store = models.ForeignKey(Store, models.CASCADE)
    age = models.IntegerField()
    salary = models.DecimalField(max_digits=8, decimal_places=2)

    def __str__(self):
        return '%s %s' % (self.first_name, self.last_name)


@python_2_unicode_compatible
class Company(models.Model):
    name = models.CharField(max_length=200)
    motto = models.CharField(max_length=200, null=True, blank=True)
    ticker_name = models.CharField(max_length=10, null=True, blank=True)
    description = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        return ('Company(name=%s, motto=%s, ticker_name=%s, description=%s)'
            % (self.name, self.motto, self.ticker_name, self.description)
        )


@python_2_unicode_compatible
class Ticket(models.Model):
    active_at = models.DateTimeField()
    duration = models.DurationField()

    def __str__(self):
        return '{} - {}'.format(self.active_at, self.duration)
|
unreal666/youtube-dl | refs/heads/master | youtube_dl/extractor/arkena.py | 41 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
determine_ext,
ExtractorError,
float_or_none,
int_or_none,
mimetype2ext,
parse_iso8601,
strip_jsonp,
)
class ArkenaIE(InfoExtractor):
    """Extractor for videos hosted on the Arkena platform.

    Handles both play.arkena.com config/embed URLs (video id in the path)
    and video.arkena.com play2 URLs (video id in the query string).
    """
    _VALID_URL = r'''(?x)
                        https?://
                            (?:
                                video\.arkena\.com/play2/embed/player\?|
                                play\.arkena\.com/(?:config|embed)/avp/v\d/player/media/(?P<id>[^/]+)/[^/]+/(?P<account_id>\d+)
                            )
                    '''
    _TESTS = [{
        'url': 'https://play.arkena.com/embed/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411',
        'md5': 'b96f2f71b359a8ecd05ce4e1daa72365',
        'info_dict': {
            'id': 'b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe',
            'ext': 'mp4',
            'title': 'Big Buck Bunny',
            'description': 'Royalty free test video',
            'timestamp': 1432816365,
            'upload_date': '20150528',
            'is_live': False,
        },
    }, {
        'url': 'https://play.arkena.com/config/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411/?callbackMethod=jQuery1111023664739129262213_1469227693893',
        'only_matching': True,
    }, {
        'url': 'http://play.arkena.com/config/avp/v1/player/media/327336/darkmatter/131064/?callbackMethod=jQuery1111002221189684892677_1469227595972',
        'only_matching': True,
    }, {
        'url': 'http://play.arkena.com/embed/avp/v1/player/media/327336/darkmatter/131064/',
        'only_matching': True,
    }, {
        'url': 'http://video.arkena.com/play2/embed/player?accountId=472718&mediaId=35763b3b-00090078-bf604299&pageStyling=styled',
        'only_matching': True,
    }]

    @staticmethod
    def _extract_url(webpage):
        # See https://support.arkena.com/display/PLAY/Ways+to+embed+your+video
        mobj = re.search(
            r'<iframe[^>]+src=(["\'])(?P<url>(?:https?:)?//play\.arkena\.com/embed/avp/.+?)\1',
            webpage)
        if mobj:
            return mobj.group('url')

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        account_id = mobj.group('account_id')
        # Handle http://video.arkena.com/play2/embed/player URL
        # (ids come from the query string instead of the path).
        if not video_id:
            qs = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
            video_id = qs.get('mediaId', [None])[0]
            account_id = qs.get('accountId', [None])[0]
            if not video_id or not account_id:
                raise ExtractorError('Invalid URL', expected=True)
        # The config endpoint returns JSONP; strip_jsonp unwraps the
        # "_(...)" callback before JSON parsing.
        playlist = self._download_json(
            'https://play.arkena.com/config/avp/v2/player/media/%s/0/%s/?callbackMethod=_'
            % (video_id, account_id),
            video_id, transform_source=strip_jsonp)['Playlist'][0]
        media_info = playlist['MediaInfo']
        title = media_info['Title']
        media_files = playlist['MediaFiles']
        is_live = False
        formats = []
        # MediaFiles maps delivery kind (M3u8, Flash, Dash, ...) to a list of
        # format dicts; dispatch on the kind and/or the URL extension.
        for kind_case, kind_formats in media_files.items():
            kind = kind_case.lower()
            for f in kind_formats:
                f_url = f.get('Url')
                if not f_url:
                    continue
                is_live = f.get('Live') == 'true'
                exts = (mimetype2ext(f.get('Type')), determine_ext(f_url, None))
                if kind == 'm3u8' or 'm3u8' in exts:
                    formats.extend(self._extract_m3u8_formats(
                        f_url, video_id, 'mp4', 'm3u8_native',
                        m3u8_id=kind, fatal=False, live=is_live))
                elif kind == 'flash' or 'f4m' in exts:
                    formats.extend(self._extract_f4m_formats(
                        f_url, video_id, f4m_id=kind, fatal=False))
                elif kind == 'dash' or 'mpd' in exts:
                    formats.extend(self._extract_mpd_formats(
                        f_url, video_id, mpd_id=kind, fatal=False))
                elif kind == 'silverlight':
                    # TODO: process when ism is supported (see
                    # https://github.com/rg3/youtube-dl/issues/8118)
                    continue
                else:
                    # Plain progressive download; Bitrate is in kbps * 1000.
                    tbr = float_or_none(f.get('Bitrate'), 1000)
                    formats.append({
                        'url': f_url,
                        'format_id': '%s-%d' % (kind, tbr) if tbr else kind,
                        'tbr': tbr,
                    })
        self._sort_formats(formats)
        description = media_info.get('Description')
        video_id = media_info.get('VideoId') or video_id
        timestamp = parse_iso8601(media_info.get('PublishDate'))
        thumbnails = [{
            'url': thumbnail['Url'],
            'width': int_or_none(thumbnail.get('Size')),
        } for thumbnail in (media_info.get('Poster') or []) if thumbnail.get('Url')]
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'is_live': is_live,
            'thumbnails': thumbnails,
            'formats': formats,
        }
|
anisku11/sublimeku | refs/heads/master | Packages/pygments/all/pygments/lexers/esoteric.py | 22 | # -*- coding: utf-8 -*-
"""
pygments.lexers.esoteric
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for esoteric languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer']
class BrainfuckLexer(RegexLexer):
    """
    Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
    language.
    """

    name = 'Brainfuck'
    aliases = ['brainfuck', 'bf']
    filenames = ['*.bf', '*.b']
    mimetypes = ['application/x-brainfuck']

    tokens = {
        'common': [
            # use different colors for different instruction types
            (r'[.,]+', Name.Tag),
            (r'[+-]+', Name.Builtin),
            (r'[<>]+', Name.Variable),
            # anything that is not an instruction is a comment in Brainfuck
            (r'[^.,+\-<>\[\]]+', Comment),
        ],
        'root': [
            (r'\[', Keyword, 'loop'),
            # a ']' with no open loop is an error
            (r'\]', Error),
            include('common'),
        ],
        'loop': [
            # nested loops are tracked via the lexer state stack
            (r'\[', Keyword, '#push'),
            (r'\]', Keyword, '#pop'),
            include('common'),
        ]
    }
class BefungeLexer(RegexLexer):
    """
    Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
    language.

    .. versionadded:: 0.7
    """
    name = 'Befunge'
    aliases = ['befunge']
    filenames = ['*.befunge']
    mimetypes = ['application/x-befunge']

    tokens = {
        'root': [
            (r'[0-9a-f]', Number),
            (r'[+*/%!`-]', Operator), # Traditional math
            (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
            (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
            (r'[|_mw]', Keyword),
            (r'[{}]', Name.Tag), # Befunge-98 stack ops
            (r'".*?"', String.Double), # Strings don't appear to allow escapes
            (r'\'.', String.Single), # Single character
            (r'[#;]', Comment), # Trampoline... depends on direction hit
            (r'[pg&~=@iotsy]', Keyword), # Misc
            (r'[()A-Z]', Comment), # Fingerprints
            (r'\s+', Text), # Whitespace doesn't matter
        ],
    }
class RedcodeLexer(RegexLexer):
    """
    A simple Redcode lexer based on ICWS'94.
    Contributed by Adam Blinkinsop <blinks@acm.org>.

    .. versionadded:: 0.8
    """
    name = 'Redcode'
    aliases = ['redcode']
    filenames = ['*.cw']

    # ICWS'94 opcodes plus the ORG/EQU/END pseudo-ops.
    opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
               'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
               'ORG', 'EQU', 'END')
    modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')

    tokens = {
        'root': [
            # Whitespace:
            (r'\s+', Text),
            (r';.*$', Comment.Single),
            # Lexemes:
            #  Identifiers
            (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
            (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
            (r'[A-Za-z_]\w+', Name),
            #  Operators
            (r'[-+*/%]', Operator),
            (r'[#$@<>]', Operator), # mode
            (r'[.,]', Punctuation), # mode
            #  Numbers
            (r'[-+]?\d+', Number.Integer),
        ],
    }
|
iulian787/spack | refs/heads/develop | var/spack/repos/builtin/packages/librsb/package.py | 3 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Librsb(AutotoolsPackage):
    """librsb : A shared memory parallel sparse matrix computations
    library for the Recursive Sparse Blocks format"""

    homepage = "http://librsb.sourceforge.net/"
    url = "http://download.sourceforge.net/librsb/librsb-1.2.0.9.tar.gz"
    list_url = "https://sourceforge.net/projects/librsb/files/"

    version('1.2.0.9', 'f421f5d572461601120933e3c1cfee2ca69e6ecc92cbb11baa4e86bdedd3d9fa')
    version('1.2.0.8', '8bebd19a1866d80ade13eabfdd0f07ae7e8a485c0b975b5d15f531ac204d80cb')

    depends_on('zlib')

    # NOTE(review): the build is declared incompatible with clang-based
    # compilers -- presumably an OpenMP/GCC dependency; confirm upstream.
    conflicts('%apple-clang')
    conflicts('%clang')

    def configure_args(self):
        """Return configure flags: enable OpenMP, zlib support and Fortran
        module installation, pointing at Spack's zlib."""
        args = [
            '--enable-openmp',
            '--with-zlib',
            '--enable-fortran-module-install',
            'CPPFLAGS={0}'.format(self.spec['zlib'].headers.include_flags),
            'CFLAGS=-O3',
            'LDFLAGS={0}'.format(self.spec['zlib'].libs.search_flags)
        ]
        return args
|
stacywsmith/ansible | refs/heads/devel | lib/ansible/modules/packaging/language/easy_install.py | 70 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: easy_install
short_description: Installs Python libraries
description:
- Installs Python libraries, optionally in a I(virtualenv)
version_added: "0.7"
options:
name:
description:
- A Python library name
required: true
default: null
aliases: []
virtualenv:
description:
- an optional I(virtualenv) directory path to install into. If the
I(virtualenv) does not exist, it is created automatically
required: false
default: null
virtualenv_site_packages:
version_added: "1.1"
description:
- Whether the virtual environment will inherit packages from the
global site-packages directory. Note that if this setting is
changed on an already existing virtual environment it will not
have any effect, the environment must be deleted and newly
created.
required: false
default: "no"
choices: [ "yes", "no" ]
virtualenv_command:
version_added: "1.1"
description:
- The command to create the virtual environment with. For example
C(pyvenv), C(virtualenv), C(virtualenv2).
required: false
default: virtualenv
executable:
description:
- The explicit executable or a pathname to the executable to be used to
run easy_install for a specific version of Python installed in the
system. For example C(easy_install-3.3), if there are both Python 2.7
and 3.3 installations in the system and you want to run easy_install
for the Python 3.3 installation.
version_added: "1.3"
required: false
default: null
state:
version_added: "2.0"
description:
- The desired state of the library. C(latest) ensures that the latest version is installed.
required: false
choices: [present, latest]
default: present
notes:
- Please note that the C(easy_install) module can only install Python
libraries. Thus this module is not able to remove libraries. It is
generally recommended to use the M(pip) module which you can first install
using M(easy_install).
- Also note that I(virtualenv) must be installed on the remote host if the
C(virtualenv) parameter is specified.
requirements: [ "virtualenv" ]
author: "Matt Wright (@mattupstate)"
'''
EXAMPLES = '''
# Examples from Ansible Playbooks
- easy_install:
name: pip
state: latest
# Install Bottle into the specified virtualenv.
- easy_install:
name: bottle
virtualenv: /webapps/myapp/venv
'''
import tempfile
import os.path
def _is_package_installed(module, name, easy_install, executable_arguments):
executable_arguments = executable_arguments + ['--dry-run']
cmd = '%s %s %s' % (easy_install, ' '.join(executable_arguments), name)
rc, status_stdout, status_stderr = module.run_command(cmd)
if rc:
module.fail_json(msg=status_stderr)
return not ('Reading' in status_stdout or 'Downloading' in status_stdout)
def _get_easy_install(module, env=None, executable=None):
candidate_easy_inst_basenames = ['easy_install']
easy_install = None
if executable is not None:
if os.path.isabs(executable):
easy_install = executable
else:
candidate_easy_inst_basenames.insert(0, executable)
if easy_install is None:
if env is None:
opt_dirs = []
else:
# Try easy_install with the virtualenv directory first.
opt_dirs = ['%s/bin' % env]
for basename in candidate_easy_inst_basenames:
easy_install = module.get_bin_path(basename, False, opt_dirs)
if easy_install is not None:
break
# easy_install should have been found by now. The final call to
# get_bin_path will trigger fail_json.
if easy_install is None:
basename = candidate_easy_inst_basenames[0]
easy_install = module.get_bin_path(basename, True, opt_dirs)
return easy_install
def main():
    """Entry point for the easy_install Ansible module.

    Optionally creates a virtualenv, locates the easy_install binary and
    installs the requested package unless it is already present.  Always
    exits through module.exit_json / module.fail_json.
    """
    arg_spec = dict(
        name=dict(required=True),
        state=dict(required=False,
                   default='present',
                   choices=['present','latest'],
                   type='str'),
        virtualenv=dict(default=None, required=False),
        virtualenv_site_packages=dict(default='no', type='bool'),
        virtualenv_command=dict(default='virtualenv', required=False),
        executable=dict(default='easy_install', required=False),
    )
    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
    name = module.params['name']
    env = module.params['virtualenv']
    executable = module.params['executable']
    site_packages = module.params['virtualenv_site_packages']
    virtualenv_command = module.params['virtualenv_command']
    executable_arguments = []
    # state=latest maps onto easy_install's --upgrade flag.
    if module.params['state'] == 'latest':
        executable_arguments.append('--upgrade')
    # Accumulated exit code / output across the (up to two) commands run.
    rc = 0
    err = ''
    out = ''
    if env:
        virtualenv = module.get_bin_path(virtualenv_command, True)
        # 'bin/activate' is used as the marker that the virtualenv exists;
        # only create it when missing.
        if not os.path.exists(os.path.join(env, 'bin', 'activate')):
            if module.check_mode:
                module.exit_json(changed=True)
            command = '%s %s' % (virtualenv, env)
            if site_packages:
                command += ' --system-site-packages'
            # Run from the temp dir so virtualenv creation is independent
            # of whatever the current working directory happens to be.
            cwd = tempfile.gettempdir()
            rc_venv, out_venv, err_venv = module.run_command(command, cwd=cwd)
            rc += rc_venv
            out += out_venv
            err += err_venv
    easy_install = _get_easy_install(module, env, executable)
    cmd = None
    changed = False
    installed = _is_package_installed(module, name, easy_install, executable_arguments)
    if not installed:
        if module.check_mode:
            module.exit_json(changed=True)
        cmd = '%s %s %s' % (easy_install, ' '.join(executable_arguments), name)
        rc_easy_inst, out_easy_inst, err_easy_inst = module.run_command(cmd)
        rc += rc_easy_inst
        out += out_easy_inst
        err += err_easy_inst
        changed = True
    # Any non-zero rc from either virtualenv creation or the install fails.
    if rc != 0:
        module.fail_json(msg=err, cmd=cmd)
    module.exit_json(changed=changed, binary=easy_install,
                     name=name, virtualenv=env)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
kartikeys98/coala | refs/heads/master | coalib/bearlib/languages/definitions/Ruby.py | 14 | from coalib.bearlib.languages.Language import Language
@Language
class Ruby:
    """Language definition for Ruby, registered via the ``@Language`` decorator."""
    pass
|
0111001101111010/open-health-inspection-api | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/shutil.py | 1002 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
from . import tarfile
try:
import bz2
_BZ2_SUPPORTED = True
except ImportError:
_BZ2_SUPPORTED = False
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"get_unpack_formats", "register_unpack_format",
"unregister_unpack_format", "unpack_archive", "ignore_patterns"]
class Error(EnvironmentError):
pass
class SpecialFileError(EnvironmentError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
class ExecError(EnvironmentError):
"""Raised when a command could not be executed"""
class ReadError(EnvironmentError):
"""Raised when an archive cannot be read"""
class RegistryError(Exception):
"""Raised when a registery operation with the archiving
and unpacking registeries fails"""
try:
WindowsError
except NameError:
WindowsError = None
def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    # Stream in fixed-size chunks so arbitrarily large files can be
    # copied without loading them fully into memory.
    chunk = fsrc.read(length)
    while chunk:
        fdst.write(chunk)
        chunk = fsrc.read(length)
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    # Refuse to touch named pipes: a plain open() on a FIFO can block.
    for path in (src, dst):
        try:
            st = os.stat(path)
        except OSError:
            # File most likely does not exist (yet) -- that is fine.
            continue
        # XXX What about other special files? (sockets, devices...)
        if stat.S_ISFIFO(st.st_mode):
            raise SpecialFileError("`%s` is a named pipe" % path)

    with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
        copyfileobj(fsrc, fdst)
def copymode(src, dst):
    """Copy mode bits from src to dst"""
    # Some platforms lack chmod entirely; silently do nothing there,
    # matching the rest of this module.
    if not hasattr(os, 'chmod'):
        return
    mode_bits = stat.S_IMODE(os.stat(src).st_mode)
    os.chmod(dst, mode_bits)
def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    # Each step is guarded with hasattr because not every platform
    # provides utime/chmod/chflags.
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            # Some filesystems do not support chflags: ignore only the
            # "operation not supported" error, propagate anything else.
            if (not hasattr(errno, 'EOPNOTSUPP') or
                why.errno != errno.EOPNOTSUPP):
                raise
def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.
    """
    target = dst
    # Copying into a directory means copying to dir/<basename of src>.
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copymode(src, target)
def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.
    """
    target = dst
    # Copying into a directory means copying to dir/<basename of src>.
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copystat(src, target)
def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        # Union of all names matching any of the glob patterns.
        ignored = set()
        for pattern in patterns:
            ignored.update(fnmatch.filter(names, pattern))
        return ignored
    return _ignore_patterns
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occurs. copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            # Bug fix: this used errors.extend(...), which appended the
            # three strings src, dst and str(why) as separate entries
            # instead of one (src, dst, reason) tuple like every other
            # error collected above (fixed the same way upstream in
            # CPython's shutil).
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.
    """
    if ignore_errors:
        # Swallow every error silently.
        def onerror(*args):
            pass
    elif onerror is None:
        # Default handler: re-raise the exception currently being handled.
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            # Unstatable entries are treated as plain files below.
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    # Finally remove the now (hopefully) empty directory itself.
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())
def _basename(path):
# A basename() variant which first strips the trailing slash, if present.
# Thus we always get the last component of the path, even for directories.
return os.path.basename(path.rstrip(os.path.sep))
def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.
    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return
        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        # rename failed (typically a cross-device move): fall back to
        # copy + delete.
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)
def _destinsrc(src, dst):
src = abspath(src)
dst = abspath(dst)
if not src.endswith(os.path.sep):
src += os.path.sep
if not dst.endswith(os.path.sep):
dst += os.path.sep
return dst.startswith(src)
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    # Bug fix: when base_name has no directory component, dirname() is ''
    # and os.path.exists('') is False, so the old code called
    # os.makedirs('') and crashed.  Guard on a non-empty archive_dir
    # (same fix as in upstream CPython shutil).
    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile filter: force the requested owner/group onto each member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    """Create *zip_filename* by spawning the external InfoZIP 'zip' utility.

    Fallback used when the zipfile module is unavailable.  Raises ExecError
    when the command cannot be run or fails.
    """
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        # Bug fix: the '% zip_filename' interpolation used to be applied
        # to the ExecError *instance* ("raise ExecError(...) % name"),
        # which made this line raise a TypeError instead of the intended
        # ExecError.  Interpolate into the message string instead.
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
"""Create a zip file from all the files under 'base_dir'.
The output zip file will be named 'base_name' + ".zip". Uses either the
"zipfile" Python module (if available) or the InfoZIP "zip" utility
(if installed and found on the default search path). If neither tool is
available, raises ExecError. Returns the name of the output zip
file.
"""
zip_filename = base_name + ".zip"
archive_dir = os.path.dirname(base_name)
if not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# If zipfile module is not available, try spawning an external 'zip'
# command.
try:
import zipfile
except ImportError:
zipfile = None
if zipfile is None:
_call_external_zip(base_dir, zip_filename, verbose, dry_run)
else:
if logger is not None:
logger.info("creating '%s' and adding '%s' to it",
zip_filename, base_dir)
if not dry_run:
zip = zipfile.ZipFile(zip_filename, "w",
compression=zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
zip.write(path, path)
if logger is not None:
logger.info("adding '%s'", path)
zip.close()
return zip_filename
# Maps format name -> (archiver function, extra (name, value) args, description).
# NOTE(review): 'bztar' is listed unconditionally here but bzip2 compression
# only works when bz2 imported successfully; without bz2, _make_tarball
# rejects it with ValueError.  The entry is re-registered below when
# _BZ2_SUPPORTED is true -- confirm the unconditional entry is intended.
_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                "bzip2'ed tar-file")
def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    return sorted((name, registry[2])
                  for name, registry in _ARCHIVE_FORMATS.items())
def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    # Bug fix: this used isinstance(function, collections.Callable); the
    # Callable alias in the collections module was removed in Python 3.10
    # (it lives in collections.abc), so that check raised AttributeError
    # there.  The callable() builtin is equivalent and portable.
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) !=2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
def unregister_archive_format(name):
    """Remove the archive format *name* from the registry (KeyError if absent)."""
    del _ARCHIVE_FORMATS[name]
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    # NOTE: chdir-based, so this function is not safe to call concurrently
    # from multiple threads of the same process.
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    # Format-specific extra arguments registered alongside the archiver.
    for arg, val in format_info[1]:
        kwargs[arg] = val

    # Only the tar-based archivers understand owner/group.
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # Always restore the original working directory.
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    return sorted((name, info[0], info[3])
                  for name, info in _UNPACK_FORMATS.items())
def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    # Bug fix: this used isinstance(function, collections.Callable); the
    # Callable alias in the collections module was removed in Python 3.10,
    # so the check raised AttributeError there.  callable() is equivalent.
    if not callable(function):
        raise TypeError('The registered function must be a callable')
def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    # Validation (extension clashes, callability) happens before mutation,
    # so a bad registration leaves the registry untouched.
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
def unregister_unpack_format(name):
    """Removes the pack format from the registry (KeyError if absent)."""
    del _UNPACK_FORMATS[name]
def _ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`
    """
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            # (path-traversal guard: such members are skipped, not errors)
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            # Members whose name ends in '/' are directories; only
            # non-directory members have data to write.
            if not name.endswith('/'):
                # file
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                # Release the (potentially large) member buffer eagerly.
                del data
    finally:
        zip.close()
def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        # NOTE(review): extractall() trusts member paths; unlike the zip
        # unpacker above there is no '..'/absolute-path guard here, so only
        # use this on trusted archives.
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()
# Maps format name -> (extensions, unpack function, extra args, description).
_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    # NOTE(review): the registered extension is '.bz2', so _find_unpack_format
    # matches ANY *.bz2 file, not just *.tar.bz2 -- confirm this is intended
    # before tightening it.
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")
def _find_unpack_format(filename):
    """Return the name of the first registered unpack format whose extension
    matches *filename*, or None when no registered extension matches."""
    for name, info in _UNPACK_FORMATS.items():
        if filename.endswith(tuple(info[0])):
            return name
    return None
def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        # Explicit format: unknown names raise ValueError.
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        # NOTE: unmatched extensions raise ReadError here (not ValueError,
        # as for an explicitly-passed unknown format above).
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
|
HarryRybacki/osf.io | refs/heads/develop | website/addons/wiki/routes.py | 6 | """
"""
import os
from framework.routing import Rule, json_renderer
from website.routes import OsfWebRenderer
from . import views
TEMPLATE_DIR = '../addons/wiki/templates/'
# No settings-specific endpoints yet; the empty rule set is kept so the
# addon framework can still mount this route group under /api/v1.
settings_routes = {
    'rules': [],
    'prefix': '/api/v1',
}
# JSON endpoint backing the wiki widget shown on project dashboards; each
# Rule lists both the project-level and component-level (node) URL forms.
widget_routes = {
    'rules': [
        Rule([
            '/project/<pid>/wiki/widget/',
            '/project/<pid>/node/<nid>/wiki/widget/',
        ], 'get', views.wiki_widget, json_renderer),
    ],
    'prefix': '/api/v1',
}
# NOTE: <wname> refers to a wiki page's key, e.g. 'Home'
page_routes = {
'rules': [
# Home (Base) | GET
Rule([
'/project/<pid>/wiki/',
'/project/<pid>/node/<nid>/wiki/',
], 'get', views.project_wiki_home, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
# View (Id) | GET
Rule([
'/project/<pid>/wiki/id/<wid>/',
'/project/<pid>/node/<nid>/wiki/id/<wid>/',
], 'get', views.project_wiki_id_page, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
# Wiki | GET
Rule([
'/project/<pid>/wiki/<wname>/',
'/project/<pid>/node/<nid>/wiki/<wname>/',
], 'get', views.project_wiki_view, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
# Edit | GET (legacy url, trigger redirect)
Rule([
'/project/<pid>/wiki/<wname>/edit/',
'/project/<pid>/node/<nid>/wiki/<wname>/edit/',
], 'get', views.project_wiki_edit, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
# Compare | GET (legacy url, trigger redirect)
Rule([
'/project/<pid>/wiki/<wname>/compare/<int:wver>/',
'/project/<pid>/node/<nid>/wiki/<wname>/compare/<int:wver>/',
], 'get', views.project_wiki_compare, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
# Edit | POST
Rule([
'/project/<pid>/wiki/<wname>/',
'/project/<pid>/node/<nid>/wiki/<wname>/',
], 'post', views.project_wiki_edit_post, OsfWebRenderer(os.path.join(TEMPLATE_DIR, 'edit.mako'))),
]
}
api_routes = {
'rules': [
# Home (Base) : GET
Rule([
'/project/<pid>/wiki/',
'/project/<pid>/node/<nid>/wiki/',
], 'get', views.project_wiki_home, json_renderer),
# Draft : GET
Rule([
'/project/<pid>/wiki/<wname>/draft/',
'/project/<pid>/node/<nid>/wiki/<wname>/draft/',
], 'get', views.wiki_page_draft, json_renderer),
# Content : GET
# <wver> refers to a wiki page's version number
Rule([
'/project/<pid>/wiki/<wname>/content/',
'/project/<pid>/node/<nid>/wiki/<wname>/content/',
'/project/<pid>/wiki/<wname>/content/<wver>/',
'/project/<pid>/node/<nid>/wiki/<wname>/content/<wver>/',
], 'get', views.wiki_page_content, json_renderer),
# Validate | GET
Rule([
'/project/<pid>/wiki/<wname>/validate/',
'/project/<pid>/node/<nid>/wiki/<wname>/validate/',
], 'get', views.project_wiki_validate_name, json_renderer),
# Edit | POST
Rule([
'/project/<pid>/wiki/<wname>/edit/',
'/project/<pid>/node/<nid>/wiki/<wname>/edit/',
], 'post', views.project_wiki_edit_post, json_renderer),
# Rename : PUT
Rule([
'/project/<pid>/wiki/<wname>/rename/',
'/project/<pid>/node/<nid>/wiki/<wname>/rename/',
], 'put', views.project_wiki_rename, json_renderer),
# Delete : DELETE
Rule([
'/project/<pid>/wiki/<wname>/',
'/project/<pid>/node/<nid>/wiki/<wname>/',
], 'delete', views.project_wiki_delete, json_renderer),
# Wiki Menu : GET
Rule([
'/project/<pid>/wiki/<wname>/grid/',
'/project/<pid>/node/<nid>/wiki/<wname>/grid/'
], 'get', views.project_wiki_grid_data, json_renderer),
],
'prefix': '/api/v1',
}
|
daubers/ComponentStock | refs/heads/master | Components/tests.py | 1 | __author__ = 'Matt'
from django.test import TestCase
from django.test.client import Client
import json
class ComponentTestCase(TestCase):
    """Integration tests for the supplier/manufacturer/component JSON API.

    Each endpoint is exercised through the Django test client with a
    JSON-encoded 'DATA' POST parameter, mirroring how clients talk to
    the API.  {"HTTPRESPONSE": 1} is the API's success payload.
    """
    def setUp(self):
        # Fresh test client per test; Django resets the test database.
        self.client = Client()
    def addSupplier(self):
        """
        Add a supplier to the database
        returns the response
        """
        compDict = {
            "name": "Google",
            "url": "http://www.google.com",
            "account_username": "Bob",
        }
        postDict = json.dumps(compDict)
        response = self.client.post('/supplier/add/', {'DATA': postDict, })
        return response
    def addManufacturer(self):
        """
        Add a manufacturer to the database
        returns the response
        """
        compDict = {
            "name": "Google",
            "url": "http://www.google.com",
        }
        postDict = json.dumps(compDict)
        response = self.client.post('/manufacturer/add/', {'DATA': postDict,})
        return response
    def test_adding_a_manufacturer(self):
        """
        Attempts to add a manufacturer to the database and read it back
        """
        response = self.addManufacturer()
        self.assertContains(response, "{\"HTTPRESPONSE\": 1}")
        # id=None means "fetch all"; the new row gets primary key 1 in the
        # fresh test database.
        response = self.client.post('/manufacturer/get/', {'DATA': json.dumps({"id": None}), })
        self.assertContains(response, "[{\"url\": \"http://www.google.com\", \"id\": 1, \"name\": \"Google\"}]")
    def test_adding_a_supplier(self):
        """
        Attempts to add a new supplier to the database
        """
        response = self.addSupplier()
        self.assertContains(response, "{\"HTTPRESPONSE\": 1}")
    def test_adding_a_component(self):
        """
        Attempts to add a new component (requires a supplier and a
        manufacturer to exist first)
        """
        response = self.addSupplier()
        self.assertContains(response, "{\"HTTPRESPONSE\": 1}")
        response = self.addManufacturer()
        self.assertContains(response, "{\"HTTPRESPONSE\": 1}")
        # Foreign keys 1/1 refer to the supplier and manufacturer just added.
        compDict = {
            "name": 'Test1',
            "cost": 4.00,
            "manufacturer": 1,
            "part_no": "ghykjh",
            "datasheet_uri": "http://www.google.com/",
            "supplier": 1
        }
        postDict = json.dumps(compDict)
        response = self.client.post('/component/add/', {'DATA': postDict,})
        self.assertContains(response, "{\"HTTPRESPONSE\": 1}")
|
zzcclp/spark | refs/heads/master | python/pyspark/ml/tests/test_stat.py | 23 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pyspark.ml.linalg import Vectors
from pyspark.ml.stat import ChiSquareTest
from pyspark.sql import DataFrame
from pyspark.testing.mlutils import SparkSessionTestCase
class ChiSquareTestTests(SparkSessionTestCase):
    """Smoke test for pyspark.ml.stat.ChiSquareTest."""

    def test_chisquaretest(self):
        # Three labelled dense vectors are enough to run the test end to
        # end; only the result schema is checked, not the statistics.
        data = [[0, Vectors.dense([0, 1, 2])],
                [1, Vectors.dense([1, 1, 1])],
                [2, Vectors.dense([2, 1, 0])]]
        df = self.spark.createDataFrame(data, ['label', 'feat'])
        res = ChiSquareTest.test(df, 'feat', 'label')
        # This line is hitting the collect bug described in #17218, commented for now.
        # pValues = res.select("degreesOfFreedom").collect())
        self.assertIsInstance(res, DataFrame)
        fieldNames = set(field.name for field in res.schema.fields)
        expectedFields = ["pValues", "degreesOfFreedom", "statistics"]
        self.assertTrue(all(field in fieldNames for field in expectedFields))
if __name__ == "__main__":
from pyspark.ml.tests.test_stat import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
sharkspeed/dororis | refs/heads/dev | algorithm/psads/3-chapter/1-stack/stack_3_9.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Assume the infix expression is a string of tokens delimited by spaces. The operator tokens are *, /, +, and -, along with the left and right parentheses, ( and ). The operand tokens are the single-character identifiers A, B, C, and so on
from stack_3_3 import Stack
infix_string = '( a + b ) * ( c + d )'
def converter_in_post(string):
    """Convert a space-delimited infix expression to postfix notation.

    Implements the shunting-yard algorithm.  Supported operators are
    + - * / ** plus parentheses; any other token is treated as an
    operand.  Prints and returns the postfix expression as a
    space-delimited string.

    Fixes over the previous version:
    - removed the leftover per-token debug prints;
    - '**' is now treated as right-associative (pops only strictly
      higher precedence), so 'a ** b ** c' converts to 'a b c ** **'
      instead of the incorrect 'a b ** c **'.
    """
    opstack = Stack()
    output_list = []
    prec = {'(': 0, '+': 1, '-': 1, '*': 2, '/': 2, '**': 3}
    right_assoc = ('**',)
    for token in string.split(' '):
        if token == '(':
            opstack.push(token)
        elif token == ')':
            # Unwind operators back to the matching opening parenthesis.
            while opstack.peek() != '(':
                output_list.append(opstack.pop())
            opstack.pop()  # discard the '(' itself
        elif token in prec:
            while not opstack.is_empty():
                top_prec = prec[opstack.peek()]
                cur_prec = prec[token]
                # Left-associative operators also yield to equal
                # precedence; right-associative only to strictly higher.
                if top_prec > cur_prec or (top_prec == cur_prec and
                                           token not in right_assoc):
                    output_list.append(opstack.pop())
                else:
                    break
            opstack.push(token)
        else:
            output_list.append(token)
    # Flush the remaining operators.
    while not opstack.is_empty():
        output_list.append(opstack.pop())
    output = ' '.join(output_list)
    print(output)
    return output
def eval_postfix(string):
    """Evaluate a space-delimited postfix expression.

    Operands are converted with int() only when an operator consumes them,
    so a single-token expression returns the raw token string (original
    behaviour, preserved).  Leftover items above the result are silently
    discarded, as before, so inputs with a trailing unknown token still
    evaluate.  The custom Stack and the debug display() call are gone.

    :param string: postfix expression, tokens separated by single spaces
    :returns: the numeric result (or the raw token for a 1-token input)
    :raises Exception: when an operator lacks two operands, on division by
        zero, or when the expression leaves the stack empty
    """
    stack = []  # plain list used as the evaluation stack
    ops_eval_map = {
        '+': lambda x, y: x + y,
        '-': lambda x, y: x - y,
        '*': lambda x, y: x * y,
        '/': lambda x, y: x / y,
    }
    for token in string.split(' '):
        if token in ops_eval_map:
            if len(stack) < 2:
                raise Exception('Expression invalid')
            # Top of stack is the right operand.
            right_op = int(stack.pop())
            left_op = int(stack.pop())
            if token == '/' and right_op == 0:
                raise Exception("right operand can't be zero")
            stack.append(ops_eval_map[token](left_op, right_op))
        else:
            stack.append(token)
    if not stack:
        raise Exception('Expression invalid')
    # Lenient, as in the original: drop anything stacked above the result.
    while len(stack) > 1:
        stack.pop()
    output = stack.pop()
    print(output)
    return output
def eval_helper(eval_stack, token, ops_eval_map):
    """Pop two operands off *eval_stack* and apply the operator *token*.

    The top of the stack is the right operand, the next item the left one;
    both are converted with int() before the operator is applied.

    :param eval_stack: stack of operand tokens (anything with a ``pop()``)
    :param token: operator symbol, a key of *ops_eval_map*
    :param ops_eval_map: mapping from operator symbol to a 2-arg callable
    :returns: the result of applying the operator
    :raises Exception: on division by zero
    """
    right_op = int(eval_stack.pop())
    left_op = int(eval_stack.pop())
    if token == '/' and right_op == 0:
        # Fixed the misplaced apostrophe escape of the original message.
        raise Exception("right operand can't be zero")
    return ops_eval_map[token](left_op, right_op)
# Demo runs: infix -> postfix conversion.
converter_in_post(infix_string)
converter_in_post('( A + B ) * C')
converter_in_post('10 + 3 * 5 / ( 16 - 4 )')
# NOTE(review): '5 3 4 2 - ** *' is actually the CORRECT postfix for this
# input; the converter only mis-associates *chained* '**' (e.g. '2 ** 3 ** 2')
# because it pops equal-precedence operators left-associatively.
converter_in_post('5 * 3 ** ( 4 - 2 )')
# Demo runs: postfix evaluation.
eval_postfix('4 5 6 * +')
eval_postfix('7 8 + 3 2 + /')
# '==' is not a known operator, so it is pushed as an operand and then
# silently discarded by the leftover-cleanup loop; the result is 9.0.
eval_postfix('17 10 + 3 * 9 / ==')
# TODO prefix
|
boneyao/sentry | refs/heads/master | src/sentry/search/elastic_search/__init__.py | 108 | from __future__ import absolute_import, print_function
|
0jpq0/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/test/test_asyncio/test_windows_utils.py | 60 | """Tests for window_utils"""
import socket
import sys
import test.support
import unittest
from test.support import IPV6_ENABLED
from unittest import mock
if sys.platform != 'win32':
raise unittest.SkipTest('Windows only')
import _winapi
from asyncio import windows_utils
from asyncio import _overlapped
class WinsocketpairTests(unittest.TestCase):
    """Tests for windows_utils.socketpair()."""

    def check_winsocketpair(self, ssock, csock):
        # Round-trip a payload to prove the two sockets are connected.
        csock.send(b'xxx')
        self.assertEqual(b'xxx', ssock.recv(1024))
        csock.close()
        ssock.close()

    def test_winsocketpair(self):
        ssock, csock = windows_utils.socketpair()
        self.check_winsocketpair(ssock, csock)

    @unittest.skipUnless(IPV6_ENABLED, 'IPv6 not supported or enabled')
    def test_winsocketpair_ipv6(self):
        ssock, csock = windows_utils.socketpair(family=socket.AF_INET6)
        self.check_winsocketpair(ssock, csock)

    @mock.patch('asyncio.windows_utils.socket')
    def test_winsocketpair_exc(self, m_socket):
        # A connect() failure inside socketpair() must propagate to the caller.
        m_socket.AF_INET = socket.AF_INET
        m_socket.SOCK_STREAM = socket.SOCK_STREAM
        m_socket.socket.return_value.getsockname.return_value = ('', 12345)
        m_socket.socket.return_value.accept.return_value = object(), object()
        m_socket.socket.return_value.connect.side_effect = OSError()
        self.assertRaises(OSError, windows_utils.socketpair)

    def test_winsocketpair_invalid_args(self):
        # Non-default family/type/proto arguments are rejected with ValueError.
        self.assertRaises(ValueError,
                          windows_utils.socketpair, family=socket.AF_UNSPEC)
        self.assertRaises(ValueError,
                          windows_utils.socketpair, type=socket.SOCK_DGRAM)
        self.assertRaises(ValueError,
                          windows_utils.socketpair, proto=1)

    @mock.patch('asyncio.windows_utils.socket')
    def test_winsocketpair_close(self, m_socket):
        # A bind() failure must both raise and close the listener socket.
        m_socket.AF_INET = socket.AF_INET
        m_socket.SOCK_STREAM = socket.SOCK_STREAM
        sock = mock.Mock()
        m_socket.socket.return_value = sock
        sock.bind.side_effect = OSError
        self.assertRaises(OSError, windows_utils.socketpair)
        self.assertTrue(sock.close.called)
class PipeTests(unittest.TestCase):
    """Tests for windows_utils.pipe() and PipeHandle."""

    def test_pipe_overlapped(self):
        # Create both pipe ends in overlapped (asynchronous) mode.
        h1, h2 = windows_utils.pipe(overlapped=(True, True))
        try:
            ov1 = _overlapped.Overlapped()
            self.assertFalse(ov1.pending)
            self.assertEqual(ov1.error, 0)

            # Start a read before any data is available: it must stay pending.
            ov1.ReadFile(h1, 100)
            self.assertTrue(ov1.pending)
            self.assertEqual(ov1.error, _winapi.ERROR_IO_PENDING)
            ERROR_IO_INCOMPLETE = 996
            # getresult() on a still-pending operation raises ERROR_IO_INCOMPLETE.
            try:
                ov1.getresult()
            except OSError as e:
                self.assertEqual(e.winerror, ERROR_IO_INCOMPLETE)
            else:
                raise RuntimeError('expected ERROR_IO_INCOMPLETE')

            ov2 = _overlapped.Overlapped()
            self.assertFalse(ov2.pending)
            self.assertEqual(ov2.error, 0)

            # Writing on the other end lets the pending read complete.
            ov2.WriteFile(h2, b"hello")
            self.assertIn(ov2.error, {0, _winapi.ERROR_IO_PENDING})
            res = _winapi.WaitForMultipleObjects([ov2.event], False, 100)
            self.assertEqual(res, _winapi.WAIT_OBJECT_0)

            self.assertFalse(ov1.pending)
            # NOTE(review): ov1.error still reports the last queried state
            # (IO_INCOMPLETE) even though the read has completed by now.
            self.assertEqual(ov1.error, ERROR_IO_INCOMPLETE)
            self.assertFalse(ov2.pending)
            self.assertIn(ov2.error, {0, _winapi.ERROR_IO_PENDING})
            self.assertEqual(ov1.getresult(), b"hello")
        finally:
            _winapi.CloseHandle(h1)
            _winapi.CloseHandle(h2)

    def test_pipe_handle(self):
        h, _ = windows_utils.pipe(overlapped=(True, True))
        _winapi.CloseHandle(_)
        p = windows_utils.PipeHandle(h)
        self.assertEqual(p.fileno(), h)
        self.assertEqual(p.handle, h)

        # check garbage collection of p closes handle
        del p
        test.support.gc_collect()
        # Closing an already-closed handle must fail with ERROR_INVALID_HANDLE.
        try:
            _winapi.CloseHandle(h)
        except OSError as e:
            self.assertEqual(e.winerror, 6)  # ERROR_INVALID_HANDLE
        else:
            raise RuntimeError('expected ERROR_INVALID_HANDLE')
class PopenTests(unittest.TestCase):
    """Tests for windows_utils.Popen with overlapped pipe handles."""

    def test_popen(self):
        # Child echoes stdin upper-cased to stdout and writes to stderr.
        command = r"""if 1:
            import sys
            s = sys.stdin.readline()
            sys.stdout.write(s.upper())
            sys.stderr.write('stderr')
            """
        msg = b"blah\n"

        p = windows_utils.Popen([sys.executable, '-c', command],
                                stdin=windows_utils.PIPE,
                                stdout=windows_utils.PIPE,
                                stderr=windows_utils.PIPE)
        # All three standard streams must be wrapped in PipeHandle.
        for f in [p.stdin, p.stdout, p.stderr]:
            self.assertIsInstance(f, windows_utils.PipeHandle)

        ovin = _overlapped.Overlapped()
        ovout = _overlapped.Overlapped()
        overr = _overlapped.Overlapped()

        # Kick off the write and both reads asynchronously, then wait for all.
        ovin.WriteFile(p.stdin.handle, msg)
        ovout.ReadFile(p.stdout.handle, 100)
        overr.ReadFile(p.stderr.handle, 100)

        events = [ovin.event, ovout.event, overr.event]
        # Super-long timeout for slow buildbots.
        res = _winapi.WaitForMultipleObjects(events, True, 10000)
        self.assertEqual(res, _winapi.WAIT_OBJECT_0)
        self.assertFalse(ovout.pending)
        self.assertFalse(overr.pending)
        self.assertFalse(ovin.pending)

        self.assertEqual(ovin.getresult(), len(msg))
        out = ovout.getresult().rstrip()
        err = overr.getresult().rstrip()

        self.assertGreater(len(out), 0)
        self.assertGreater(len(err), 0)
        # allow for partial reads...
        self.assertTrue(msg.upper().rstrip().startswith(out))
        self.assertTrue(b"stderr".startswith(err))

        # Reap the child so the test leaves no zombie process behind.
        p.wait()
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
DrDos0016/z2 | refs/heads/master | tools/one-offs/csv_audit.py | 1 | import csv
import os
import sys
import django
sys.path.append("/var/projects/museum/")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "z2.settings")
django.setup()
from z2_site.models import File
def main():
    """Audit archive.csv against the File table and report missing games.

    For each (title, author, filename) row in the archive CSV, a game is
    reported as missing when neither its title nor its base filename matches
    a File record in the database.

    :returns: True on completion
    """
    count = 0
    # 'with' guarantees the CSV handle is closed (the original leaked it).
    with open("/var/projects/museum/tools/archive.csv") as fh:
        data = csv.reader(fh)
        for row in data:
            title = row[0]
            author = row[1]
            filename = row[2]
            # Check if the title is in our DB
            if File.objects.filter(title=title).count() == 0:
                # And is the base filename missing as well?
                zname = filename.split("/")[-1]
                if File.objects.filter(filename=zname).count() == 0:
                    print("MISSING GAME")
                    print("\t", title)
                    print("\t", author)
                    print("\t", filename)
                    count += 1
    print(count, "missing games")
    return True
if __name__ == "__main__":
    # Run the audit only when executed as a script.
    main()
|
poppogbr/genropy | refs/heads/master | gnrpy/gnr/web/gnrwebpage.py | 1 | #-*- coding: UTF-8 -*-
#--------------------------------------------------------------------------
# package : GenroPy web - see LICENSE for details
# module gnrwebcore : core module for genropy web framework
# Copyright (c) : 2004 - 2007 Softwell sas - Milano
# Written by : Giovanni Porcari, Michele Bertoldi
# Saverio Porcari, Francesco Porcari , Francesco Cavazzana
#--------------------------------------------------------------------------
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License as published by the Free Software Foundation; either
#version 2.1 of the License, or (at your option) any later version.
#This library is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#Lesser General Public License for more details.
#You should have received a copy of the GNU Lesser General Public
#License along with this library; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#Created by Giovanni Porcari on 2007-03-24.
#Copyright (c) 2007 Softwell. All rights reserved.
import urllib
from gnr.web._gnrbasewebpage import GnrBaseWebPage
import os
import shutil
from gnr.core.gnrstring import toText, toJson, concat, jsquote,splitAndStrip
from gnr.core.gnrlang import getUuid
from mako.lookup import TemplateLookup
from gnr.web.gnrwebreqresp import GnrWebRequest, GnrWebResponse
from gnr.web.gnrwebpage_proxy.apphandler import GnrWebAppHandler
from gnr.web.gnrwebpage_proxy.connection import GnrWebConnection
from gnr.web.gnrwebpage_proxy.serverbatch import GnrWebBatch
from gnr.web.gnrwebpage_proxy.rpc import GnrWebRpc
from gnr.web.gnrwebpage_proxy.localizer import GnrWebLocalizer
from gnr.web.gnrwebpage_proxy.debugger import GnrWebDebugger
from gnr.web.gnrwebpage_proxy.utils import GnrWebUtils
from gnr.web.gnrwebpage_proxy.pluginhandler import GnrWebPluginHandler
from gnr.web.gnrwebpage_proxy.jstools import GnrWebJSTools
from gnr.web.gnrwebstruct import GnrGridStruct, struct_method
from gnr.core.gnrlang import gnrImport, GnrException
from gnr.core.gnrbag import Bag, BagResolver,BagCbResolver
from gnr.core.gnrlang import deprecated
from gnr.web.gnrbaseclasses import BaseComponent # DO NOT REMOVE, old code relies on BaseComponent being defined in this file
import datetime
# Authentication outcome codes returned by _checkAuth()
AUTH_OK = 0
AUTH_NOT_LOGGED = 1
AUTH_FORBIDDEN = -1
# Fallback page timeout/refresh values used when the site config does not
# provide 'page_timeout' / 'page_refresh' (units not shown here — presumably
# seconds; TODO confirm against the site register usage).
PAGE_TIMEOUT = 60
PAGE_REFRESH = 20


class GnrWebPageException(GnrException):
    """Page-level error (e.g. a missing page template)."""
    pass
class GnrWebPage(GnrBaseWebPage):
    """Base class for Genro web pages.

    One instance is built per HTTP request; __init__ wires together the
    request/response wrappers, the connection and page registers, the proxy
    subsystems and finally resolves the call handler for this request.
    """

    def __init__(self, site=None, request=None, response=None, request_kwargs=None, request_args=None, filepath=None,
                 packageId=None, basename=None, environ=None):
        self.site = site
        self.user_agent = request.user_agent or []
        self.user_ip = request.remote_addr
        self._environ = environ
        self.isTouchDevice = ('iPad' in self.user_agent or 'iPhone' in self.user_agent)
        self._event_subscribers = {}
        self.local_datachanges = list()
        self.forked = False # maybe redefine as _forked
        self.filepath = filepath
        self.packageId = packageId
        self.basename = basename
        self.siteFolder = self.site.site_path
        self.folders = self._get_folders()
        self.called_url = request.url
        self.path_url = request.path_url
        # Wrap the raw WSGI request/response in Genro adapters; keep the raw
        # objects available as _request/_response.
        self.request = GnrWebRequest(request)
        self.response = GnrWebResponse(response)
        self._request = self.request._request
        self._response = self.response._response
        self.response.add_header('Pragma', 'no-cache')
        self._htmlHeaders = []
        self._pendingContextToCreate = []
        self.pagename = os.path.splitext(os.path.basename(self.filepath))[0].split(os.path.sep)[-1]
        self.pagepath = self.filepath.replace(self.folders['pages'], '')
        self.debug_mode = False
        self._dbconnection = None
        self._user_login = request_kwargs.pop('_user_login', None)
        self.page_timeout = self.site.config.getItem('page_timeout') or PAGE_TIMEOUT
        self.page_refresh = self.site.config.getItem('page_refresh') or PAGE_REFRESH
        # NOTE(review): k[:2] keeps only the leading '__' as the key, so all
        # double-underscore kwargs collapse onto one entry — looks like it was
        # meant to be k[2:]; confirm before changing.
        self.private_kwargs = dict([(k[:2], v)for k, v in request_kwargs.items() if k.startswith('__')])
        # Template / theme / dojo settings: request kwargs win over class
        # attributes, which win over the site configuration.
        self.pagetemplate = request_kwargs.pop('pagetemplate', None) or getattr(self, 'pagetemplate', None) or \
                            self.site.config['dojo?pagetemplate'] or 'standard.tpl'
        self.css_theme = request_kwargs.pop('css_theme', None) or getattr(self, 'css_theme', None) or self.site.config[
            'gui?css_theme']
        self.dojo_theme = request_kwargs.pop('dojo_theme', None) or getattr(self, 'dojo_theme', None)
        self.dojo_version = request_kwargs.pop('dojo_version', None) or getattr(self, 'dojo_version', None)
        self.debugopt = request_kwargs.pop('debugopt', None)
        self.callcounter = request_kwargs.pop('callcounter', None) or 'begin'
        if not hasattr(self, 'dojo_source'):
            self.dojo_source = self.site.config['dojo?source']
        if 'dojo_source' in request_kwargs:
            self.dojo_source = request_kwargs.pop('dojo_source')
        self.connection = GnrWebConnection(self,
                                           connection_id=request_kwargs.pop('_connection_id', None),
                                           user=request_kwargs.pop('_user', None))
        page_id = request_kwargs.pop('page_id', None)
        self.instantiateProxies()
        # Hooks + handler resolution; _check_page_id either validates an
        # existing page_id or registers a brand new page.
        self.onPreIniting(request_args, request_kwargs)
        self._call_handler = self.get_call_handler(request_args, request_kwargs)
        self.page_item = self._check_page_id(page_id, kwargs=request_kwargs)
        self._workdate = self.page_item['data']['workdate'] #or datetime.date.today()
        self.onIniting(request_args, request_kwargs)
        self._call_args = request_args or tuple()
        self._call_kwargs = request_kwargs or {}
    def onPreIniting(self, *request_args, **request_kwargs):
        """Hook called before the call handler is resolved; subclasses may override."""
        pass

    def instantiateProxies(self):
        """Instantiate every proxy declared through a ``*_proxyclass`` attribute.

        For each attribute named ``<name>_proxyclass`` the proxy class is
        instantiated with this page and bound to ``self.<name>``.
        """
        proxy_classes = [(p[:-11],getattr(self,p, None)) for p in dir(self) if p.endswith('_proxyclass')]
        for proxy_name,proxy_class in proxy_classes:
            if proxy_class:
                setattr(self,proxy_name,proxy_class(self))
    def _check_page_id(self, page_id=None, kwargs=None):
        """Validate an incoming page_id or register a brand new page.

        :param page_id: the page_id sent by the client, or None for a fresh page
        :param kwargs: remaining request kwargs; consumed for '_workdate_' and
                       stored as the new page's pageArgs
        :returns: the page item from the site register
        :raises: a site client_exception if the connection or page_id is invalid
        """
        if page_id:
            # An explicit page_id requires a still-valid connection that owns it.
            if not self.connection.connection_id:
                raise self.site.client_exception('The connection is not longer valid', self._environ)
            if not self.connection.validate_page_id(page_id):
                raise self.site.client_exception('The referenced page_id is not valid in this connection',
                                                 self._environ)
            page_item = self.site.register.page(page_id)
            if not page_item:
                raise self.site.client_exception('The referenced page_id is cannot be found in site register',
                                                 self._environ)
            self.page_id = page_id
            return page_item
        else:
            # pageCall/externalCall always refer to an existing page.
            if self._call_handler_type in ('pageCall', 'externalCall'):
                raise self.site.client_exception('The request must reference a page_id', self._environ)
            if not self.connection.connection_id:
                self.connection.create()
            self.page_id = getUuid()
            workdate = kwargs.pop('_workdate_', None)# or datetime.date.today()
            return self.site.register.new_page(self.page_id, self, data=dict(pageArgs=kwargs, workdate=workdate))
    def get_call_handler(self, request_args, request_kwargs):
        """Resolve the callable that will serve this request.

        Also sets self._call_handler_type to one of 'plugin', 'externalCall',
        'pageCall' or 'root'.

        :param request_args: positional request arguments
        :param request_kwargs: request keyword arguments (inspected for
                               '_plugin', 'rpc' and 'method')
        :returns: the handler callable
        """
        if '_plugin' in request_kwargs:
            self._call_handler_type = 'plugin'
            return self.pluginhandler.get_plugin(request_kwargs['_plugin'], request_args=request_args,
                                                 request_kwargs=request_kwargs)
        elif 'rpc' in request_kwargs:
            # External rpc entry point: dispatch to an rpc_* public method.
            self._call_handler_type = 'externalCall'
            return self.getPublicMethod('rpc', request_kwargs.pop('rpc'))
        elif 'method' in request_kwargs:
            self._call_handler_type = 'pageCall'
            return self._rpcDispatcher
        else:
            # Plain page request: render the root page.
            self._call_handler_type = 'root'
            return self.rootPage
    # ##### BEGIN: PROXY DEFINITION ########
    def _get_frontend(self):
        # Lazily import and instantiate the frontend proxy; the module is
        # chosen from page_frontend or derived from the dojo version.
        if not hasattr(self, '_frontend'):
            if not hasattr(self, 'page_frontend') and hasattr(self, 'dojo_version'):
                self.page_frontend = 'dojo_%s' % self.dojo_version
            frontend_module = gnrImport('gnr.web.gnrwebpage_proxy.frontend.%s' % self.page_frontend)
            frontend_class = getattr(frontend_module, 'GnrWebFrontend')
            self._frontend = frontend_class(self)
        return self._frontend

    frontend = property(_get_frontend)
    # Lazily-created proxy subsystems, one per page instance.
    def _get_localizer(self):
        # Localization/translation proxy.
        if not hasattr(self, '_localizer'):
            self._localizer = GnrWebLocalizer(self)
        return self._localizer

    localizer = property(_get_localizer)

    def _get_debugger(self):
        # Debugging proxy.
        if not hasattr(self, '_debugger'):
            self._debugger = GnrWebDebugger(self)
        return self._debugger

    debugger = property(_get_debugger)

    def _get_utils(self):
        # Miscellaneous web utilities proxy.
        if not hasattr(self, '_utils'):
            self._utils = GnrWebUtils(self)
        return self._utils

    utils = property(_get_utils)

    def _get_rpc(self):
        # RPC dispatch proxy.
        if not hasattr(self, '_rpc'):
            self._rpc = GnrWebRpc(self)
        return self._rpc

    rpc = property(_get_rpc)

    def _get_pluginhandler(self):
        # Plugin loader proxy.
        if not hasattr(self, '_pluginhandler'):
            self._pluginhandler = GnrWebPluginHandler(self)
        return self._pluginhandler

    pluginhandler = property(_get_pluginhandler)

    def _get_jstools(self):
        # Javascript tooling proxy (compression/closure compilation).
        if not hasattr(self, '_jstools'):
            self._jstools = GnrWebJSTools(self)
        return self._jstools

    jstools = property(_get_jstools)
    def _get_db(self):
        # Lazily bind the application db, seeding its environment with this
        # page's store, workdate, locale, user info and any avatar extras.
        if not hasattr(self, '_db'):
            self._db = self.application.db
            self._db.updateEnv(storename=getattr(self, 'storename', None), workdate=self.workdate, locale=self.locale,
                               user=self.user, userTags=self.userTags, pagename=self.pagename)
            avatar = self.avatar
            if avatar:
                self._db.updateEnv(**self.avatar.extra_kwargs)
            storeDbEnv = self.pageStore().getItem('dbenv')
            if storeDbEnv:
                self._db.updateEnv(**dict(storeDbEnv))
            # Subclasses can contribute env entries via dbenv_* methods.
            for dbenv in [getattr(self, x) for x in dir(self) if x.startswith('dbenv_')]:
                kwargs = dbenv() or {}
                self._db.updateEnv(**kwargs)
        return self._db

    db = property(_get_db)
    def _get_workdate(self):
        # Fall back to today when the page store has no workdate yet.
        return self._workdate or datetime.date.today()

    def _set_workdate(self, workdate):
        # Persist the workdate in the page store and propagate it to the db env.
        with self.pageStore() as store:
            store.setItem('workdate', workdate)
        self._workdate = workdate
        self.db.workdate = workdate

    workdate = property(_get_workdate, _set_workdate)
    ###### END: PROXY DEFINITION #########
    def __call__(self):
        """Internal method dispatcher: run the begin/end lifecycle around the
        resolved call handler and return its result."""
        self.onInit() ### kept for compatibility
        self._onBegin()
        args = self._call_args
        kwargs = self._call_kwargs
        result = self._call_handler(*args, **kwargs)
        self._onEnd()
        return result
    def _rpcDispatcher(self, method=None, mode='bag', **kwargs):
        """Dispatch a page rpc call: parse kwargs, apply client changes, check
        authorization, execute and serialize the result via result_<mode>."""
        parameters = self.site.parse_kwargs(kwargs, workdate=self.workdate)
        self._lastUserEventTs = parameters.pop('_lastUserEventTs', None)
        self.site.handle_clientchanges(self.page_id, parameters)
        auth = AUTH_OK
        # Login/close are always allowed; everything else is auth-checked.
        if not method in ('doLogin', 'onClosePage'):
            auth = self._checkAuth(method=method, **parameters)
        if self.isDeveloper():
            # Let exceptions surface with full tracebacks for developers.
            result = self.rpc(method=method, _auth=auth, **parameters)
        else:
            try:
                result = self.rpc(method=method, _auth=auth, **parameters)
            except GnrException, e:
                # Report the domain error to the client instead of a 500.
                self.rpc.error = str(e)
                result = None
        result_handler = getattr(self.rpc, 'result_%s' % mode.lower())
        return_result = result_handler(result)
        return return_result
    def _checkAuth(self, method=None, **parameters):
        """Check the caller's authorization against the page auth tags.

        :returns: AUTH_OK, AUTH_NOT_LOGGED, AUTH_FORBIDDEN or the string
                  'EXPIRED' for a non-main call with no logged user
        """
        pageTags = self.pageAuthTags(method=method, **parameters)
        if not pageTags:
            # Pages with no auth tags are public.
            return AUTH_OK
        if not self.connection.loggedUser:
            if method != 'main':
                return 'EXPIRED'
            return AUTH_NOT_LOGGED
        if not self.application.checkResourcePermission(pageTags, self.userTags):
            return AUTH_FORBIDDEN
        return AUTH_OK
    def mixinComponent(self, pkg, *path):
        """Mix a page component into this page.

        :param pkg: the package hosting the component
        :param \*path: path fragments locating the component resource
        """
        self.site.resource_loader.mixinPageComponent(self, pkg, *path)

    @property
    def isGuest(self):
        """True when the current user is the connection's guest user."""
        return self.user == self.connection.guestname
    def rpc_doLogin(self, login=None, guestName=None, **kwargs):
        """Service method. Set user's avatar into its connection if:

        * the user exists and his password is correct, or
        * the user is a guest.

        :param login: dict with 'user' and 'password'; its 'message' key is
                      filled with '' on success or 'invalid login' on failure
        :param guestName: name of a guest user to log in without a password
        :returns: the (login, loginPars) pair
        """
        loginPars = {}
        if guestName:
            avatar = self.application.getAvatar(guestName)
        else:
            avatar = self.application.getAvatar(login['user'], password=login['password'],
                                                authenticate=True, page=self, **kwargs)
        if avatar:
            self.site.onAuthenticated(avatar)
            self.avatar = avatar
            #self.connection.change_user(user=avatar.user,user_id=avatar.user_id,user_name=avatar.user_name,
            #                            user_tags=avatar.user_tags)
            self.connection.change_user(avatar)
            # Push the avatar to the client datastore.
            self.setInClientData('gnr.avatar', Bag(avatar.as_dict()))
            login['message'] = ''
            loginPars = avatar.loginPars
            loginPars.update(avatar.extra_kwargs)
        else:
            login['message'] = 'invalid login'
        return (login, loginPars)
    def onInit(self):
        """Legacy per-request hook, called at the start of __call__; subclasses
        may override (kept for compatibility)."""
        # subclass hook
        pass

    def onIniting(self, request_args, request_kwargs):
        """Hook called in the early stages of page initialization, after the
        page item has been resolved.

        :param request_args: positional request arguments
        :param request_kwargs: request keyword arguments
        """
        pass
    def onSaving(self, recordCluster, recordClusterAttr, resultAttr=None):
        """Hook called before a record cluster is saved; subclasses may override."""
        pass

    def onSaved(self, record, resultAttr=None, **kwargs):
        """Hook called after a record has been saved; subclasses may override."""
        pass

    def onDeleting(self, recordCluster, recordClusterAttr):
        """Hook called before a record cluster is deleted; subclasses may override."""
        pass

    def onDeleted(self, record):
        """Hook called after a record has been deleted; subclasses may override."""
        pass

    def onBegin(self):
        """Hook called at the start of request handling; subclasses may override."""
        pass

    def _onBegin(self):
        # Run the subclass hook first, then notify subscribers.
        self.onBegin()
        self._publish_event('onBegin')

    def onEnd(self):
        """Hook called at the end of request handling; subclasses may override."""
        pass

    def getService(self, service_type):
        """Return the site service registered for *service_type*."""
        return self.site.getService(service_type)

    def _onEnd(self):
        # Notify subscribers first, then run the subclass hook.
        self._publish_event('onEnd')
        self.onEnd()
    def collectClientDatachanges(self):
        """Collect the data changes to be sent back to the client.

        :returns: the site datachanges for this page, merged with the
                  page-local ones accumulated during this request
        """
        self._publish_event('onCollectDatachanges')
        result = self.site.get_datachanges(self.page_id, user=self.user,
                                           local_datachanges=self.local_datachanges)
        return result

    def _subscribe_event(self, event, caller):
        # The caller must expose an event_<event>() callback.
        assert hasattr(caller, 'event_%s' % event)
        self._event_subscribers.setdefault(event, []).append(caller)

    def _publish_event(self, event):
        # Invoke event_<event>() on every subscriber, in subscription order.
        for subscriber in self._event_subscribers.get(event, []):
            getattr(subscriber, 'event_%s' % event)()
def rootPage(self, **kwargs):
"""add???
:returns: add???
"""
self.charset = 'utf-8'
arg_dict = self.build_arg_dict(**kwargs)
tpl = self.pagetemplate
if not isinstance(tpl, basestring):
tpl = '%s.%s' % (self.pagename, 'tpl')
lookup = TemplateLookup(directories=self.tpldirectories, output_encoding=self.charset,
encoding_errors='replace')
try:
mytemplate = lookup.get_template(tpl)
except:
raise GnrWebPageException("No template %s found in %s" % (tpl, str(self.tpldirectories)))
self.htmlHeaders()
return mytemplate.render(mainpage=self, **arg_dict)
    def _set_locale(self, val):
        self._locale = val

    def _get_locale(self): # TODO IMPLEMENT DEFAULT FROM APP OR AVATAR
        # Connection locale wins; otherwise the first Accept-Language entry,
        # falling back to 'en'.
        if not hasattr(self, '_locale'):
            self._locale = self.connection.locale or self.request.headers.get('Accept-Language', 'en').split(',')[
                0] or 'en'
        return self._locale

    locale = property(_get_locale, _set_locale)

    def rpc_changeLocale(self, locale):
        """Store the (lower-cased) locale on the current connection.

        :param locale: the new locale code, e.g. 'en' or 'it-IT'
        """
        self.connection.locale = locale.lower()
    def toText(self, obj, locale=None, format=None, mask=None, encoding=None, dtype=None):
        """Convert *obj* to localized text via gnrstring.toText.

        :param obj: the value to convert
        :param locale: locale code; defaults to the page locale
        :param format: optional format spec, passed through
        :param mask: optional mask, passed through
        :param encoding: optional output encoding, passed through
        :param dtype: accepted for signature compatibility but NOT forwarded
                      to toText — NOTE(review): possibly an oversight; confirm
                      against gnrstring.toText before forwarding it
        :returns: the textual representation of *obj*
        """
        locale = locale or self.locale
        return toText(obj, locale=locale, format=format, mask=mask, encoding=encoding)

    def getUuid(self):
        """Return a new unique identifier (delegates to gnrlang.getUuid)."""
        return getUuid()
    def addHtmlHeader(self, tag, innerHtml='', **kwargs):
        """Queue a custom HTML element for the document <head>.

        :param tag: the element tag name (e.g. 'meta', 'script')
        :param innerHtml: the element's inner HTML, default ''
        :param \*\*kwargs: rendered as tag attributes (values are not escaped)
        """
        attrString = ' '.join(['%s="%s"' % (k, str(v)) for k, v in kwargs.items()])
        self._htmlHeaders.append('<%s %s>%s</%s>' % (tag, attrString, innerHtml, tag))

    def htmlHeaders(self):
        """Hook called just before template rendering so subclasses can add
        custom headers via addHtmlHeader(); default does nothing."""
        pass
    def _get_pageArgs(self):
        # The original request kwargs saved in the page store at page creation.
        return self.pageStore().getItem('pageArgs') or {}

    pageArgs = property(_get_pageArgs)

    def _(self, txt):
        # Translate strings with the '!!' localization marker; everything
        # else is returned untouched.
        if txt.startswith('!!'):
            txt = self.localizer.translateText(txt[2:])
        return txt
    def getPublicMethod(self, prefix, method):
        """Resolve a public method by prefix and name.

        'proxy.sub' names look up '<prefix>_sub' on the proxy (loading it as a
        plugin when not yet bound); plain names look up '<prefix>_<method>'
        on the page itself.

        :param prefix: the method namespace prefix, e.g. 'rpc'
        :param method: the public method name, optionally 'proxyname.method'
        :returns: the bound method, or None if a proxy lookup fails
        """
        handler = None
        if '.' in method:
            proxy_name, submethod = method.split('.', 1)
            proxy_object = getattr(self, proxy_name, None)
            if not proxy_object:
                proxy_class = self.pluginhandler.get_plugin(proxy_name)
                proxy_object = proxy_class(self)
            if proxy_object:
                handler = getattr(proxy_object, '%s_%s' % (prefix, submethod), None)
        else:
            handler = getattr(self, '%s_%s' % (prefix, method))
        return handler
def build_arg_dict(self, **kwargs):
"""add???
:returns: add???
"""
gnr_static_handler = self.site.getStatic('gnr')
gnrModulePath = gnr_static_handler.url(self.gnrjsversion)
arg_dict = {}
self.frontend.frontend_arg_dict(arg_dict)
arg_dict['customHeaders'] = self._htmlHeaders
arg_dict['charset'] = self.charset
arg_dict['filename'] = self.pagename
arg_dict['pageMode'] = 'wsgi_10'
arg_dict['baseUrl'] = self.site.home_uri
if self.debugopt:
kwargs['debugopt'] = self.debugopt
if self.isDeveloper:
kwargs['isDeveloper'] = True
arg_dict['startArgs'] = toJson(kwargs)
arg_dict['page_id'] = self.page_id or getUuid()
arg_dict['bodyclasses'] = self.get_bodyclasses()
arg_dict['gnrModulePath'] = gnrModulePath
gnrimports = self.frontend.gnrjs_frontend()
if self.site.debug or self.isDeveloper():
arg_dict['genroJsImport'] = [self.mtimeurl(self.gnrjsversion, 'js', '%s.js' % f) for f in gnrimports]
elif self.site.config['closure_compiler']:
jsfiles = [gnr_static_handler.path(self.gnrjsversion, 'js', '%s.js' % f) for f in gnrimports]
arg_dict['genroJsImport'] = [self.jstools.closurecompile(jsfiles)]
else:
jsfiles = [gnr_static_handler.path(self.gnrjsversion, 'js', '%s.js' % f) for f in gnrimports]
arg_dict['genroJsImport'] = [self.jstools.compress(jsfiles)]
arg_dict['css_genro'] = self.get_css_genro()
arg_dict['js_requires'] = [x for x in [self.getResourceUri(r, 'js', add_mtime=True) for r in self.js_requires]
if x]
css_path, css_media_path = self.get_css_path()
arg_dict['css_requires'] = css_path
arg_dict['css_media_requires'] = css_media_path
return arg_dict
    def mtimeurl(self, *args):
        """Return the static url for *args* with an mtime query string for
        cache busting.

        :returns: the url, e.g. '.../file.js?mtime=123456789'
        """
        gnr_static_handler = self.site.getStatic('gnr')
        fpath = gnr_static_handler.path(*args)
        mtime = os.stat(fpath).st_mtime
        url = gnr_static_handler.url(*args)
        url = '%s?mtime=%0.0f' % (url, mtime)
        return url

    def homeUrl(self):
        """Return the site's home uri."""
        return self.site.home_uri

    def packageUrl(self, *args, **kwargs):
        """Return a page url inside a package (default: this page's package).

        :param pkg: optional keyword, the target packageId
        :returns: the package page url
        """
        pkg = kwargs.get('pkg', self.packageId)
        return self.site.pkg_page_url(pkg, *args)

    def getDomainUrl(self, path='', **kwargs):
        """Return a site-rooted url for *path*, with kwargs as query string.

        :param path: path below the site home uri, default ''
        :returns: the url
        """
        params = urllib.urlencode(kwargs)
        path = '%s/%s' % (self.site.home_uri.rstrip('/'), path.lstrip('/'))
        if params:
            path = '%s?%s' % (path, params)
        return path

    def externalUrl(self, path, **kwargs):
        """Return an absolute url for *path* relative to the current request,
        with kwargs appended as query string.

        :param path: target path; '' means the site uri
        :returns: the absolute url
        """
        params = urllib.urlencode(kwargs)
        #path = os.path.join(self.homeUrl(), path)
        if path == '': path = self.siteUri
        path = self._request.relative_url(path)
        if params:
            path = '%s?%s' % (path, params)
        return path

    def externalUrlToken(self, path, _expiry=None, _host=None, method='root', **kwargs):
        """Return an external url carrying a one-off access token
        (requires the 'sys' package).

        :param path: the target path
        :param _expiry: optional token expiry
        :param _host: optional allowed host restriction
        :param method: the page method the token grants, default 'root'
        :returns: the external url with a 'gnrtoken' parameter
        """
        assert 'sys' in self.site.gnrapp.packages
        external_token = self.db.table('sys.external_token').create_token(path, expiry=_expiry, allowed_host=_host,
                                                                          method=method, parameters=kwargs,
                                                                          exec_user=self.user)
        return self.externalUrl(path, gnrtoken=external_token)
    def get_bodyclasses(self): # ancora necessario _common_d11?
        """Return the css classes for the document body: theme, package,
        page name plus any subclass-provided 'bodyclasses'."""
        return '%s _common_d11 pkg_%s page_%s %s' % (
            self.frontend.theme or '', self.packageId, self.pagename, getattr(self, 'bodyclasses', ''))

    def get_css_genro(self):
        """Return the genro css urls grouped by media, each with mtime
        cache busting applied."""
        css_genro = self.frontend.css_genro_frontend()
        for media in css_genro.keys():
            css_genro[media] = [self.mtimeurl(self.gnrjsversion, 'css', '%s.css' % f) for f in css_genro[media]]
        return css_genro

    def _get_domSrcFactory(self):
        # The frontend-specific dom source factory.
        return self.frontend.domSrcFactory

    domSrcFactory = property(_get_domSrcFactory)

    def newSourceRoot(self):
        """Create and return a new dom source root for this page."""
        return self.domSrcFactory.makeRoot(self)

    def newGridStruct(self, maintable=None):
        """Allow to create a Grid Struct.

        :param maintable: the table to which the struct refers to. For more information,
                          check the :ref:`webpages_maintable` section. Default value is ``None``
        :returns: the Grid Struct
        """
        return GnrGridStruct.makeRoot(self, maintable=maintable)
    def _get_folders(self):
        # Folders relevant to this page: the site pages dir, the site root
        # and the directory containing this page's source file.
        return {'pages': self.site.pages_dir,
                'site': self.site.site_path,
                'current': os.path.dirname(self.filepath)}

    def subscribeTable(self,table,subscribe=True):
        """Add or remove *table* from this page's subscribed tables.

        :param table: the table identifier
        :param subscribe: True to subscribe, False to unsubscribe
        """
        with self.pageStore() as store:
            subscribed_tables = store.register_item['subscribed_tables']
            if subscribe:
                if not table in subscribed_tables:
                    subscribed_tables.append(table)
            else:
                if table in subscribed_tables:
                    subscribed_tables.remove(table)
    def pageStore(self, page_id=None, triggered=True):
        """Return the register store of a page.

        :param page_id: the target page id; default is this page's id
        :param triggered: boolean, whether store changes fire triggers. Default ``True``
        :returns: the page store
        """
        page_id = page_id or self.page_id
        return self.site.register.pageStore(page_id, triggered=triggered)

    def connectionStore(self, connection_id=None, triggered=True):
        """Return the register store of a connection.

        :param connection_id: the target connection id; default is this page's connection
        :param triggered: boolean, whether store changes fire triggers. Default ``True``
        :returns: the connection store
        """
        connection_id = connection_id or self.connection_id
        return self.site.register.connectionStore(connection_id, triggered=triggered)

    def userStore(self, user=None, triggered=True):
        """Return the register store of a user.

        :param user: the target user; default is the current user
        :param triggered: boolean, whether store changes fire triggers. Default ``True``
        :returns: the user store
        """
        user = user or self.user
        return self.site.register.userStore(user, triggered=triggered)

    def rpc_setStoreSubscription(self, storename=None, client_path=None, active=True):
        """Activate/deactivate a client-path subscription on a store.

        :param storename: the store to subscribe to
        :param client_path: the client datastore path to keep in sync
        :param active: boolean, subscription state. Default ``True``
        """
        with self.pageStore() as store:
            subscriptions = store.getItem('_subscriptions')
            if subscriptions is None:
                subscriptions = dict()
                store.setItem('_subscriptions', subscriptions)
            storesub = subscriptions.setdefault(storename, {})
            pathsub = storesub.setdefault(client_path, {})
            pathsub['on'] = active
    def clientPage(self, page_id=None):
        """Return a handler for a (possibly remote) client page.

        :param page_id: the target page id; default is this page's id
        :returns: a ClientPageHandler
        """
        return ClientPageHandler(self, page_id or self.page_id)

    def _get_pkgapp(self):
        # The application package this page belongs to, cached per instance.
        if not hasattr(self, '_pkgapp'):
            self._pkgapp = self.site.gnrapp.packages[self.packageId]
        return self._pkgapp

    pkgapp = property(_get_pkgapp)

    def _get_sitepath(self):
        # Filesystem path of the site.
        return self.site.site_path

    sitepath = property(_get_sitepath)

    def _get_siteUri(self):
        # Home uri of the site.
        return self.site.home_uri

    siteUri = property(_get_siteUri)

    def _get_parentdirpath(self):
        # Resolved parent directory path, computed lazily (EAFP caching).
        try:
            return self._parentdirpath
        except AttributeError:
            self._parentdirpath = self.resolvePath()
            return self._parentdirpath

    parentdirpath = property(_get_parentdirpath)

    @property
    def subscribedTablesDict(self):
        """Return a dict of subscribed tables. Every element is a list
        of *page_id*'s that subscribe that page"""
        if not hasattr(self, '_subscribedTablesDict'):
            self._subscribedTablesDict = self.db.table('adm.served_page').subscribedTablesDict()
        return self._subscribedTablesDict
    @property
    def application(self):
        """The GnrApp this site serves."""
        return self.site.gnrapp

    @property
    def app(self):
        """The web app handler proxy, created lazily."""
        if not hasattr(self, '_app'):
            self._app = GnrWebAppHandler(self)
        return self._app

    @property
    def btc(self):
        """The server batch proxy, created lazily."""
        if not hasattr(self, '_btc'):
            self._btc = GnrWebBatch(self)
        return self._btc

    @property
    def catalog(self):
        """The application catalog."""
        return self.application.catalog

    @property
    def userTags(self):
        """The authorization tags of the current connection's user."""
        return self.connection.user_tags

    @property
    def user(self):
        """The current connection's user."""
        return self.connection.user

    @property
    def connection_id(self):
        """The current connection's id."""
        return self.connection.connection_id
    def _set_avatar(self, avatar):
        self._avatar = avatar

    def _get_avatar(self):
        # Guests have no avatar; otherwise build it lazily from the
        # connection's user, tags and extra attributes.
        if self.isGuest:
            return
        if not hasattr(self, '_avatar'):
            connection = self.connection
            avatar_extra = connection.avatar_extra or dict()
            self._avatar = self.application.getAvatar(self.user, tags=connection.user_tags, page=self,
                                                      **avatar_extra)
        return self._avatar

    avatar = property(_get_avatar, _set_avatar)

    def checkPermission(self, pagepath, relative=True):
        """Check the user's permission against this page's auth tags.

        :param pagepath: accepted but currently unused — NOTE(review): the
                         check only uses self.auth_tags, not *pagepath*
        :param relative: accepted but currently unused. Default ``True``
        :returns: True if the user's tags satisfy the page's auth tags
        """
        return self.application.checkResourcePermission(self.auth_tags, self.userTags)
def get_css_theme(self):
"""Get the css_theme. The css_theme get is the one defined the :ref:`siteconfig_gui` tag of
your :ref:`sites_siteconfig` or in a single :ref:`webpages_webpages` through the
:ref:`webpages_css_theme` webpage variable
:returns: the css theme
"""
return self.css_theme
def get_css_path(self, requires=None):
    """Resolve the css requirements of this page into serveable urls.

    :param requires: optional list of css requirements; if None the page's
                     own :ref:`webpages_css_requires` list is used
    :returns: tuple ``(css_requires, css_media_requires)`` -- a list of urls
              plus a dict mapping a css media qualifier to its list of urls
    """
    requires = [r for r in (requires or self.css_requires) if r]
    css_theme = self.get_css_theme()
    if css_theme:
        # fix: use the locally resolved theme instead of re-reading
        # self.css_theme, so subclasses overriding get_css_theme() work
        requires.append('themes/%s' % css_theme)
    self.onServingCss(requires)
    filepath = os.path.splitext(self.filepath)[0]
    css_requires = []
    css_media_requires = {}
    for css in requires:
        # a requirement may carry a media qualifier, e.g. 'print:print'
        if ':' in css:
            css, media = css.split(':')
        else:
            media = None
        csslist = self.site.resource_loader.getResourceList(self.resourceDirs, css, 'css')
        if csslist:
            css_uri_list = [self.getResourceUri(css, add_mtime=True) for css in csslist]
            if media:
                css_media_requires.setdefault(media, []).extend(css_uri_list)
            else:
                css_requires.extend(css_uri_list)
    # page-specific stylesheet living next to the page source file
    if os.path.isfile('%s.css' % filepath):
        css_requires.append(self.getResourceUri('%s.css' % filepath, add_mtime=True))
    if os.path.isfile(self.resolvePath('%s.css' % self.pagename)):
        css_requires.append('%s.css' % self.pagename)
    return css_requires, css_media_requires
def getResourceList(self, path, ext=None):
    """Return every matching resource file path found in this page's resource dirs."""
    return self.site.resource_loader.getResourceList(self.resourceDirs, path, ext=ext)

def getResourceUriList(self, path, ext=None, add_mtime=False):
    """Return the urls of every matching resource (see :meth:`getResourceList`)."""
    flist = self.getResourceList(path, ext=ext)
    return [self.resolveResourceUri(f, add_mtime=add_mtime) for f in flist]

def getResourceExternalUriList(self, path, ext=None, add_mtime=False):
    """Return absolute (external) urls of every matching resource."""
    flist = self.getResourceList(path, ext=ext)
    return [self.externalUrl(self.resolveResourceUri(f, add_mtime=add_mtime)) for f in flist]

def onServingCss(self, css_requires):
    """Hook called before css requirements are resolved.

    :param css_requires: mutable list of requirements; subclasses may extend it
    """
    pass

def getResourceUri(self, path, ext=None, add_mtime=False):
    """Return the url of the first matching resource, or None when not found."""
    fpath = self.getResource(path, ext=ext)
    if not fpath:
        return
    return self.resolveResourceUri(fpath, add_mtime=add_mtime)
def resolveResourceUri(self, fpath, add_mtime=False):
    """Map an absolute resource file path onto the url of the static area
    that serves it.

    Tries, in order: the site folder, the pages folder, this page's package
    folder, then every registered resource dir. Returns None when no static
    area contains the path.

    :param fpath: absolute filesystem path of the resource
    :param add_mtime: if True append ``?mtime=`` for client cache busting
    """
    url = None
    if fpath.startswith(self.site.site_path):
        uripath = fpath[len(self.site.site_path):].lstrip('/').split(os.path.sep)
        url = self.site.getStatic('site').url(*uripath)
    elif fpath.startswith(self.site.pages_dir):
        uripath = fpath[len(self.site.pages_dir):].lstrip('/').split(os.path.sep)
        url = self.site.getStatic('pages').url(*uripath)
    elif fpath.startswith(self.package_folder):
        uripath = fpath[len(self.package_folder):].lstrip('/').split(os.path.sep)
        url = self.site.getStatic('pkg').url(self.packageId, *uripath)
    else:
        for rsrc, rsrc_path in self.site.resources.items():
            if fpath.startswith(rsrc_path):
                uripath = fpath[len(rsrc_path):].lstrip('/').split(os.path.sep)
                url = self.site.getStatic('rsrc').url(rsrc, *uripath)
                break
    if url and add_mtime:
        mtime = os.stat(fpath).st_mtime
        url = '%s?mtime=%0.0f' % (url, mtime)
    return url
def getResource(self, path, ext=None):
    """Return the first matching resource file path, or None."""
    result = self.site.resource_loader.getResourceList(self.resourceDirs, path, ext=ext)
    if result:
        return result[0]

def setPreference(self, path, data, pkg=''):
    """Store an application preference (delegates to the site)."""
    self.site.setPreference(path, data, pkg=pkg)

def getPreference(self, path, pkg='', dflt=''):
    """Read an application preference (delegates to the site)."""
    return self.site.getPreference(path, pkg=pkg, dflt=dflt)

def getUserPreference(self, path, pkg='', dflt='', username=''):
    """Read a per-user preference (delegates to the site)."""
    return self.site.getUserPreference(path, pkg=pkg, dflt=dflt, username=username)

def rpc_getUserPreference(self, path='*'):
    # rpc entry point exposed to the client.
    return self.getUserPreference(path)

def rpc_getAppPreference(self, path='*'):
    # rpc entry point exposed to the client.
    return self.getPreference(path)

def setUserPreference(self, path, data, pkg='', username=''):
    """Store a per-user preference (delegates to the site)."""
    self.site.setUserPreference(path, data, pkg=pkg, username=username)
def setInClientData(self, path, value=None, attributes=None, page_id=None, filters=None,
                    fired=False, reason=None, public=False, replace=False):
    """Push one datachange (or a Bag of datachanges) towards client pages.

    Changes addressed to this very page are queued in ``local_datachanges``;
    changes for other pages are written into their page store.

    NOTE(review): the ``replace`` flag is accepted but never used here --
    confirm whether it should be forwarded.
    """
    if filters:
        pages = self.site.register.pages(filters=filters)
    else:
        pages = [page_id]
    for page_id in pages:
        if not public and (page_id is None or page_id == self.page_id):
            # local delivery: queue on this page
            if isinstance(path, Bag):
                # a Bag carries many changes; '_client_path' addresses each one
                changeBag = path
                for changeNode in changeBag:
                    attr = changeNode.attr
                    datachange = ClientDataChange(attr.pop('_client_path'), changeNode.value,
                                                  attributes=attr, fired=attr.pop('fired', None))
                    self.local_datachanges.append(datachange)
            else:
                datachange = ClientDataChange(path, value, reason=reason, attributes=attributes, fired=fired)
                self.local_datachanges.append(datachange)
        else:
            # remote delivery: write into the target page's store
            with self.clientPage(page_id=page_id) as clientPage:
                clientPage.set(path, value, attributes=attributes, reason=reason, fired=fired)

def rpc_sendMessageToClient(self, message, pageId=None, filters=None, msg_path=None):
    # rpc: broadcast a message to one page or to pages matching filters.
    self.site.sendMessageToClient(message, pageId=pageId, filters=filters, origin=self, msg_path=msg_path)
def _get_package_folder(self):
    # 'webpages' folder of the package this page belongs to (cached).
    if not hasattr(self, '_package_folder'):
        self._package_folder = os.path.join(self.site.gnrapp.packages[self.packageId].packageFolder, 'webpages')
    return self._package_folder

package_folder = property(_get_package_folder)
def rpc_main(self, _auth=AUTH_OK, debugger=None, **kwargs):
    """Build and return the root DOM source for this page.

    :param _auth: authentication outcome (AUTH_OK, AUTH_NOT_LOGGED, other = forbidden)
    :param debugger: accepted for rpc compatibility; not used here
    :returns: tuple ``(page, pageattr)``; ``page`` is None when redirecting to login
    """
    page = self.domSrcFactory.makeRoot(self)
    self._root = page
    pageattr = {}
    #try :
    if True:  # remnant of a disabled try/except (see dead 'else' below)
        if _auth == AUTH_OK:
            avatar = self.avatar #force get_avatar
            if hasattr(self, 'main_root'):
                # a custom main_root replaces the whole standard scaffolding
                self.main_root(page, **kwargs)
                return (page, pageattr)
            #page.script('genro.dom.windowTitle("%s")' % self.windowTitle())
            # client-side dbselect caching: user preference wins over site config
            dbselect_cache = None
            if self.user:
                dbselect_cache = self.getUserPreference(path='cache.dbselect', pkg='sys')
            if dbselect_cache is None:
                dbselect_cache = self.site.config['client_cache?dbselect']
            if dbselect_cache:
                page.script('genro.cache_dbselect = true')
            page.data('gnr.windowTitle', self.windowTitle())
            page.dataRemote('gnr._pageStore','getPageStoreData',cacheTime=1)
            page.dataController("genro.publish('dbevent_'+_node.label,{'changelist':change.getItem('#0'),pkeycol:_node.attr.pkeycol});",change="^gnr.dbchanges")
            page.data('gnr.homepage', self.externalUrl(self.site.homepage))
            page.data('gnr.homeFolder', self.externalUrl(self.site.home_uri).rstrip('/'))
            page.data('gnr.homeUrl', self.site.home_uri)
            #page.data('gnr.userTags', self.userTags)
            page.data('gnr.locale', self.locale)
            page.data('gnr.pagename', self.pagename)
            if not self.isGuest:
                page.dataRemote('gnr.user_preference', 'getUserPreference')
                page.dataRemote('gnr.app_preference', 'getAppPreference')
            page.dataController('genro.dlg.serverMessage("gnr.servermsg");', _fired='^gnr.servermsg')
            page.dataController("genro.getDataNode(nodePath).refresh(true);",
                                nodePath="^gnr.serverEvent.refreshNode")
            page.dataController('if(url){genro.download(url)};', url='^gnr.downloadurl')
            page.dataController("""if(url){
genro.download(url,null,"print")
};""", url='^gnr.printurl')
            page.dataController('console.log(msg);funcCreate(msg)();', msg='^gnr.servercode')
            root = page.borderContainer(design='sidebar', height='100%', nodeId='_gnrRoot',
                                        _class='hideSplitter notvisible',
                                        regions='^_clientCtx.mainBC')
            # optional typekit web fonts support
            typekit_code = self.site.config['gui?typekit']
            if typekit_code:
                page.script(src="http://use.typekit.com/%s.js" % typekit_code)
                page.dataController("try{Typekit.load();}catch(e){}", _onStart=True)
            #self.debugger.right_pane(root)
            #self.debugger.bottom_pane(root)
            self.mainLeftContent(root, region='left', splitter=True, nodeId='gnr_main_left')
            root.div(_class='trash_drop', dropTarget=True, dropTypes='trashable', id='trash_drop',
                     onDrop_trashable="""var sourceNode=genro.src.nodeBySourceNodeId(dropInfo.dragSourceInfo._id);
if(sourceNode&&sourceNode.attr.onTrashed){
funcCreate(sourceNode.attr.onTrashed,'data,dropInfo',sourceNode)(data,dropInfo);
}""")
            root.div(id='auxDragImage')
            root.div(id='srcHighlighter')
            # controller toggling the left sidebar open/closed
            root.dataController("""
var new_status = main_left_set_status[0];
new_status = new_status=='toggle'? !current_status:new_status;
if(new_status!=current_status){
SET _clientCtx.mainBC.left?show=new_status;
left_width = left_width || '';
if(new_status && left_width.replace('px','')<200){
SET _clientCtx.mainBC.left = '200px';
}
PUBLISH main_left_status = new_status;
}
""", subscribe_main_left_set_status=True,
                                current_status='=_clientCtx.mainBC.left?show', left_width='=_clientCtx.mainBC.left')
            rootwdg = self.rootWidget(root, region='center', nodeId='_pageRoot')
            self.main(rootwdg, **kwargs)
            self.onMainCalls()
            if self.avatar:
                page.data('gnr.avatar', Bag(self.avatar.as_dict()))
            page.data('gnr.polling.user_polling', self.user_polling)
            page.data('gnr.polling.auto_polling', self.auto_polling)
            page.dataController("""genro.user_polling = user_polling;
genro.auto_polling = auto_polling;
""",
                                user_polling="^gnr.polling.user_polling",
                                auto_polling="^gnr.polling.auto_polling",
                                _onStart=True)
            if self._pendingContextToCreate:
                self._createContext(root, self._pendingContextToCreate)
            if self.user:
                self.site.pageLog('open')
        elif _auth == AUTH_NOT_LOGGED:
            # not authenticated: redirect to the login page
            loginUrl = self.application.loginUrl()
            if not loginUrl.startswith('/'):
                loginUrl = self.site.home_uri + loginUrl
            page = None
            if loginUrl:
                pageattr['redirect'] = loginUrl
            else:
                pageattr['redirect'] = self.resolvePathAsUrl('simplelogin.py', folder='*common')
        else:
            # authenticated but not authorized
            self.forbiddenPage(page, **kwargs)
        return (page, pageattr)
    #except Exception,err:
    else:
        # NOTE(review): dead branch -- 'if True' never falls through, and
        # 'err' is undefined; leftover of the disabled try/except above.
        return (self._errorPage(err), pageattr)
def onMain(self): #You CAN override this !
    # Hook invoked after the page's main() has been built.
    pass

def rpc_getPageStoreData(self):
    # rpc: return the whole content of this page's server-side store.
    return self.pageStore().getItem('')

def mainLeftTop(self, pane):
    # Hook: fill the top pane of the left sidebar (default: empty).
    pass
def mainLeftContent(self, parentBC, **kwargs):
    """Build the left plugin sidebar: a dock of plugin buttons plus one
    stack page per entry in ``plugin_list`` (comma-separated)."""
    plugin_list = getattr(self, 'plugin_list', None)
    if not plugin_list:
        return
    bc = parentBC.borderContainer(_class='main_left_tab', width='200px', datapath='gnr.main_container.left',
                                  **kwargs)
    # NOTE(review): nodeId 'gnr_main_left_bottom' is repeated here while the
    # DOM id is 'gnr_main_left_top' -- looks like a copy/paste slip; confirm.
    self.mainLeftTop(bc.contentPane(region='top', nodeId='gnr_main_left_bottom', id='gnr_main_left_top'))
    bottom = bc.contentPane(region='bottom', nodeId='gnr_main_left_bottom', id='gnr_main_left_bottom',
                            overflow='hidden')
    plugin_dock = bottom.slotBar(slots='*,%s,*' %self.plugin_list)
    sc = bc.stackContainer(selectedPage='^.selected', region='center', nodeId='gnr_main_left_center')
    # publish <plugin>_on/_off when the selected plugin changes
    sc.dataController("""genro.publish(page+'_'+(selected?'on':'off'));
genro.dom.setClass(genro.domById('plugin_block_'+page),'selected_plugin',selected);
""",
                      subscribe_gnr_main_left_center_selected=True)
    # re-publish the current plugin state when the sidebar opens/closes
    sc.dataController("""
var command= main_left_status[0]?'open':'close';
genro.publish(page+'_'+(command=='open'?'on':'off'));
""",
                      subscribe_main_left_status=True,
                      page='=.selected')
    for plugin in self.plugin_list.split(','):
        # each plugin must provide a mainLeft_<plugin> builder method
        cb = getattr(self, 'mainLeft_%s' % plugin)
        assert cb, 'Plugin %s not found' % plugin
        cb(sc.contentPane(pageName=plugin))
        sc.dataController("""
PUBLISH main_left_set_status = true;
SET .selected=plugin;
""", **{'subscribe_%s_open' % plugin: True, 'plugin': plugin})
        getattr(plugin_dock,plugin).div(_class='plugin_block %s_icon' % plugin,
                                        connect_onclick="""SET .selected="%s";""" % plugin,
                                        id='plugin_block_%s' % plugin)
def onMainCalls(self):
    """Run every ``onMain_<name>`` hook defined on this page, then ``onMain``."""
    hooks = [attrname for attrname in dir(self) if attrname.startswith('onMain_')]
    for attrname in hooks:
        getattr(self, attrname)()
    self.onMain()
def rpc_onClosePage(self, **kwargs):
    # rpc: notify the site that the client closed this page.
    self.site.onClosePage(self)
    #self.pageFolderRemove()

def pageFolderRemove(self):
    # Remove this page's temp folder; errors are ignored (second arg = ignore_errors).
    shutil.rmtree(os.path.join(self.connectionFolder, self.page_id), True)

def rpc_callTableScript(self, table=None, respath=None, class_name='Main', downloadAs=None, **kwargs):
    """Call a script from a table's resources (i.e. ``_resources/tables/<table>/<respath>``).
    This is typically used to customize prints and batch jobs for a particular installation.
    """
    if downloadAs:
        import mimetypes
        # serve the script output as a downloadable attachment
        self.response.content_type = mimetypes.guess_type(downloadAs)[0]
        self.response.add_header("Content-Disposition", str("attachment; filename=%s" % downloadAs))
    return self.site.callTableScript(page=self, table=table, respath=respath, class_name=class_name,
                                     downloadAs=downloadAs, **kwargs)
def rpc_remoteBuilder(self, handler=None, **kwargs):
    """rpc: build a DOM fragment with a public 'remote_*' method and return it."""
    handler = self.getPublicMethod('remote', handler)
    if handler:
        pane = self.newSourceRoot()
        self._root = pane
        # strip the '_path' suffix from kwargs, dropping the leading char of the value
        # NOTE(review): mutating kwargs while iterating items() is only safe
        # on Python 2 (items() returns a list) -- confirm target runtime.
        for k, v in kwargs.items():
            if k.endswith('_path'):
                kwargs[k[0:-5]] = kwargs.pop(k)[1:]
        handler(pane, **kwargs)
        return pane

def rpc_ping(self, **kwargs):
    # rpc: keep-alive no-op.
    pass
def rpc_setInServer(self, path, value=None, pageId=None, **kwargs):
    # rpc: write a value into a page's server-side store.
    with self.pageStore(pageId) as store:
        store.setItem(path, value)

#def rpc_setViewColumns(self, contextTable=None, gridId=None, relation_path=None, contextName=None,
#                       query_columns=None, **kwargs):
#    self.app.setContextJoinColumns(table=contextTable, contextName=contextName, reason=gridId,
#                                   path=relation_path, columns=query_columns)

def rpc_getPrinters(self):
    # rpc: list available printers via the 'print' service (None if absent).
    print_handler = self.getService('print')
    if print_handler:
        return print_handler.getPrinters()

def rpc_getPrinterAttributes(self, printer_name):
    # rpc: attributes of a real printer ('PDF' is virtual and has none).
    if printer_name and printer_name != 'PDF':
        attributes = self.getService('print').getPrinterAttributes(printer_name)
        return attributes
def rpc_relationExplorer(self, table=None, prevRelation='', prevCaption='',
                         omit='', **kwargs):
    """rpc: return the relation tree of *table* as a Bag whose relation
    nodes carry javascript remote resolvers for lazy client expansion."""
    if not table:
        return Bag()

    def buildLinkResolver(node, prevRelation, prevCaption):
        # Decorate each node with caption info; relation nodes become lazy
        # JS resolvers that call back into this same rpc.
        nodeattr = node.getAttr()
        if not 'name_long' in nodeattr:
            raise Exception(nodeattr) # FIXME: use a specific exception class
        nodeattr['caption'] = nodeattr.pop('name_long')
        nodeattr['fullcaption'] = concat(prevCaption, self._(nodeattr['caption']), '/')
        if nodeattr.get('one_relation'):
            nodeattr['_T'] = 'JS'
            if nodeattr['mode'] == 'O':
                relpkg, reltbl, relfld = nodeattr['one_relation'].split('.')
            else:
                relpkg, reltbl, relfld = nodeattr['many_relation'].split('.')
            jsresolver = "genro.rpc.remoteResolver('relationExplorer',{table:%s, prevRelation:%s, prevCaption:%s, omit:%s})"
            node.setValue(jsresolver % (
                jsquote("%s.%s" % (relpkg, reltbl)), jsquote(concat(prevRelation, node.label)),
                jsquote(nodeattr['fullcaption']), jsquote(omit)))

    result = self.db.relationExplorer(table=table,
                                      prevRelation=prevRelation,
                                      omit=omit,
                                      **kwargs)
    result.walk(buildLinkResolver, prevRelation=prevRelation, prevCaption=prevCaption)
    return result
def rpc_setInClientPage(self, pageId=None, changepath=None, value=None, fired=None, attr=None, reason=None):
    """rpc: queue a datachange on another page's client store.

    Fix: ``ClientPageHandler.set`` accepts ``attributes``, not ``attr`` --
    the previous call raised TypeError whenever this rpc was invoked.
    """
    with self.clientPage(pageId) as clientPage:
        clientPage.set(changepath, value, attributes=attr, reason=reason, fired=fired)
def getAuxInstance(self, name):
    # Shortcut to the site's auxiliary instance registry.
    return self.site.getAuxInstance(name)

def _get_connectionFolder(self):
    # Folder holding files scoped to the current connection.
    return os.path.join(self.site.allConnectionsFolder, self.connection_id)

connectionFolder = property(_get_connectionFolder)

def _get_userFolder(self):
    # Folder holding per-user files ('Anonymous' when not logged in).
    user = self.user or 'Anonymous'
    return os.path.join(self.site.allUsersFolder, user)

userFolder = property(_get_userFolder)

def temporaryDocument(self, *args):
    # Path of a temp document scoped to this page (under the 'temp' subfolder).
    return self.connectionDocument('temp', *args)

def temporaryDocumentUrl(self, *args, **kwargs):
    # Url of a temp document scoped to this page.
    return self.connectionDocumentUrl('temp', *args, **kwargs)
def connectionDocument(self, *args):
    """Return the path of a document inside this page's connection folder,
    creating intermediate directories as needed."""
    filepath = os.path.join(self.connectionFolder, self.page_id, *args)
    folder = os.path.dirname(filepath)
    if not os.path.isdir(folder):
        os.makedirs(folder)
    return filepath

def userDocument(self, *args):
    """Return the path of a document inside the user's folder,
    creating intermediate directories as needed."""
    filepath = os.path.join(self.userFolder, *args)
    folder = os.path.dirname(filepath)
    if not os.path.isdir(folder):
        os.makedirs(folder)
    return filepath

def connectionDocumentUrl(self, *args, **kwargs):
    """Return the url of a document in the connection static area; extra
    keyword arguments are encoded by the static handler's kwargs_url."""
    if kwargs:
        return self.site.getStatic('conn').kwargs_url(self.connection_id, self.page_id, *args, **kwargs)
    else:
        return self.site.getStatic('conn').url(self.connection_id, self.page_id, *args)
def userDocumentUrl(self, *args, **kwargs):
    """Return the url of a document in the current user's static folder.

    Extra keyword arguments are forwarded to the static handler's kwargs_url.
    Fix: the original forwarded the undefined name ``kwargs_url`` instead of
    ``kwargs``, raising NameError whenever keyword arguments were passed
    (compare :meth:`connectionDocumentUrl`).
    """
    if kwargs:
        return self.site.getStatic('user').kwargs_url(self.user, *args, **kwargs)
    else:
        return self.site.getStatic('user').url(self.user, *args)
def isLocalizer(self):
    """Return whether the connected user carries the translator tag ``_TRD_``."""
    tags = self.userTags
    return tags and ('_TRD_' in tags)
def isDeveloper(self):
    """Return whether the connected user carries the developer tag ``_DEV_``."""
    tags = self.userTags
    return tags and ('_DEV_' in tags)
def addToContext(self, value=None, serverpath=None, clientpath=None):
    """Queue a value to be injected into the page context at build time."""
    entry = (value, serverpath, clientpath or serverpath)
    self._pendingContextToCreate.append(entry)
def _createContext(self, root, pendingContext):
    """Flush queued context values: persist them in the page store and emit
    matching 'data' nodes in the page source for the client."""
    with self.pageStore() as store:
        for value, serverpath, clientpath in pendingContext:
            store.setItem(serverpath, value)
    for value, serverpath, clientpath in pendingContext:
        root.child('data', __cls='bag', content=value, path=clientpath, _serverpath=serverpath)
def setJoinCondition(self, ctxname, target_fld='*', from_fld='*', condition=None, one_one=None, applymethod=None,
                     **kwargs):
    """Define a join condition in a given context (*ctxname*).

    The condition is used to limit the automatic selection of related records.
    If target_fld AND from_fld equal '*' the condition is an additional where
    clause added to any selection. Example::

        self.setJoinCondition('mycontext',
                              target_fld='mypkg.rows.document_id',
                              from_fld='mypkg.document.id',
                              condition="mypkg.rows.date <= :join_wkd",
                              join_wkd="^mydatacontext.foo.bar.mydate", one_one=False)

    :param ctxname: name of the context of the main record
    :param target_fld: the many table column of the relation, '*' means the main table of the selection
    :param from_fld: the one table column of the relation, '*' means the main table of the selection
    :param condition: the sql condition
    :param one_one: the result is returned as a record instead of as a selection.
                    If one_one is True the given condition MUST always return a single record
    :param applymethod: a page method to be called after selecting the related records
    :param kwargs: named parameters used in the condition. Can be static values or can be read
                   from the context at query time: a parameter starting with '^' is a path in
                   the context where the value is stored. If a parameter is the name of a
                   defined method, the method is called and its result used as the value;
                   such methods must be named 'ctxname_methodname'.
    """
    path = '%s.%s_%s' % (ctxname, target_fld.replace('.', '_'), from_fld.replace('.', '_'))
    value = Bag(dict(target_fld=target_fld, from_fld=from_fld, condition=condition, one_one=one_one,
                     applymethod=applymethod, params=Bag(kwargs)))
    self.addToContext(value=value, serverpath='_sqlctx.conditions.%s' % path,
                      clientpath='gnr.sqlctx.conditions.%s' % path)

#def setJoinColumns(self, ctxname, target_fld, from_fld, joincolumns):
#    path = '%s.%s_%s' % (ctxname, target_fld.replace('.', '_'), from_fld.replace('.', '_'))
#    serverpath = '_sqlctx.columns.%s' % path
#    clientpath = 'gnr.sqlctx.columns.%s' % path
#    self.addToContext(value=joincolumns, serverpath=serverpath, clientpath=clientpath)
def _prepareGridStruct(self,source=None,table=None,gridId=None):
    """Normalize *source* into a grid structure.

    Accepts a ready-made Bag, a callable that builds the struct, the name of
    a ``baseView_*`` handler on the table, or an explicit column list; falls
    back to the table's base view columns. Returns None when nothing can be
    built (e.g. the callable flagged a missing table).
    """
    struct = None
    if isinstance(source, Bag):
        return source
    if gridId and not source:
        # look up a conventionally named '<gridId>_struct' builder on the page
        source = getattr(self, '%s_struct' % gridId,None)
    if callable(source):
        struct = self.newGridStruct(maintable=table)
        source(struct)
        if hasattr(struct,'_missing_table'):
            struct = None
        return struct
    if table:
        tblobj = self.db.table(table)
        if source:
            handler = getattr(tblobj, 'baseView_%s' % source,None)
            columns = handler() if handler else source
        else:
            columns= tblobj.baseViewColumns()
        struct = self.newGridStruct(maintable=table)
        rows = struct.view().rows()
        rows.fields(columns)
    return struct

def rpc_getGridStruct(self,struct,table):
    # rpc wrapper around _prepareGridStruct.
    return self._prepareGridStruct(struct,table)
def lazyBag(self, bag, name=None, location='page:resolvers'):
    """Freeze *bag* to a pickle file in the static area and return a lazy
    resolver that reloads it on demand (see :class:`LazyBagResolver`)."""
    freeze_path = self.site.getStaticPath(location, name, autocreate=-1)
    bag.makePicklable()
    bag.pickle('%s.pik' % freeze_path)
    return LazyBagResolver(resolverName=name, location=location, _page=self, sourceBag=bag)
##### BEGIN: DEPRECATED METHODS ###
@deprecated
def _get_config(self):
    # Deprecated: use self.site.config directly.
    return self.site.config

config = property(_get_config)

@deprecated
def log(self, msg):
    # Deprecated: use the debugger service directly.
    self.debugger.log(msg)
##### END: DEPRECATED METHODS #####
class LazyBagResolver(BagResolver):
    """Resolver that loads a pickled Bag lazily, one level at a time.

    Nested Bags are replaced with further LazyBagResolver instances so that
    the structure is only unpickled as it is drilled into.
    """
    classKwargs = {'cacheTime': -1,
                   'readOnly': False,
                   'resolverName': None,
                   '_page': None,
                   'sourceBag': None,
                   'location': None,
                   'path': None,
                   'filter': None}
    classArgs = ['path']

    def load(self):
        # Unpickle on first use, then serve the sub-bag at self.path.
        if not self.sourceBag:
            self.getSource()
        sourceBag = self.sourceBag[self.path]
        if self.filter:
            # filter syntax 'attrname=value' matches node attributes;
            # a bare token matches node labels
            flt,v=splitAndStrip(self.filter,'=',fixed=2)
            if v:
                cb=lambda n: flt in n.attr and v in n.attr[flt]
            else:
                cb=lambda n: flt in n.label
            return sourceBag.filter(cb)
        result = Bag()
        for n in sourceBag:
            value = n.value
            if value and isinstance(value, Bag):
                # nested bags become lazy resolvers themselves
                path = n.label if not self.path else '%s.%s' % (self.path, n.label)
                value = LazyBagResolver(path=path, resolverName=self.resolverName, location=self.location)
            result.setItem(n.label, value, n.attr)
        return result

    def getSource(self):
        # Load the pickled bag from the static area.
        filepath = self._page.site.getStaticPath(self.location, self.resolverName)
        self.sourceBag = Bag('%s.pik' % filepath)
class GnrMakoPage(GnrWebPage):
    """GnrWebPage specialization served through the mako templating plugin."""

    def onPreIniting(self, request_args, request_kwargs):
        # Force the mako plugin and point it at this page's template.
        request_kwargs['_plugin'] = 'mako'
        request_kwargs['mako_path'] = self.mako_template()
class ClientPageHandler(object):
    """Proxy for performing actions on a (possibly different) client page.

    Must be used as a context manager: entering opens the target page's
    store, after which :meth:`set` queues datachanges on it.
    """

    def __init__(self, parent_page, page_id=None):
        self.parent_page = parent_page
        # default to acting on the parent page itself
        self.page_id = page_id or parent_page.page_id
        self.pageStore = self.parent_page.pageStore(page_id=self.page_id)
        self.store = None

    def set(self, path, value, attributes=None, fired=None, reason=None, replace=False):
        # Queue a datachange on the target page's store (requires 'with').
        self.store.set_datachange(path, value, attributes=attributes, fired=fired, reason=reason, replace=replace)

    def __enter__(self):
        self.store = self.pageStore.__enter__()
        return self

    def __exit__(self, type, value, tb):
        self.pageStore.__exit__(type, value, tb)

    def jsexec(self, path, value, **kwargs):
        # Placeholder: not implemented yet.
        pass

    def copyData(self, srcpath, dstpath=None, page_id=None):
        """Placeholder: not implemented yet.

        Intended usage (examples translated from the original Italian):
        self.clientPage(page_id="nknnn").copyData('foo.bar','spam.egg') # copy onto MY page
        self.clientPage(page_id="nknnn").copyData('foo.bar','bub.egg',page_id='xxxxxx') # copy onto page xxxxxx
        self.clientPage(page_id="nknnn").copyData('foo.bar','bub.egg',pageStore=True) # copy onto my pageStore
        self.clientPage(page_id="nknnn").copyData('foo.bar','bub.egg',page_id='xxxxxx' ,pageStore=True) # copy onto page xxxx's pageStore
        """
        pass
class ClientDataChange(object):
    """A single pending change to be pushed to the client-side datastore.

    Attributes:
        path: datastore path the change applies to.
        value: new value for the path.
        attributes: optional dict of node attributes.
        reason: optional tag describing why the change happened.
        fired: True when the change is a one-shot event trigger.
        change_ts: timestamp of the change (defaults to now).
        change_idx: optional ordering index.
        delete: True when the change removes the node.
    """

    def __init__(self, path, value, attributes=None, reason=None, fired=False,
                 change_ts=None, change_idx=None, delete=False, **kwargs):
        self.path = path
        self.reason = reason
        self.value = value
        self.attributes = attributes
        self.fired = fired
        self.change_ts = change_ts or datetime.datetime.now()
        self.change_idx = change_idx
        self.delete = delete

    def __eq__(self, other):
        # Two changes are "equal" when they target the same path with the
        # same reason and fired mode: such changes may be coalesced (update).
        return self.path == other.path and self.reason == other.reason and self.fired == other.fired

    def update(self, other):
        """Merge *other* into this change (used to coalesce equal changes)."""
        if hasattr(self.value, 'update') and hasattr(other.value, 'update'):
            # dict-like values are merged in place
            self.value.update(other.value)
        else:
            self.value = other.value
        if other.attributes:
            self.attributes = self.attributes or dict()
            self.attributes.update(other.attributes)

    def __str__(self):
        return "Datachange path:%s, reason:%s, value:%s, attributes:%s" % (
            self.path, self.reason, self.value, self.attributes)

    # fix: __repr__ previously duplicated __str__'s body verbatim; alias it
    # so the two representations cannot drift apart.
    __repr__ = __str__
"""Class to perform over-sampling using SMOTE and cleaning using Tomek
links."""
# Authors: Guillaume Lemaitre <g.lemaitre58@gmail.com>
# Christos Aridas
# License: MIT
from __future__ import division
from sklearn.base import clone
from sklearn.utils import check_X_y
from ..base import BaseSampler
from ..over_sampling import SMOTE
from ..over_sampling.base import BaseOverSampler
from ..under_sampling import TomekLinks
from ..utils import check_target_type
from ..utils import Substitution
from ..utils._docstring import _random_state_docstring
@Substitution(
    sampling_strategy=BaseOverSampler._sampling_strategy_docstring,
    random_state=_random_state_docstring)
class SMOTETomek(BaseSampler):
    """Class to perform over-sampling using SMOTE and cleaning using
    Tomek links.

    Combine over- and under-sampling using SMOTE and Tomek links.

    Read more in the :ref:`User Guide <combine>`.

    Parameters
    ----------
    {sampling_strategy}

    {random_state}

    smote : object, optional (default=SMOTE())
        The :class:`imblearn.over_sampling.SMOTE` object to use. If not given,
        a :class:`imblearn.over_sampling.SMOTE` object with default parameters
        will be given.

    tomek : object, optional (default=TomekLinks(sampling_strategy='all'))
        The :class:`imblearn.under_sampling.TomekLinks` object to use. If not
        given, a :class:`imblearn.under_sampling.TomekLinks` object with
        sampling strategy='all' will be given.

    n_jobs : int, optional (default=1)
        The number of threads to open if possible.
        Will not apply to smote and tomek given by the user.

    ratio : str, dict, or callable
        .. deprecated:: 0.4
           Use the parameter ``sampling_strategy`` instead. It will be removed
           in 0.6.

    Notes
    -----
    The method is presented in [1]_.

    Supports multi-class resampling. Refer to SMOTE and TomekLinks regarding
    the scheme which is used.

    See also
    --------
    SMOTEENN : Over-sample using SMOTE followed by under-sampling using Edited
        Nearest Neighbours.

    References
    ----------
    .. [1] G. Batista, B. Bazzan, M. Monard, "Balancing Training Data for
       Automated Annotation of Keywords: a Case Study," In WOB, 10-18, 2003.

    Examples
    --------

    >>> from collections import Counter
    >>> from sklearn.datasets import make_classification
    >>> from imblearn.combine import \
SMOTETomek # doctest: +NORMALIZE_WHITESPACE
    >>> X, y = make_classification(n_classes=2, class_sep=2,
    ... weights=[0.1, 0.9], n_informative=3, n_redundant=1, flip_y=0,
    ... n_features=20, n_clusters_per_class=1, n_samples=1000, random_state=10)
    >>> print('Original dataset shape %s' % Counter(y))
    Original dataset shape Counter({{1: 900, 0: 100}})
    >>> smt = SMOTETomek(random_state=42)
    >>> X_res, y_res = smt.fit_resample(X, y)
    >>> print('Resampled dataset shape %s' % Counter(y_res))
    Resampled dataset shape Counter({{0: 900, 1: 900}})

    """

    _sampling_type = 'over-sampling'

    def __init__(self,
                 sampling_strategy='auto',
                 random_state=None,
                 smote=None,
                 tomek=None,
                 n_jobs=1,
                 ratio=None):
        super(SMOTETomek, self).__init__()
        self.sampling_strategy = sampling_strategy
        self.random_state = random_state
        self.smote = smote
        self.tomek = tomek
        self.n_jobs = n_jobs
        self.ratio = ratio

    def _validate_estimator(self):
        """Private function to validate the SMOTE and Tomek objects,
        creating default instances when they are not supplied."""
        if self.smote is not None:
            if isinstance(self.smote, SMOTE):
                # clone to avoid mutating the user-supplied estimator
                self.smote_ = clone(self.smote)
            else:
                raise ValueError('smote needs to be a SMOTE object.'
                                 'Got {} instead.'.format(type(self.smote)))
        # Otherwise create a default SMOTE
        else:
            self.smote_ = SMOTE(
                sampling_strategy=self.sampling_strategy,
                random_state=self.random_state,
                n_jobs=self.n_jobs,
                ratio=self.ratio)

        if self.tomek is not None:
            if isinstance(self.tomek, TomekLinks):
                self.tomek_ = clone(self.tomek)
            else:
                raise ValueError('tomek needs to be a TomekLinks object.'
                                 'Got {} instead.'.format(type(self.tomek)))
        # Otherwise create a default TomekLinks
        else:
            self.tomek_ = TomekLinks(
                sampling_strategy='all',
                n_jobs=self.n_jobs)

    def _fit_resample(self, X, y):
        # Over-sample with SMOTE first, then clean the result with Tomek links.
        self._validate_estimator()
        y = check_target_type(y)
        X, y = check_X_y(X, y, accept_sparse=['csr', 'csc'])
        self.sampling_strategy_ = self.sampling_strategy

        X_res, y_res = self.smote_.fit_resample(X, y)
        return self.tomek_.fit_resample(X_res, y_res)
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides literal I{marshaller} classes.
"""
from logging import getLogger
from suds import *
from suds.mx import *
from suds.mx.core import Core
from suds.mx.typer import Typer
from suds.resolver import GraphResolver, Frame
from suds.sax.element import Element
from suds.sudsobject import Factory
log = getLogger(__name__)
#
# Add typed extensions to marshalled Content objects:
# type = The expected xsd type
# real = The 'true' XSD type
# ancestry = The 'type' ancestry
#
Content.extensions.append('type')
Content.extensions.append('real')
Content.extensions.append('ancestry')
class Typed(Core):
    """
    A I{typed} marshaller.
    This marshaller is semi-typed as needed to support both
    I{document/literal} and I{rpc/literal} soap message styles.
    @ivar schema: An xsd schema.
    @type schema: L{xsd.schema.Schema}
    @ivar resolver: A schema type resolver.
    @type resolver: L{GraphResolver}
    """

    def __init__(self, schema, xstq=True):
        """
        @param schema: A schema object
        @type schema: L{xsd.schema.Schema}
        @param xstq: The B{x}ml B{s}chema B{t}ype B{q}ualified flag indicates
            that the I{xsi:type} attribute values should be qualified by namespace.
        @type xstq: bool
        """
        Core.__init__(self)
        self.schema = schema
        self.xstq = xstq
        self.resolver = GraphResolver(self.schema)
def reset(self):
    # Clear resolver state between marshalling runs.
    self.resolver.reset()
def start(self, content):
    """Begin marshalling *content*.

    Primes both the content and the resolver with XSD type information,
    then translates and sorts the value based on that type.

    @return: False (after popping the resolver) when the content is
        optional and empty, signalling the caller to skip it; True otherwise.
    """
    log.debug('starting content:\n%s', content)
    if content.type is None:
        name = content.tag
        if name.startswith('_'):
            # a leading '_' marks an attribute: resolve it as '@name'
            name = '@'+name[1:]
        content.type = self.resolver.find(name, content.value)
        if content.type is None:
            raise TypeNotFound(content.tag)
    else:
        known = None
        if isinstance(content.value, Object):
            known = self.resolver.known(content.value)
            if known is None:
                # fix: the original format string had no %s placeholder, so
                # the value argument was never rendered into the message
                log.debug('object has no type information: %s', content.value)
                known = content.type
        frame = Frame(content.type, resolved=known)
        self.resolver.push(frame)
    frame = self.resolver.top()
    content.real = frame.resolved
    content.ancestry = frame.ancestry
    self.translate(content)
    self.sort(content)
    if self.skip(content):
        log.debug('skipping (optional) content:\n%s', content)
        self.resolver.pop()
        return False
    else:
        return True
def suspend(self, content):
    # Suspend to process list content: pop the 'list' frame off the
    # resolver's stack so the list items can be marshalled.
    self.resolver.pop()

def resume(self, content):
    # Resume processing list content: push the 'list' frame back
    # onto the resolver stack.
    self.resolver.push(Frame(content.type))
def end(self, parent, content):
    """Finish processing *content*, popping its frame off the resolver.

    Raises when the ending content does not match the top of the resolver
    stack, since list processing plays games with the stack.
    """
    log.debug('ending content:\n%s', content)
    current = self.resolver.top().type
    if current == content.type:
        self.resolver.pop()
    else:
        # NOTE: Python 2 raise syntax -- this module predates Python 3.
        raise Exception, \
            'content (end) mismatch: top=(%s) cont=(%s)' % \
            (current, content)
def node(self, content):
    """Create an XML node for *content*, namespace-qualified as defined
    by the schema (elementFormDefault)."""
    ns = content.type.namespace()
    if content.type.form_qualified:
        node = Element(content.tag, ns=ns)
        node.addPrefix(ns[0], ns[1])
    else:
        node = Element(content.tag)
    self.encode(node, content)
    log.debug('created - node:\n%s', node)
    return node

def setnil(self, node, content):
    # Set the node nil only when the XSD type permits it.
    if content.type.nillable:
        node.setnil()
def setdefault(self, node, content):
    """Apply the XSD default value (if any) to *node*; return the default."""
    default = content.type.default
    if default is None:
        pass
    else:
        node.setText(default)
    return default

def optional(self, content):
    # Content is optional when its own type, or any type in its
    # ancestry, is declared optional.
    if content.type.optional():
        return True
    for a in content.ancestry:
        if a.optional():
            return True
    return False
def encode(self, node, content):
    """
    Add soap encoding (xsi:type) information to I{node}, but only
    when the resolved type is derived by extension.  The xsi:type
    value is namespace-qualified only when self.xstq is set.
    @param node: The XML node to annotate.
    @param content: The content being marshalled.
    """
    if content.type.any():
        # xs:any carries no schema type to advertise.
        return
    if not content.real.extension():
        return
    if content.type.resolve() == content.real:
        # Declared and real type agree; no annotation needed.
        return
    ns = None
    name = content.real.name
    if self.xstq:
        ns = content.real.namespace('ns1')
    Typer.manual(node, name, ns)
def skip(self, content):
    """
    Get whether to skip this I{content}.
    Should be skipped when the content is optional
    and either the value=None or the value is an empty list.
    @param content: The content to skip.
    @type content: L{Object}
    @return: True if content is to be skipped.
    @rtype: bool
    """
    # Only optional content may ever be skipped.
    if self.optional(content):
        v = content.value
        if v is None:
            return True
        if isinstance(v, (list,tuple)) and len(v) == 0:
            return True
    return False
def optional(self, content):
    # NOTE(review): this is an exact duplicate of the optional() defined
    # earlier in this class; being later, THIS definition is the one that
    # takes effect.  Consider removing one of the two.
    if content.type.optional():
        return True
    for a in content.ancestry:
        if a.optional():
            return True
    return False
def translate(self, content):
    """
    Translate using the XSD type information.
    Python I{dict} is translated to a suds object. Most
    importantly, primitive values are translated from python
    types to XML types using the XSD type.
    @param content: The content to translate.
    @type content: L{Object}
    @return: self
    @rtype: L{Typed}
    """
    v = content.value
    if v is None:
        return
    if isinstance(v, dict):
        # Promote the plain dict to a suds object named after the
        # real (resolved) type and remember the schema type in the
        # object's metadata for later marshalling.
        cls = content.real.name
        content.value = Factory.object(cls, v)
        md = content.value.__metadata__
        md.sxtype = content.type
        return
    v = content.real.translate(v, False)
    content.value = v
    return self
def sort(self, content):
    """
    Sort suds object attributes based on ordering defined
    in the XSD type information.
    @param content: The content to sort.
    @type content: L{Object}
    @return: self
    @rtype: L{Typed}
    """
    v = content.value
    if isinstance(v, Object):
        # Record the schema-defined ordering in the object's metadata;
        # non-Object values need no ordering.
        md = v.__metadata__
        md.ordering = self.ordering(content.real)
    return self
def ordering(self, type):
    """
    Get the attribute ordering defined in the specified
    XSD type information.
    @param type: An XSD type object.
    @type type: SchemaObject
    @return: An ordered list of attribute names.
    @rtype: list
    """
    names = []
    for child, ancestry in type.resolve():
        if child.name is None:
            continue
        # XML attributes are stored with a leading underscore.
        prefix = '_' if child.isattr() else ''
        names.append(prefix + child.name)
    return names
class Literal(Typed):
    """
    A I{literal} marshaller.
    This marshaller is semi-typed as needed to support both
    I{document/literal} and I{rpc/literal} soap message styles.
    All behavior is inherited from L{Typed}.
    """
    pass
pwong-mapr/private-hue | refs/heads/HUE-1096-abe | desktop/core/ext-py/Django-1.4.5/django/contrib/comments/views/moderation.py | 50 | from __future__ import absolute_import
from django import template
from django.conf import settings
from django.contrib import comments
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.comments import signals
from django.contrib.comments.views.utils import next_redirect, confirmation_view
from django.shortcuts import get_object_or_404, render_to_response
from django.views.decorators.csrf import csrf_protect
@csrf_protect
@login_required
def flag(request, comment_id, next=None):
    """
    Flags a comment. Confirmation on GET, action on POST.

    Templates: `comments/flag.html`,
    Context:
        comment
            the flagged `comments.comment` object
    """
    # Only comments belonging to the current site may be flagged.
    comment = get_object_or_404(comments.get_model(), pk=comment_id, site__pk=settings.SITE_ID)
    # Flag on POST
    if request.method == 'POST':
        perform_flag(request, comment)
        return next_redirect(request, next, flag_done, c=comment.pk)
    # Render a confirmation form on GET
    else:
        return render_to_response('comments/flag.html',
            {'comment': comment, "next": next},
            template.RequestContext(request)
        )
@csrf_protect
@permission_required("comments.can_moderate")
def delete(request, comment_id, next=None):
    """
    Deletes a comment. Confirmation on GET, action on POST. Requires the "can
    moderate comments" permission.

    Templates: `comments/delete.html`,
    Context:
        comment
            the flagged `comments.comment` object
    """
    comment = get_object_or_404(comments.get_model(), pk=comment_id, site__pk=settings.SITE_ID)
    # Delete on POST
    if request.method == 'POST':
        # Flag the comment as deleted instead of actually deleting it.
        perform_delete(request, comment)
        return next_redirect(request, next, delete_done, c=comment.pk)
    # Render a confirmation form on GET
    else:
        return render_to_response('comments/delete.html',
            {'comment': comment, "next": next},
            template.RequestContext(request)
        )
@csrf_protect
@permission_required("comments.can_moderate")
def approve(request, comment_id, next=None):
    """
    Approve a comment (that is, mark it as public and non-removed). Confirmation
    on GET, action on POST. Requires the "can moderate comments" permission.

    Templates: `comments/approve.html`,
    Context:
        comment
            the `comments.comment` object for approval
    """
    comment = get_object_or_404(comments.get_model(), pk=comment_id, site__pk=settings.SITE_ID)
    # Approve on POST (the original comment said "Delete", which was wrong)
    if request.method == 'POST':
        # Flag the comment as approved.
        perform_approve(request, comment)
        return next_redirect(request, next, approve_done, c=comment.pk)
    # Render a confirmation form on GET
    else:
        return render_to_response('comments/approve.html',
            {'comment': comment, "next": next},
            template.RequestContext(request)
        )
# The following functions actually perform the various flag/aprove/delete
# actions. They've been broken out into separate functions to that they
# may be called from admin actions.
def perform_flag(request, comment):
    """
    Actually perform the flagging of a comment from a request.

    Creates (or fetches) a SUGGEST_REMOVAL flag for the requesting user
    and emits the comment_was_flagged signal.
    """
    flag, created = comments.models.CommentFlag.objects.get_or_create(
        comment=comment,
        user=request.user,
        flag=comments.models.CommentFlag.SUGGEST_REMOVAL
    )
    signals.comment_was_flagged.send(
        sender=comment.__class__,
        comment=comment,
        flag=flag,
        created=created,
        request=request,
    )
def perform_delete(request, comment):
    """
    Perform the moderator "deletion" of a comment from a request.

    The comment row is NOT removed from the database; it is flagged with
    MODERATOR_DELETION and marked is_removed, then the
    comment_was_flagged signal is emitted.
    """
    flag, created = comments.models.CommentFlag.objects.get_or_create(
        comment=comment,
        user=request.user,
        flag=comments.models.CommentFlag.MODERATOR_DELETION
    )
    comment.is_removed = True
    comment.save()
    signals.comment_was_flagged.send(
        sender=comment.__class__,
        comment=comment,
        flag=flag,
        created=created,
        request=request,
    )
def perform_approve(request, comment):
    """
    Perform the moderator approval of a comment from a request.

    Flags the comment with MODERATOR_APPROVAL, marks it public and
    not-removed, and emits the comment_was_flagged signal.
    """
    flag, created = comments.models.CommentFlag.objects.get_or_create(
        comment=comment,
        user=request.user,
        flag=comments.models.CommentFlag.MODERATOR_APPROVAL,
    )
    comment.is_removed = False
    comment.is_public = True
    comment.save()
    signals.comment_was_flagged.send(
        sender=comment.__class__,
        comment=comment,
        flag=flag,
        created=created,
        request=request,
    )
# Confirmation views shown after a successful flag/delete/approve action
# (targets for next_redirect above).
flag_done = confirmation_view(
    template="comments/flagged.html",
    doc='Displays a "comment was flagged" success page.'
)
delete_done = confirmation_view(
    template="comments/deleted.html",
    doc='Displays a "comment was deleted" success page.'
)
approve_done = confirmation_view(
    template="comments/approved.html",
    doc='Displays a "comment was approved" success page.'
)
|
lepistone/vertical-ngo | refs/heads/8.0 | __unported__/logistic_requisition/wizard/transport_plan.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joël Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more description.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class logistic_requisition_source_transport_plan(orm.TransientModel):
    """Wizard: create a transport plan and assign it to the logistic
    requisition source lines selected in the context (``active_ids``).
    """
    _name = 'logistic.requisition.source.transport.plan'
    _description = 'Create a transport plan for logistic requisition source lines'
    _columns = {
        'date_eta': fields.date(
            'ETA',
            required=True,
            help="Estimated Date of Arrival"
            " if not set requisition ETA will be used"
        ),
        'date_etd': fields.date(
            'ETD',
            help="Estimated Date of Departure"
        ),
        'from_address_id': fields.many2one(
            'res.partner', 'From Address',
            required=True
        ),
        'to_address_id': fields.many2one(
            'res.partner', 'To Address',
            required=True
        ),
        'transport_estimated_cost': fields.float(
            'Transportation Estimated Costs',
            digits_compute=dp.get_precision('Account'),
        ),
        'transport_mode_id': fields.many2one(
            'transport.mode',
            string='Transport by',
        ),
        'note': fields.text('Remarks/Description'),
    }
    def _get_default_lines(self, cr, uid, context=None):
        """Return the browse records of the selected source lines
        (``active_ids`` from the context), or an empty list.

        NOTE(review): assumes ``context`` is a dict when called (a None
        context would crash on .get) — callers below guard for None first.
        """
        active_ids = context.get('active_ids')
        if not active_ids:
            return []
        req_source_obj = self.pool['logistic.requisition.source']
        lines = req_source_obj.browse(cr, uid, active_ids, context=context)
        return lines
    def _get_default_transport_mode(self, cr, uid, context=None):
        """Default transport mode: the preferred transport of the common
        requisition; nothing when lines span several requisitions.

        NOTE(review): ``lines[0]`` raises IndexError when no lines are
        selected — presumably the wizard is only opened with active_ids;
        verify against the action definition.
        """
        if context is None:
            return False
        lines = self._get_default_lines(cr, uid, context=context)
        if any(lines[0].requisition_id != x.requisition_id for x in lines):
            return False
        return lines[0].requisition_id.preferred_transport.id
    def _get_default_date_eta_from_lines(self, cr, uid, context=None):
        """Default ETA: the delivery date of the common requisition line;
        nothing when lines span several requisition lines."""
        if context is None:
            return False
        lines = self._get_default_lines(cr, uid, context=context)
        if any(lines[0].requisition_line_id != x.requisition_line_id for x in lines):
            return False
        return lines[0].requisition_line_id.date_delivery
    def _get_default_from_address(self, cr, uid, context=None):
        """Default source address: the common default source address of
        the selected lines; nothing when requisitions or addresses differ."""
        if context is None:
            return False
        lines = self._get_default_lines(cr, uid, context=context)
        if any(lines[0].requisition_id != x.requisition_id for x in lines):
            return False
        if any(lines[0].default_source_address != x.default_source_address
               for x in lines):
            return False
        return lines[0].default_source_address.id
    def _get_default_to_address(self, cr, uid, context=None):
        """Default destination address: the consignee shipping address of
        the common requisition; nothing when requisitions differ."""
        if context is None:
            return False
        lines = self._get_default_lines(cr, uid, context=context)
        if any(lines[0].requisition_id != x.requisition_id for x in lines):
            return False
        return lines[0].requisition_id.consignee_shipping_id.id
    _defaults = {'transport_mode_id': _get_default_transport_mode,
                 'date_eta': _get_default_date_eta_from_lines,
                 'from_address_id': _get_default_from_address,
                 'to_address_id': _get_default_to_address}
    def _prepare_transport_plan(self, cr, uid, form,
                                line_brs, context=None):
        """ Prepare the values for the creation of a transport plan
        from a selection of requisition lines.
        """
        vals = {'date_eta': form.date_eta,
                'date_etd': form.date_etd,
                'from_address_id': form.from_address_id.id,
                'to_address_id': form.to_address_id.id,
                'transport_estimated_cost': form.transport_estimated_cost,
                'transport_mode_id': form.transport_mode_id.id,
                'note': form.note,
                }
        return vals
    def create_and_affect(self, cr, uid, ids, context=None):
        """Create the transport plan from the wizard values and link it to
        all selected source lines, then open it.

        NOTE(review): returns None (closing the wizard silently) when no
        active_ids are in the context.
        """
        if context is None:
            context = {}
        source_ids = context.get('active_ids')
        if not source_ids:
            return
        assert len(ids) == 1, "One ID expected"
        form = self.browse(cr, uid, ids[0], context=context)
        transport_obj = self.pool.get('transport.plan')
        source_obj = self.pool.get('logistic.requisition.source')
        lines = source_obj.browse(cr, uid, source_ids, context=context)
        vals = self._prepare_transport_plan(cr, uid, form, lines, context=context)
        transport_id = transport_obj.create(cr, uid, vals, context=context)
        source_obj.write(cr, uid, source_ids,
                         {'transport_plan_id': transport_id,
                          'transport_applicable': True},
                         context=context)
        return self._open_transport_plan(cr, uid, transport_id, context=context)
    def _open_transport_plan(self, cr, uid, transport_id, context=None):
        """Return the act_window opening the created transport plan."""
        return {
            'name': _('Transport Plan'),
            'view_mode': 'form',
            'res_model': 'transport.plan',
            'res_id': transport_id,
            'target': 'current',
            'view_id': False,
            'context': {},
            'type': 'ir.actions.act_window',
        }
|
Mikkyo/heuristic_PRD | refs/heads/master | models/Task.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# --- Import Area
from enums.TaskStatus import TaskStatus
class Task:
    """A schedulable task run by a resource inside a container.

    Tracks the scheduling window (min/max start dates), progress,
    criticality and lifecycle status (see enums.TaskStatus).
    """
    # --- Attributes (private)
    _resource = None        # The resource assigned to this task
    _duration = None        # Task duration, in simulation time units
    _parent_tasks = None    # Predecessor tasks
    _child_tasks = None     # Successor tasks
    _min_start_date = None  # Earliest allowed start date
    _max_start_date = None  # Latest allowed start date
    _start_date = None      # Actual start date (-1 when not started)
    _progress = None        # Completion ratio in [0, 1]
    _criticality = None     # Criticality score in [0, 1]
    _container = None       # Container currently running the task
    _status = None          # Lifecycle status (a TaskStatus member)
    # --- Criticality score constants (Nicolas Gougeon heuristic)
    CRIT_KD = 0.2           # weight of the duration component
    CRIT_D_MAX = 6          # duration cap for the duration component
    CRIT_KR = 0.8           # weight of the slack (delay) component
    CRIT_R_MAX = 40         # slack cap for the delay component
    def __init__(self, resource, duration, parent_tasks, child_tasks):
        """
        Constructor
        :param resource: Resource Object
        :param duration: Integer
        :param parent_tasks: Task Array
        :param child_tasks: Task Array
        """
        self._resource = resource
        self._duration = duration
        self._parent_tasks = parent_tasks
        self._child_tasks = child_tasks
        # NOTE(review): these initial values disagree with
        # reset_start_dates() (min 0 / max 999999); presumably
        # reset_start_dates() is called before scheduling — verify.
        self._min_start_date = -1
        self._max_start_date = 0.0
        self._criticality = 0.0
        self._container = None
        self._status = TaskStatus.UNKNOWN
    def is_root(self):
        """
        Method to check if a task is a root
        :return Bool

        NOTE(review): returns True when the task HAS parents, which looks
        inverted w.r.t. the name; behavior kept as-is since callers may
        rely on it — confirm intended semantics.
        """
        return len(self._parent_tasks) > 0
    def is_leaf(self):
        """
        Method to check if a task is a leaf
        :return Bool

        NOTE(review): returns True when the task HAS children — see the
        remark on is_root().
        """
        return len(self._child_tasks) > 0
    def is_ready(self):
        """
        Method to check if a task is ready to be scheduled
        :return Bool
        """
        return self._status is TaskStatus.READY
    def is_running(self):
        """
        Method to check if a task is running
        :return Bool
        """
        return self._status is TaskStatus.RUNNING
    def is_finished(self):
        """
        Method to check if a task is finished
        :return Bool
        """
        return self._status is TaskStatus.FINISHED
    def is_current_root(self):
        """
        Method to check if a task is the current root
        :return Bool

        NOTE(review): ordering comparisons require TaskStatus to support
        '<'/'>' (e.g. an IntEnum) — confirm against enums.TaskStatus.
        """
        # 'and' replaces the original bitwise '&': same result on bools,
        # with short-circuiting.
        return (self._status < TaskStatus.FINISHED) and \
               (self._status > TaskStatus.PENDING)
    def reset_start_dates(self):
        """
        Method to reset all start dates
        """
        self._min_start_date = 0
        self._max_start_date = 999999
    def set_min_start_date(self, date):
        """
        Method to set the minimum starting date
        :param date: Integer
        :return None if it's finished
        """
        if self.is_finished():
            return
        self._min_start_date = max(self._min_start_date, date)
        # Propagate the constraint forward to all successors.
        for child in self._child_tasks:
            child.set_min_start_date(self._min_start_date + self._duration)
    def set_max_start_date(self, date):
        """
        Method to set the maximum starting date
        :param date: Integer
        :return None if it's finished
        """
        if self.is_finished():
            return
        self._max_start_date = min(self._max_start_date, date)
        # Propagate the constraint backward to all predecessors.
        for parent in self._parent_tasks:
            parent.set_max_start_date(self._max_start_date - self._duration)
    def start(self, container, simulation_date):
        """
        Method to set the beginning of a task
        :param container: Container Object
        :param simulation_date: Integer
        """
        self._start_date = simulation_date
        self._container = container
        self._status = TaskStatus.RUNNING
    def preempt(self):
        """
        Method to preempt a task
        """
        if self.is_running():
            self._start_date = -1
            self._progress = 0.0
            self._criticality = 0.0
            self._container = None
            self._status = TaskStatus.READY
    def finish(self, simulation_date):
        """
        Method to finish a task
        :param simulation_date: Integer
        """
        self._criticality = 0.0
        self._container.remove_task(self, simulation_date)
        self._container = None
        self._status = TaskStatus.FINISHED
        # Bug fix: the original assigned 'child.status', creating a NEW
        # attribute instead of updating '_status', so children never
        # became READY (is_ready() reads '_status').
        for child in self._child_tasks:
            child._status = TaskStatus.READY
    def compute_criticality(self):
        """
        Method to compute criticality
        """
        if self.is_finished():
            return
        # This calculation is based on Nicolas Gougeon heuristic
        criticality = self.CRIT_KD * min(self._duration / self.CRIT_D_MAX, 1.0)
        criticality += self.CRIT_KR * max(
            1.0 - (self._max_start_date - self._min_start_date) / self.CRIT_R_MAX, 0.0)
        # Normalize to [0, 1].
        self._criticality = criticality / (self.CRIT_KD + self.CRIT_KR)
    def update(self, simulation_date):
        """
        Method to update the task
        :param simulation_date: Integer
        """
        if self.is_running():
            self._progress = (simulation_date - self._start_date) / self._duration
            self._container.update(simulation_date)  # Update the container priority
            # Bug fix: the original compared the elapsed time with 'is'
            # (identity), which only works for small cached ints; '=='
            # is the correct value comparison.
            if (simulation_date - self._start_date) == self._duration:
                self.finish(simulation_date)
# --- Getters/Setters
|
veltzer/pycmdtools | refs/heads/master | pycmdtools/static.py | 1 | """ version which can be consumed from within the module """
VERSION_STR = "0.0.73"
DESCRIPTION = "pycmdtools is set of useful command line tools written in python"
APP_NAME = "pycmdtools"
LOGGER_NAME = "pycmdtools"
|
cedk/odoo | refs/heads/8.0 | addons/account_check_writing/wizard/__init__.py | 437 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_check_batch_printing
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
foreni-packages/golismero | refs/heads/master | thirdparty_libs/nltk/draw/tree.py | 17 | # Natural Language Toolkit: Graphical Representations for Trees
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
Graphically display a Tree.
"""
import sys
from Tkinter import IntVar, Menu, Tk
from nltk.util import in_idle
from nltk.tree import Tree
from nltk.draw.util import (CanvasFrame, CanvasWidget, BoxWidget,
TextWidget, ParenWidget, OvalWidget)
##//////////////////////////////////////////////////////
## Tree Segment
##//////////////////////////////////////////////////////
class TreeSegmentWidget(CanvasWidget):
"""
A canvas widget that displays a single segment of a hierarchical
tree. Each ``TreeSegmentWidget`` connects a single "node widget"
to a sequence of zero or more "subtree widgets". By default, the
bottom of the node is connected to the top of each subtree by a
single line. However, if the ``roof`` attribute is set, then a
single triangular "roof" will connect the node to all of its
children.
Attributes:
- ``roof``: What sort of connection to draw between the node and
its subtrees. If ``roof`` is true, draw a single triangular
"roof" over the subtrees. If ``roof`` is false, draw a line
between each subtree and the node. Default value is false.
- ``xspace``: The amount of horizontal space to leave between
subtrees when managing this widget. Default value is 10.
- ``yspace``: The amount of space to place between the node and
its children when managing this widget. Default value is 15.
- ``color``: The color of the lines connecting the node to its
subtrees; and of the outline of the triangular roof. Default
value is ``'#006060'``.
- ``fill``: The fill color for the triangular roof. Default
value is ``''`` (no fill).
- ``width``: The width of the lines connecting the node to its
subtrees; and of the outline of the triangular roof. Default
value is 1.
- ``orientation``: Determines whether the tree branches downwards
or rightwards. Possible values are ``'horizontal'`` and
``'vertical'``. The default value is ``'vertical'`` (i.e.,
branch downwards).
- ``draggable``: whether the widget can be dragged by the user.
"""
def __init__(self, canvas, node, subtrees, **attribs):
    """
    Create a tree segment connecting a node widget to its subtrees.

    :param canvas: The Tkinter canvas to draw on.
    :type node: CanvasWidget
    :type subtrees: list(CanvasWidgetI)
    :param attribs: Initial attribute values (see class docstring).
    """
    self._node = node
    self._subtrees = subtrees
    # Attributes (see __setitem__ for their semantics)
    self._horizontal = 0
    self._roof = 0
    self._xspace = 10
    self._yspace = 15
    self._ordered = False
    # Create canvas objects: one connecting line per subtree, plus a
    # (initially hidden) triangular roof polygon.
    self._lines = [canvas.create_line(0,0,0,0, fill='#006060')
                   for c in subtrees]
    self._polygon = canvas.create_polygon(0,0, fill='', state='hidden',
                                          outline='#006060')
    # Register child widgets (node + subtrees)
    self._add_child_widget(node)
    for subtree in subtrees:
        self._add_child_widget(subtree)
    # Are we currently managing?  (guards _maintain_order re-entrancy)
    self._managing = False
    CanvasWidget.__init__(self, canvas, **attribs)
def __setitem__(self, attr, value):
    """
    Set a display attribute (see the class docstring for the list)
    and update the corresponding canvas items.
    """
    canvas = self.canvas()
    # Bug fix: the original used "attr is 'roof'" — identity comparison
    # against a string literal, which relies on CPython interning and is
    # not guaranteed to work; '==' is the correct comparison.
    if attr == 'roof':
        self._roof = value
        if self._roof:
            for l in self._lines: canvas.itemconfig(l, state='hidden')
            canvas.itemconfig(self._polygon, state='normal')
        else:
            for l in self._lines: canvas.itemconfig(l, state='normal')
            canvas.itemconfig(self._polygon, state='hidden')
    elif attr == 'orientation':
        if value == 'horizontal': self._horizontal = 1
        elif value == 'vertical': self._horizontal = 0
        else:
            raise ValueError('orientation must be horizontal or vertical')
    elif attr == 'color':
        for l in self._lines: canvas.itemconfig(l, fill=value)
        canvas.itemconfig(self._polygon, outline=value)
    elif isinstance(attr, tuple) and attr[0] == 'color':
        # Set the color of an individual line.
        l = self._lines[int(attr[1])]
        canvas.itemconfig(l, fill=value)
    elif attr == 'fill':
        canvas.itemconfig(self._polygon, fill=value)
    elif attr == 'width':
        canvas.itemconfig(self._polygon, {attr:value})
        for l in self._lines: canvas.itemconfig(l, {attr:value})
    elif attr in ('xspace', 'yspace'):
        if attr == 'xspace': self._xspace = value
        elif attr == 'yspace': self._yspace = value
        self.update(self._node)
    elif attr == 'ordered':
        self._ordered = value
    else:
        CanvasWidget.__setitem__(self, attr, value)
def __getitem__(self, attr):
    """
    Return the current value of a display attribute; canvas-backed
    attributes (width, color) are read back from the canvas items.
    """
    if attr == 'roof': return self._roof
    elif attr == 'width':
        return self.canvas().itemcget(self._polygon, attr)
    elif attr == 'color':
        return self.canvas().itemcget(self._polygon, 'outline')
    elif isinstance(attr, tuple) and attr[0] == 'color':
        # Color of an individual subtree line.
        l = self._lines[int(attr[1])]
        return self.canvas().itemcget(l, 'fill')
    elif attr == 'xspace': return self._xspace
    elif attr == 'yspace': return self._yspace
    elif attr == 'orientation':
        if self._horizontal: return 'horizontal'
        else: return 'vertical'
    elif attr == 'ordered':
        return self._ordered
    else:
        return CanvasWidget.__getitem__(self, attr)
def node(self):
    """Return this segment's node widget."""
    return self._node
def subtrees(self):
    """Return a copy of this segment's list of subtree widgets."""
    return self._subtrees[:]
def set_node(self, node):
    """
    Set the node to ``node``.
    """
    self._remove_child_widget(self._node)
    self._add_child_widget(node)
    self._node = node
    self.update(self._node)
def replace_child(self, oldchild, newchild):
    """
    Replace the child ``oldchild`` with ``newchild``.
    """
    index = self._subtrees.index(oldchild)
    self._subtrees[index] = newchild
    self._remove_child_widget(oldchild)
    self._add_child_widget(newchild)
    self.update(newchild)
def remove_child(self, child):
    """Remove ``child`` from the subtrees and delete its canvas line."""
    index = self._subtrees.index(child)
    del self._subtrees[index]
    self._remove_child_widget(child)
    # NOTE(review): pops the LAST line rather than the line at ``index``;
    # all lines look identical, so only positional bookkeeping is affected.
    self.canvas().delete(self._lines.pop())
    self.update(self._node)
def insert_child(self, index, child):
    """
    Insert ``child`` as a subtree at position ``index``.

    :param index: Position in the subtree list.
    :param child: The canvas widget to insert.
    """
    self._subtrees.insert(index, child)
    self._add_child_widget(child)
    # Bug fix: the original referenced an undefined global 'canvas'
    # (NameError at runtime); the canvas must come from this widget.
    # NOTE(review): the new line is appended rather than inserted at
    # ``index`` (the original "# but.. lines???" remark); lines are
    # visually identical so this only affects positional bookkeeping.
    self._lines.append(self.canvas().create_line(0, 0, 0, 0,
                                                 fill='#006060'))
    self.update(self._node)
def _tags(self):
    """Return the canvas items owned directly by this segment."""
    if self._roof:
        return [self._polygon]
    else:
        return self._lines
def _subtree_top(self, child):
    """Return the (x, y) anchor point on ``child`` where its
    connecting line attaches (top edge, or left edge when the tree
    is drawn horizontally)."""
    if isinstance(child, TreeSegmentWidget):
        # Attach to the subtree's node, not its whole bounding box.
        bbox = child.node().bbox()
    else:
        bbox = child.bbox()
    if self._horizontal:
        return (bbox[0], (bbox[1]+bbox[3])/2.0)
    else:
        return ((bbox[0]+bbox[2])/2.0, bbox[1])
def _node_bottom(self):
    """Return the (x, y) anchor point on the node where the lines to
    the subtrees originate (bottom edge, or right edge when the tree
    is drawn horizontally)."""
    bbox = self._node.bbox()
    if self._horizontal:
        return (bbox[2], (bbox[1]+bbox[3])/2.0)
    else:
        return ((bbox[0]+bbox[2])/2.0, bbox[3])
def _update(self, child):
    """Redraw the roof polygon and the connecting lines affected by a
    change to ``child`` (the node or one subtree)."""
    if len(self._subtrees) == 0: return
    if self._node.bbox() is None: return # [XX] ???
    # Which lines need to be redrawn?
    if child is self._node: need_update = self._subtrees
    else: need_update = [child]
    if self._ordered and not self._managing:
        # Keep siblings from overlapping; moving them means their
        # lines must be redrawn too.
        need_update = self._maintain_order(child)
    # Update the polygon so it spans all subtrees' bounding boxes.
    (nodex, nodey) = self._node_bottom()
    (xmin, ymin, xmax, ymax) = self._subtrees[0].bbox()
    for subtree in self._subtrees[1:]:
        bbox = subtree.bbox()
        xmin = min(xmin, bbox[0])
        ymin = min(ymin, bbox[1])
        xmax = max(xmax, bbox[2])
        ymax = max(ymax, bbox[3])
    if self._horizontal:
        self.canvas().coords(self._polygon, nodex, nodey, xmin,
                             ymin, xmin, ymax, nodex, nodey)
    else:
        self.canvas().coords(self._polygon, nodex, nodey, xmin,
                             ymin, xmax, ymin, nodex, nodey)
    # Redraw all lines that need it.
    for subtree in need_update:
        (nodex, nodey) = self._node_bottom()
        line = self._lines[self._subtrees.index(subtree)]
        (subtreex, subtreey) = self._subtree_top(subtree)
        self.canvas().coords(line, nodex, nodey, subtreex, subtreey)
def _maintain_order(self, child):
    """Dispatch to the orientation-specific overlap-avoidance routine;
    return the list of widgets that were moved."""
    if self._horizontal:
        return self._maintain_order_horizontal(child)
    else:
        return self._maintain_order_vertical(child)
def _maintain_order_vertical(self, child):
    """Push siblings/node apart so ``child``'s new position does not
    overlap them (vertical orientation); return the moved widgets."""
    (left, top, right, bot) = child.bbox()
    if child is self._node:
        # The node moved: push every subtree below it.
        for subtree in self._subtrees:
            (x1, y1, x2, y2) = subtree.bbox()
            if bot+self._yspace > y1:
                subtree.move(0,bot+self._yspace-y1)
        return self._subtrees
    else:
        moved = [child]
        index = self._subtrees.index(child)
        # Check leaves to our right.
        x = right + self._xspace
        for i in range(index+1, len(self._subtrees)):
            (x1, y1, x2, y2) = self._subtrees[i].bbox()
            if x > x1:
                self._subtrees[i].move(x-x1, 0)
                x += x2-x1 + self._xspace
                moved.append(self._subtrees[i])
        # Check leaves to our left.
        x = left - self._xspace
        for i in range(index-1, -1, -1):
            (x1, y1, x2, y2) = self._subtrees[i].bbox()
            if x < x2:
                self._subtrees[i].move(x-x2, 0)
                x -= x2-x1 + self._xspace
                moved.append(self._subtrees[i])
        # Check the node: keep it above the moved child.
        (x1, y1, x2, y2) = self._node.bbox()
        if y2 > top-self._yspace:
            self._node.move(0, top-self._yspace-y2)
            moved = self._subtrees
        # Return a list of the nodes we moved
        return moved
def _maintain_order_horizontal(self, child):
    """Push siblings/node apart so ``child``'s new position does not
    overlap them (horizontal orientation); return the moved widgets."""
    (left, top, right, bot) = child.bbox()
    if child is self._node:
        # The node moved: push every subtree to its right.
        for subtree in self._subtrees:
            (x1, y1, x2, y2) = subtree.bbox()
            if right+self._xspace > x1:
                # Bug fix: the original called subtree.move() with a
                # single argument; move(dx, dy) takes two (compare the
                # vertical counterpart, which passes (0, dy)).
                subtree.move(right+self._xspace-x1, 0)
        return self._subtrees
    else:
        moved = [child]
        index = self._subtrees.index(child)
        # Check leaves below us.
        y = bot + self._yspace
        for i in range(index+1, len(self._subtrees)):
            (x1, y1, x2, y2) = self._subtrees[i].bbox()
            if y > y1:
                self._subtrees[i].move(0, y-y1)
                y += y2-y1 + self._yspace
                moved.append(self._subtrees[i])
        # Check leaves above us
        y = top - self._yspace
        for i in range(index-1, -1, -1):
            (x1, y1, x2, y2) = self._subtrees[i].bbox()
            if y < y2:
                self._subtrees[i].move(0, y-y2)
                y -= y2-y1 + self._yspace
                moved.append(self._subtrees[i])
        # Check the node: keep it to the left of the moved child.
        (x1, y1, x2, y2) = self._node.bbox()
        if x2 > left-self._xspace:
            self._node.move(left-self._xspace-x2, 0)
            moved = self._subtrees
        # Return a list of the nodes we moved
        return moved
def _manage_horizontal(self):
    """Lay out the subtrees in a vertical column to the right of the
    node, then center the column on the node."""
    (nodex, nodey) = self._node_bottom()
    # Put the subtrees in a line.
    y = 20
    for subtree in self._subtrees:
        subtree_bbox = subtree.bbox()
        dx = nodex - subtree_bbox[0] + self._xspace
        dy = y - subtree_bbox[1]
        subtree.move(dx, dy)
        y += subtree_bbox[3] - subtree_bbox[1] + self._yspace
    # Find the center of their tops.
    center = 0.0
    for subtree in self._subtrees:
        center += self._subtree_top(subtree)[1]
    center /= len(self._subtrees)
    # Center the subtrees with the node.
    for subtree in self._subtrees:
        subtree.move(0, nodey-center)
def _manage_vertical(self):
    """Lay out the subtrees in a horizontal row below the node, then
    center the row on the node."""
    (nodex, nodey) = self._node_bottom()
    # Put the subtrees in a line.
    x = 0
    for subtree in self._subtrees:
        subtree_bbox = subtree.bbox()
        dy = nodey - subtree_bbox[1] + self._yspace
        dx = x - subtree_bbox[0]
        subtree.move(dx, dy)
        x += subtree_bbox[2] - subtree_bbox[0] + self._xspace
    # Find the center of their tops.
    center = 0.0
    for subtree in self._subtrees:
        center += self._subtree_top(subtree)[0]/len(self._subtrees)
    # Center the subtrees with the node.
    for subtree in self._subtrees:
        subtree.move(nodex-center, 0)
def _manage(self):
    """Perform a full layout of this segment; _managing suppresses the
    _maintain_order adjustments while we reposition everything."""
    self._managing = True
    (nodex, nodey) = self._node_bottom()
    if len(self._subtrees) == 0: return
    if self._horizontal: self._manage_horizontal()
    else: self._manage_vertical()
    # Update lines to subtrees.
    for subtree in self._subtrees:
        self._update(subtree)
    self._managing = False
def __repr__(self):
    return '[TreeSeg %s: %s]' % (self._node, self._subtrees)
def _tree_to_treeseg(canvas, t, make_node, make_leaf,
                     tree_attribs, node_attribs,
                     leaf_attribs, loc_attribs):
    """Recursively convert Tree ``t`` into canvas widgets: each Tree
    becomes a TreeSegmentWidget, each leaf a make_leaf() widget."""
    if isinstance(t, Tree):
        node = make_node(canvas, t.node, **node_attribs)
        subtrees = [_tree_to_treeseg(canvas, child, make_node, make_leaf,
                                     tree_attribs, node_attribs,
                                     leaf_attribs, loc_attribs)
                    for child in t]
        return TreeSegmentWidget(canvas, node, subtrees, **tree_attribs)
    else:
        return make_leaf(canvas, t, **leaf_attribs)
def tree_to_treesegment(canvas, t, make_node=TextWidget,
                        make_leaf=TextWidget, **attribs):
    """
    Convert a Tree into a ``TreeSegmentWidget``.

    :param make_node: A ``CanvasWidget`` constructor or a function that
        creates ``CanvasWidgets``.  ``make_node`` is used to convert
        the Tree's nodes into ``CanvasWidgets``.  If no constructor
        is specified, then ``TextWidget`` will be used.
    :param make_leaf: A ``CanvasWidget`` constructor or a function that
        creates ``CanvasWidgets``.  ``make_leaf`` is used to convert
        the Tree's leafs into ``CanvasWidgets``.  If no constructor
        is specified, then ``TextWidget`` will be used.
    :param attribs: Attributes for the canvas widgets that make up the
        returned ``TreeSegmentWidget``.  Any attribute beginning with
        ``'tree_'`` will be passed to all ``TreeSegmentWidgets`` (with
        the ``'tree_'`` prefix removed.  Any attribute beginning with
        ``'node_'`` will be passed to all nodes.  Any attribute
        beginning with ``'leaf_'`` will be passed to all leaves.  And
        any attribute beginning with ``'loc_'`` will be passed to all
        text locations (for Trees).
    """
    # Process attribs: split them into per-widget-kind dictionaries by
    # their prefix, then strip the prefix.
    tree_attribs = {}
    node_attribs = {}
    leaf_attribs = {}
    loc_attribs = {}
    for (key, value) in attribs.items():
        if key[:5] == 'tree_': tree_attribs[key[5:]] = value
        elif key[:5] == 'node_': node_attribs[key[5:]] = value
        elif key[:5] == 'leaf_': leaf_attribs[key[5:]] = value
        elif key[:4] == 'loc_': loc_attribs[key[4:]] = value
        else: raise ValueError('Bad attribute: %s' % key)
    return _tree_to_treeseg(canvas, t, make_node, make_leaf,
                            tree_attribs, node_attribs,
                            leaf_attribs, loc_attribs)
##//////////////////////////////////////////////////////
## Tree Widget
##//////////////////////////////////////////////////////
class TreeWidget(CanvasWidget):
    """
    A canvas widget that displays a single Tree.
    ``TreeWidget`` manages a group of ``TreeSegmentWidgets`` that are
    used to display a Tree.

    Attributes:

      - ``node_attr``: Sets the attribute ``attr`` on all of the
        node widgets for this ``TreeWidget``.
      - ``leaf_attr``: Sets the attribute ``attr`` on all of the
        leaf widgets for this ``TreeWidget``.
      - ``loc_attr``: Sets the attribute ``attr`` on all of the
        location widgets for this ``TreeWidget`` (if it was built from
        a Tree).  Note that a location widget is a ``TextWidget``.
      - ``xspace``: The amount of horizontal space to leave between
        subtrees when managing this widget.  Default value is 10.
      - ``yspace``: The amount of space to place between the node and
        its children when managing this widget.  Default value is 15.
      - ``line_color``: The color of the lines connecting each expanded
        node to its subtrees.
      - ``roof_color``: The color of the outline of the triangular roof
        for collapsed trees.
      - ``roof_fill``: The fill color for the triangular roof for
        collapsed trees.
      - ``width``
      - ``orientation``: Determines whether the tree branches downwards
        or rightwards.  Possible values are ``'horizontal'`` and
        ``'vertical'``.  The default value is ``'vertical'`` (i.e.,
        branch downwards).
      - ``shapeable``: whether the subtrees can be independently
        dragged by the user.  THIS property simply sets the
        ``DRAGGABLE`` property on all of the ``TreeWidget``'s tree
        segments.
      - ``draggable``: whether the widget can be dragged by the user.
    """

    def __init__(self, canvas, t, make_node=TextWidget,
                 make_leaf=TextWidget, **attribs):
        # Node & leaf canvas widget constructors
        self._make_node = make_node
        self._make_leaf = make_leaf
        self._tree = t

        # Attributes (defaults; overridable via **attribs / __setitem__).
        self._nodeattribs = {}
        self._leafattribs = {}
        self._locattribs = {'color': '#008000'}
        self._line_color = '#008080'
        self._line_width = 1
        self._roof_color = '#008080'
        self._roof_fill = '#c0c0c0'
        self._shapeable = False
        self._xspace = 10
        self._yspace = 10
        self._orientation = 'vertical'
        self._ordered = False

        # Build trees.  Both a collapsed (roofed) and an expanded
        # TreeSegmentWidget is built for every subtree, keyed by the
        # subtree's index path; toggle_collapsed swaps between them.
        self._keys = {} # treeseg -> key
        self._expanded_trees = {}
        self._collapsed_trees = {}
        self._nodes = []
        self._leaves = []
        #self._locs = []
        self._make_collapsed_trees(canvas, t, ())
        self._treeseg = self._make_expanded_tree(canvas, t, ())
        self._add_child_widget(self._treeseg)

        CanvasWidget.__init__(self, canvas, **attribs)

    def expanded_tree(self, *path_to_tree):
        """
        Return the ``TreeSegmentWidget`` for the specified subtree.

        :param path_to_tree: A list of indices i1, i2, ..., in, where
            the desired widget is the widget corresponding to
            ``tree.children()[i1].children()[i2]....children()[in]``.
            For the root, the path is ``()``.
        """
        return self._expanded_trees[path_to_tree]

    def collapsed_tree(self, *path_to_tree):
        """
        Return the ``TreeSegmentWidget`` for the specified subtree.

        :param path_to_tree: A list of indices i1, i2, ..., in, where
            the desired widget is the widget corresponding to
            ``tree.children()[i1].children()[i2]....children()[in]``.
            For the root, the path is ``()``.
        """
        return self._collapsed_trees[path_to_tree]

    def bind_click_trees(self, callback, button=1):
        """
        Add a binding to all tree segments.
        """
        for tseg in self._expanded_trees.values():
            tseg.bind_click(callback, button)
        for tseg in self._collapsed_trees.values():
            tseg.bind_click(callback, button)

    def bind_drag_trees(self, callback, button=1):
        """
        Add a binding to all tree segments.
        """
        for tseg in self._expanded_trees.values():
            tseg.bind_drag(callback, button)
        for tseg in self._collapsed_trees.values():
            tseg.bind_drag(callback, button)

    def bind_click_leaves(self, callback, button=1):
        """
        Add a binding to all leaves.
        """
        # The original ran this identical loop twice, binding every
        # callback twice; once is sufficient.
        for leaf in self._leaves: leaf.bind_click(callback, button)

    def bind_drag_leaves(self, callback, button=1):
        """
        Add a binding to all leaves.
        """
        for leaf in self._leaves: leaf.bind_drag(callback, button)

    def bind_click_nodes(self, callback, button=1):
        """
        Add a binding to all nodes.
        """
        for node in self._nodes: node.bind_click(callback, button)

    def bind_drag_nodes(self, callback, button=1):
        """
        Add a binding to all nodes.
        """
        for node in self._nodes: node.bind_drag(callback, button)

    def _make_collapsed_trees(self, canvas, t, key):
        # Recursively build a roofed (collapsed) widget for the subtree
        # at index path `key`, plus one for each of its descendants.
        if not isinstance(t, Tree): return
        make_node = self._make_node
        make_leaf = self._make_leaf

        node = make_node(canvas, t.node, **self._nodeattribs)
        self._nodes.append(node)
        leaves = [make_leaf(canvas, l, **self._leafattribs)
                  for l in t.leaves()]
        self._leaves += leaves
        treeseg = TreeSegmentWidget(canvas, node, leaves, roof=1,
                                    color=self._roof_color,
                                    fill=self._roof_fill,
                                    width=self._line_width)

        self._collapsed_trees[key] = treeseg
        self._keys[treeseg] = key
        #self._add_child_widget(treeseg)
        treeseg.hide()

        # Build trees for children.
        for i in range(len(t)):
            child = t[i]
            self._make_collapsed_trees(canvas, child, key + (i,))

    def _make_expanded_tree(self, canvas, t, key):
        # Recursively build the expanded widget for the subtree at
        # index path `key`; leaves become plain leaf widgets.
        make_node = self._make_node
        make_leaf = self._make_leaf

        if isinstance(t, Tree):
            node = make_node(canvas, t.node, **self._nodeattribs)
            self._nodes.append(node)
            children = t
            subtrees = [self._make_expanded_tree(canvas, children[i], key+(i,))
                        for i in range(len(children))]
            treeseg = TreeSegmentWidget(canvas, node, subtrees,
                                        color=self._line_color,
                                        width=self._line_width)
            self._expanded_trees[key] = treeseg
            self._keys[treeseg] = key
            return treeseg
        else:
            leaf = make_leaf(canvas, t, **self._leafattribs)
            self._leaves.append(leaf)
            return leaf

    def __setitem__(self, attr, value):
        # NOTE(review): the node_/leaf_ branches update the widgets but
        # not self._nodeattribs/_leafattribs, so __getitem__ will not
        # reflect values set here — confirm whether that is intended.
        if attr[:5] == 'node_':
            for node in self._nodes: node[attr[5:]] = value
        elif attr[:5] == 'leaf_':
            for leaf in self._leaves: leaf[attr[5:]] = value
        elif attr == 'line_color':
            self._line_color = value
            for tseg in self._expanded_trees.values(): tseg['color'] = value
        elif attr == 'line_width':
            self._line_width = value
            for tseg in self._expanded_trees.values(): tseg['width'] = value
            for tseg in self._collapsed_trees.values(): tseg['width'] = value
        elif attr == 'roof_color':
            self._roof_color = value
            for tseg in self._collapsed_trees.values(): tseg['color'] = value
        elif attr == 'roof_fill':
            self._roof_fill = value
            for tseg in self._collapsed_trees.values(): tseg['fill'] = value
        elif attr == 'shapeable':
            self._shapeable = value
            for tseg in self._expanded_trees.values():
                tseg['draggable'] = value
            for tseg in self._collapsed_trees.values():
                tseg['draggable'] = value
            for leaf in self._leaves: leaf['draggable'] = value
        elif attr == 'xspace':
            self._xspace = value
            for tseg in self._expanded_trees.values():
                tseg['xspace'] = value
            for tseg in self._collapsed_trees.values():
                tseg['xspace'] = value
            self.manage()
        elif attr == 'yspace':
            self._yspace = value
            for tseg in self._expanded_trees.values():
                tseg['yspace'] = value
            for tseg in self._collapsed_trees.values():
                tseg['yspace'] = value
            self.manage()
        elif attr == 'orientation':
            self._orientation = value
            for tseg in self._expanded_trees.values():
                tseg['orientation'] = value
            for tseg in self._collapsed_trees.values():
                tseg['orientation'] = value
            self.manage()
        elif attr == 'ordered':
            self._ordered = value
            for tseg in self._expanded_trees.values():
                tseg['ordered'] = value
            for tseg in self._collapsed_trees.values():
                tseg['ordered'] = value
        else: CanvasWidget.__setitem__(self, attr, value)

    def __getitem__(self, attr):
        if attr[:5] == 'node_':
            return self._nodeattribs.get(attr[5:], None)
        elif attr[:5] == 'leaf_':
            return self._leafattribs.get(attr[5:], None)
        elif attr[:4] == 'loc_':
            return self._locattribs.get(attr[4:], None)
        elif attr == 'line_color': return self._line_color
        elif attr == 'line_width': return self._line_width
        elif attr == 'roof_color': return self._roof_color
        elif attr == 'roof_fill': return self._roof_fill
        elif attr == 'shapeable': return self._shapeable
        elif attr == 'xspace': return self._xspace
        elif attr == 'yspace': return self._yspace
        elif attr == 'orientation': return self._orientation
        else: return CanvasWidget.__getitem__(self, attr)

    def _tags(self): return []

    def _manage(self):
        # dict.values() returns a non-concatenable view on Python 3;
        # materialize both as lists so `+` works on Python 2 and 3.
        segs = (list(self._expanded_trees.values()) +
                list(self._collapsed_trees.values()))
        for tseg in segs:
            if tseg.hidden():
                tseg.show()
                tseg.manage()
                tseg.hide()

    def toggle_collapsed(self, treeseg):
        """
        Collapse/expand a tree.
        """
        old_treeseg = treeseg
        if old_treeseg['roof']:
            new_treeseg = self._expanded_trees[self._keys[old_treeseg]]
        else:
            new_treeseg = self._collapsed_trees[self._keys[old_treeseg]]

        # Replace the old tree with the new tree.
        if old_treeseg.parent() is self:
            self._remove_child_widget(old_treeseg)
            self._add_child_widget(new_treeseg)
            self._treeseg = new_treeseg
        else:
            old_treeseg.parent().replace_child(old_treeseg, new_treeseg)

        # Move the new tree to where the old tree was.  Show it first,
        # so we can find its bounding box.
        new_treeseg.show()
        (newx, newy) = new_treeseg.node().bbox()[:2]
        (oldx, oldy) = old_treeseg.node().bbox()[:2]
        new_treeseg.move(oldx-newx, oldy-newy)

        # Hide the old tree
        old_treeseg.hide()

        # We could do parent.manage() here instead, if we wanted.
        new_treeseg.parent().update(new_treeseg)
##//////////////////////////////////////////////////////
## draw_trees
##//////////////////////////////////////////////////////
class TreeView(object):
    """
    A Tk window that displays one or more Trees as collapsible
    ``TreeWidget``s arranged in a grid, with a File menu
    (print / exit) and a Zoom menu that controls the font size.
    """

    def __init__(self, *trees):
        from math import sqrt, ceil

        self._trees = trees

        # Top-level window; Ctrl-x / Ctrl-q close it, Ctrl-p prints.
        self._top = Tk()
        self._top.title('NLTK')
        self._top.bind('<Control-x>', self.destroy)
        self._top.bind('<Control-q>', self.destroy)

        cf = self._cframe = CanvasFrame(self._top)
        self._top.bind('<Control-p>', self._cframe.print_to_file)

        # Size is variable.
        self._size = IntVar(self._top)
        self._size.set(12)
        bold = ('helvetica', -self._size.get(), 'bold')
        helv = ('helvetica', -self._size.get())

        # Lay the trees out in a square.
        self._width = int(ceil(sqrt(len(trees))))
        self._widgets = []
        for i in range(len(trees)):
            widget = TreeWidget(cf.canvas(), trees[i], node_font=bold,
                                leaf_color='#008040', node_color='#004080',
                                roof_color='#004040', roof_fill='white',
                                line_color='#004040', draggable=1,
                                leaf_font=helv)
            widget.bind_click_trees(widget.toggle_collapsed)
            self._widgets.append(widget)
            cf.add_widget(widget, 0, 0)

        self._layout()
        self._cframe.pack(expand=1, fill='both')
        self._init_menubar()

    def _layout(self):
        # Arrange the widgets left-to-right, top-to-bottom, starting a
        # new row after every self._width widgets; rows are as tall as
        # their tallest widget (tracked in ymax).
        i = x = y = ymax = 0
        width = self._width
        for i in range(len(self._widgets)):
            widget = self._widgets[i]
            (oldx, oldy) = widget.bbox()[:2]
            if i % width == 0:
                y = ymax
                x = 0
            widget.move(x-oldx, y-oldy)
            x = widget.bbox()[2] + 10
            ymax = max(ymax, widget.bbox()[3] + 10)

    def _init_menubar(self):
        # Build the File and Zoom menus; accelerators mirror the key
        # bindings installed in __init__.
        menubar = Menu(self._top)

        filemenu = Menu(menubar, tearoff=0)
        filemenu.add_command(label='Print to Postscript', underline=0,
                             command=self._cframe.print_to_file,
                             accelerator='Ctrl-p')
        filemenu.add_command(label='Exit', underline=1,
                             command=self.destroy, accelerator='Ctrl-x')
        menubar.add_cascade(label='File', underline=0, menu=filemenu)

        zoommenu = Menu(menubar, tearoff=0)
        zoommenu.add_radiobutton(label='Tiny', variable=self._size,
                                 underline=0, value=10, command=self.resize)
        zoommenu.add_radiobutton(label='Small', variable=self._size,
                                 underline=0, value=12, command=self.resize)
        zoommenu.add_radiobutton(label='Medium', variable=self._size,
                                 underline=0, value=14, command=self.resize)
        zoommenu.add_radiobutton(label='Large', variable=self._size,
                                 underline=0, value=28, command=self.resize)
        zoommenu.add_radiobutton(label='Huge', variable=self._size,
                                 underline=0, value=50, command=self.resize)
        menubar.add_cascade(label='Zoom', underline=0, menu=zoommenu)

        self._top.config(menu=menubar)

    def resize(self, *e):
        # Apply the currently-selected zoom size to every widget's fonts
        # and spacing, then re-run the layout.
        bold = ('helvetica', -self._size.get(), 'bold')
        helv = ('helvetica', -self._size.get())
        xspace = self._size.get()
        yspace = self._size.get()
        for widget in self._widgets:
            widget['node_font'] = bold
            widget['leaf_font'] = helv
            widget['xspace'] = xspace
            widget['yspace'] = yspace
            if self._size.get() < 20: widget['line_width'] = 1
            elif self._size.get() < 30: widget['line_width'] = 2
            else: widget['line_width'] = 3
        self._layout()

    def destroy(self, *e):
        # Idempotent: safe to call more than once (e.g. from both
        # key bindings and the menu entry).
        if self._top is None: return
        self._top.destroy()
        self._top = None

    def mainloop(self, *args, **kwargs):
        """
        Enter the Tkinter mainloop.  This function must be called if
        this demo is created from a non-interactive program (e.g.
        from a script); otherwise, the demo will close as soon as
        the script completes.
        """
        if in_idle(): return
        self._top.mainloop(*args, **kwargs)
def draw_trees(*trees):
    """
    Open a new window containing a graphical diagram of the given
    trees.

    :rtype: None
    """
    viewer = TreeView(*trees)
    viewer.mainloop()
##//////////////////////////////////////////////////////
## Demo Code
##//////////////////////////////////////////////////////
def demo():
    """
    Display a CanvasFrame containing several interactive demonstration
    trees: a plain draggable TreeWidget, a shapeable TreeWidget with
    custom node/leaf constructors, a tree built with
    tree_to_treesegment, and a horizontally-oriented TreeWidget.
    """
    import random
    def fill(cw):
        # Right-click handler: recolor a widget with a random fill.
        cw['fill'] = '#%06d' % random.randint(0,999999)

    cf = CanvasFrame(width=550, height=450, closeenough=2)

    # Tree 1 (top-left): a plain draggable TreeWidget.
    t = Tree.parse('''
    (S (NP the very big cat)
    (VP (Adv sorta) (V saw) (NP (Det the) (N dog))))''')
    tc = TreeWidget(cf.canvas(), t, draggable=1,
                    node_font=('helvetica', -14, 'bold'),
                    leaf_font=('helvetica', -12, 'italic'),
                    roof_fill='white', roof_color='black',
                    leaf_color='green4', node_color='blue2')
    cf.add_widget(tc,10,10)

    # Custom widget constructors for tree 2's nodes and leaves.
    def boxit(canvas, text):
        big = ('helvetica', -16, 'bold')
        return BoxWidget(canvas, TextWidget(canvas, text,
                                            font=big), fill='green')
    def ovalit(canvas, text):
        return OvalWidget(canvas, TextWidget(canvas, text),
                          fill='cyan')

    # Tree 2 (top-right): shapeable, with boxed nodes and oval leaves.
    treetok = Tree.parse('(S (NP this tree) (VP (V is) (AdjP shapeable)))')
    tc2 = TreeWidget(cf.canvas(), treetok, boxit, ovalit, shapeable=1)

    def color(node):
        # Right-click handler: recolor a node's text.
        node['color'] = '#%04d00' % random.randint(0,9999)
    def color2(treeseg):
        # Right-click handler: recolor a segment's node box and label.
        treeseg.node()['fill'] = '#%06d' % random.randint(0,9999)
        treeseg.node().child()['color'] = 'white'

    tc.bind_click_trees(tc.toggle_collapsed)
    tc2.bind_click_trees(tc2.toggle_collapsed)
    tc.bind_click_nodes(color, 3)
    tc2.expanded_tree(1).bind_click(color2, 3)
    tc2.expanded_tree().bind_click(color2, 3)

    paren = ParenWidget(cf.canvas(), tc2)
    cf.add_widget(paren, tc.bbox()[2]+10, 10)

    # Tree 3 (bottom-left): built with tree_to_treesegment.
    tree3 = Tree.parse('''
    (S (NP this tree) (AUX was)
    (VP (V built) (PP (P with) (NP (N tree_to_treesegment)))))''')
    tc3 = tree_to_treesegment(cf.canvas(), tree3, tree_color='green4',
                              tree_xspace=2, tree_width=2)
    tc3['draggable'] = 1
    cf.add_widget(tc3, 10, tc.bbox()[3]+10)

    def orientswitch(treewidget):
        # Click handler: toggle tree 4 between horizontal and vertical
        # orientation, relabeling its leaves to match.
        if treewidget['orientation'] == 'horizontal':
            treewidget.expanded_tree(1,1).subtrees()[0].set_text('vertical')
            treewidget.collapsed_tree(1,1).subtrees()[0].set_text('vertical')
            treewidget.collapsed_tree(1).subtrees()[1].set_text('vertical')
            treewidget.collapsed_tree().subtrees()[3].set_text('vertical')
            treewidget['orientation'] = 'vertical'
        else:
            treewidget.expanded_tree(1,1).subtrees()[0].set_text('horizontal')
            treewidget.collapsed_tree(1,1).subtrees()[0].set_text('horizontal')
            treewidget.collapsed_tree(1).subtrees()[1].set_text('horizontal')
            treewidget.collapsed_tree().subtrees()[3].set_text('horizontal')
            treewidget['orientation'] = 'horizontal'

    # Explanatory text box.
    text = """
    Try clicking, right clicking, and dragging
    different elements of each of the trees.
    The top-left tree is a TreeWidget built from
    a Tree. The top-right is a TreeWidget built
    from a Tree, using non-default widget
    constructors for the nodes & leaves (BoxWidget
    and OvalWidget). The bottom-left tree is
    built from tree_to_treesegment."""
    twidget = TextWidget(cf.canvas(), text.strip())
    textbox = BoxWidget(cf.canvas(), twidget, fill='white', draggable=1)
    cf.add_widget(textbox, tc3.bbox()[2]+10, tc2.bbox()[3]+10)

    # Tree 4 (bottom-right): horizontal orientation; click to toggle.
    tree4 = Tree.parse('(S (NP this tree) (VP (V is) (Adj horizontal)))')
    tc4 = TreeWidget(cf.canvas(), tree4, draggable=1,
                     line_color='brown2', roof_color='brown2',
                     node_font=('helvetica', -12, 'bold'),
                     node_color='brown4', orientation='horizontal')
    tc4.manage()
    cf.add_widget(tc4, tc3.bbox()[2]+10, textbox.bbox()[3]+10)
    tc4.bind_click(orientswitch)
    tc4.bind_click_trees(tc4.toggle_collapsed, 3)

    # Run mainloop
    cf.mainloop()
if __name__ == '__main__':
demo()
|
neumerance/deploy | refs/heads/master | .venv/lib/python2.7/site-packages/babel/core.py | 79 | # -*- coding: utf-8 -*-
"""
babel.core
~~~~~~~~~~
Core locale representation and locale data access.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
import os
from babel import localedata
from babel._compat import pickle, string_types
__all__ = ['UnknownLocaleError', 'Locale', 'default_locale', 'negotiate_locale',
'parse_locale']
_global_data = None
def _raise_no_data_error():
raise RuntimeError('The babel data files are not available. '
'This usually happens because you are using '
'a source checkout from Babel and you did '
'not build the data files. Just make sure '
'to run "python setup.py import_cldr" before '
'installing the library.')
def get_global(key):
    """Return the dictionary for the given key in the global data.

    The global data is stored in the ``babel/global.dat`` file and contains
    information independent of individual locales.

    >>> get_global('zone_aliases')['UTC']
    u'Etc/GMT'
    >>> get_global('zone_territories')['Europe/Berlin']
    u'DE'

    .. versionadded:: 0.9

    :param key: the data key
    """
    global _global_data
    if _global_data is None:
        # Lazily load and cache the pickled global data on first access.
        # (The original wrapped the dirname in a pointless single-argument
        # os.path.join call.)
        dirname = os.path.dirname(__file__)
        filename = os.path.join(dirname, 'global.dat')
        if not os.path.isfile(filename):
            _raise_no_data_error()
        with open(filename, 'rb') as fileobj:
            _global_data = pickle.load(fileobj)
    # Unknown keys yield an empty dict rather than raising.
    return _global_data.get(key, {})
LOCALE_ALIASES = {
'ar': 'ar_SY', 'bg': 'bg_BG', 'bs': 'bs_BA', 'ca': 'ca_ES', 'cs': 'cs_CZ',
'da': 'da_DK', 'de': 'de_DE', 'el': 'el_GR', 'en': 'en_US', 'es': 'es_ES',
'et': 'et_EE', 'fa': 'fa_IR', 'fi': 'fi_FI', 'fr': 'fr_FR', 'gl': 'gl_ES',
'he': 'he_IL', 'hu': 'hu_HU', 'id': 'id_ID', 'is': 'is_IS', 'it': 'it_IT',
'ja': 'ja_JP', 'km': 'km_KH', 'ko': 'ko_KR', 'lt': 'lt_LT', 'lv': 'lv_LV',
'mk': 'mk_MK', 'nl': 'nl_NL', 'nn': 'nn_NO', 'no': 'nb_NO', 'pl': 'pl_PL',
'pt': 'pt_PT', 'ro': 'ro_RO', 'ru': 'ru_RU', 'sk': 'sk_SK', 'sl': 'sl_SI',
'sv': 'sv_SE', 'th': 'th_TH', 'tr': 'tr_TR', 'uk': 'uk_UA'
}
class UnknownLocaleError(Exception):
    """Exception raised when a locale is requested for which no locale data
    is available.
    """

    def __init__(self, identifier):
        """Create the exception.

        :param identifier: the identifier string of the unsupported locale
        """
        message = 'unknown locale %r' % identifier
        Exception.__init__(self, message)

        #: The identifier of the locale that could not be found.
        self.identifier = identifier
class Locale(object):
"""Representation of a specific locale.
>>> locale = Locale('en', 'US')
>>> repr(locale)
"Locale('en', territory='US')"
>>> locale.display_name
u'English (United States)'
A `Locale` object can also be instantiated from a raw locale string:
>>> locale = Locale.parse('en-US', sep='-')
>>> repr(locale)
"Locale('en', territory='US')"
`Locale` objects provide access to a collection of locale data, such as
territory and language names, number and date format patterns, and more:
>>> locale.number_symbols['decimal']
u'.'
If a locale is requested for which no locale data is available, an
`UnknownLocaleError` is raised:
>>> Locale.parse('en_DE')
Traceback (most recent call last):
...
UnknownLocaleError: unknown locale 'en_DE'
For more information see :rfc:`3066`.
"""
    def __init__(self, language, territory=None, script=None, variant=None):
        """Initialize the locale object from the given identifier components.

        >>> locale = Locale('en', 'US')
        >>> locale.language
        'en'
        >>> locale.territory
        'US'

        :param language: the language code
        :param territory: the territory (country or region) code
        :param script: the script code
        :param variant: the variant code
        :raise `UnknownLocaleError`: if no locale data is available for the
                                     requested locale
        """
        #: the language code
        self.language = language
        #: the territory (country or region) code
        self.territory = territory
        #: the script code
        self.script = script
        #: the variant code
        self.variant = variant
        # Locale data is loaded lazily by the `_data` property.
        self.__data = None

        # str(self) renders the canonical identifier (e.g. 'en_US');
        # fail fast if no CLDR data file exists for it.
        identifier = str(self)
        if not localedata.exists(identifier):
            raise UnknownLocaleError(identifier)
@classmethod
def default(cls, category=None, aliases=LOCALE_ALIASES):
"""Return the system default locale for the specified category.
>>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES']:
... os.environ[name] = ''
>>> os.environ['LANG'] = 'fr_FR.UTF-8'
>>> Locale.default('LC_MESSAGES')
Locale('fr', territory='FR')
The following fallbacks to the variable are always considered:
- ``LANGUAGE``
- ``LC_ALL``
- ``LC_CTYPE``
- ``LANG``
:param category: one of the ``LC_XXX`` environment variable names
:param aliases: a dictionary of aliases for locale identifiers
"""
# XXX: use likely subtag expansion here instead of the
# aliases dictionary.
locale_string = default_locale(category, aliases=aliases)
return cls.parse(locale_string)
@classmethod
def negotiate(cls, preferred, available, sep='_', aliases=LOCALE_ALIASES):
"""Find the best match between available and requested locale strings.
>>> Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT'])
Locale('de', territory='DE')
>>> Locale.negotiate(['de_DE', 'en_US'], ['en', 'de'])
Locale('de')
>>> Locale.negotiate(['de_DE', 'de'], ['en_US'])
You can specify the character used in the locale identifiers to separate
the differnet components. This separator is applied to both lists. Also,
case is ignored in the comparison:
>>> Locale.negotiate(['de-DE', 'de'], ['en-us', 'de-de'], sep='-')
Locale('de', territory='DE')
:param preferred: the list of locale identifers preferred by the user
:param available: the list of locale identifiers available
:param aliases: a dictionary of aliases for locale identifiers
"""
identifier = negotiate_locale(preferred, available, sep=sep,
aliases=aliases)
if identifier:
return Locale.parse(identifier, sep=sep)
@classmethod
def parse(cls, identifier, sep='_', resolve_likely_subtags=True):
"""Create a `Locale` instance for the given locale identifier.
>>> l = Locale.parse('de-DE', sep='-')
>>> l.display_name
u'Deutsch (Deutschland)'
If the `identifier` parameter is not a string, but actually a `Locale`
object, that object is returned:
>>> Locale.parse(l)
Locale('de', territory='DE')
This also can perform resolving of likely subtags which it does
by default. This is for instance useful to figure out the most
likely locale for a territory you can use ``'und'`` as the
language tag:
>>> Locale.parse('und_AT')
Locale('de', territory='AT')
:param identifier: the locale identifier string
:param sep: optional component separator
:param resolve_likely_subtags: if this is specified then a locale will
have its likely subtag resolved if the
locale otherwise does not exist. For
instance ``zh_TW`` by itself is not a
locale that exists but Babel can
automatically expand it to the full
form of ``zh_hant_TW``. Note that this
expansion is only taking place if no
locale exists otherwise. For instance
there is a locale ``en`` that can exist
by itself.
:raise `ValueError`: if the string does not appear to be a valid locale
identifier
:raise `UnknownLocaleError`: if no locale data is available for the
requested locale
"""
if identifier is None:
return None
elif isinstance(identifier, Locale):
return identifier
elif not isinstance(identifier, string_types):
raise TypeError('Unxpected value for identifier: %r' % (identifier,))
parts = parse_locale(identifier, sep=sep)
input_id = get_locale_identifier(parts)
def _try_load(parts):
try:
return cls(*parts)
except UnknownLocaleError:
return None
def _try_load_reducing(parts):
# Success on first hit, return it.
locale = _try_load(parts)
if locale is not None:
return locale
# Now try without script and variant
locale = _try_load(parts[:2])
if locale is not None:
return locale
locale = _try_load(parts)
if locale is not None:
return locale
if not resolve_likely_subtags:
raise UnknownLocaleError(input_id)
# From here onwards is some very bad likely subtag resolving. This
# whole logic is not entirely correct but good enough (tm) for the
# time being. This has been added so that zh_TW does not cause
# errors for people when they upgrade. Later we should properly
# implement ICU like fuzzy locale objects and provide a way to
# maximize and minimize locale tags.
language, territory, script, variant = parts
language = get_global('language_aliases').get(language, language)
territory = get_global('territory_aliases').get(territory, territory)
script = get_global('script_aliases').get(script, script)
variant = get_global('variant_aliases').get(variant, variant)
if territory == 'ZZ':
territory = None
if script == 'Zzzz':
script = None
parts = language, territory, script, variant
# First match: try the whole identifier
new_id = get_locale_identifier(parts)
likely_subtag = get_global('likely_subtags').get(new_id)
if likely_subtag is not None:
locale = _try_load_reducing(parse_locale(likely_subtag))
if locale is not None:
return locale
# If we did not find anything so far, try again with a
# simplified identifier that is just the language
likely_subtag = get_global('likely_subtags').get(language)
if likely_subtag is not None:
language2, _, script2, variant2 = parse_locale(likely_subtag)
locale = _try_load_reducing((language2, territory, script2, variant2))
if locale is not None:
return locale
raise UnknownLocaleError(input_id)
def __eq__(self, other):
for key in ('language', 'territory', 'script', 'variant'):
if not hasattr(other, key):
return False
return (self.language == other.language) and \
(self.territory == other.territory) and \
(self.script == other.script) and \
(self.variant == other.variant)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
parameters = ['']
for key in ('territory', 'script', 'variant'):
value = getattr(self, key)
if value is not None:
parameters.append('%s=%r' % (key, value))
parameter_string = '%r' % self.language + ', '.join(parameters)
return 'Locale(%s)' % parameter_string
def __str__(self):
return get_locale_identifier((self.language, self.territory,
self.script, self.variant))
@property
def _data(self):
if self.__data is None:
self.__data = localedata.LocaleDataDict(localedata.load(str(self)))
return self.__data
def get_display_name(self, locale=None):
"""Return the display name of the locale using the given locale.
The display name will include the language, territory, script, and
variant, if those are specified.
>>> Locale('zh', 'CN', script='Hans').get_display_name('en')
u'Chinese (Simplified, China)'
:param locale: the locale to use
"""
if locale is None:
locale = self
locale = Locale.parse(locale)
retval = locale.languages.get(self.language)
if self.territory or self.script or self.variant:
details = []
if self.script:
details.append(locale.scripts.get(self.script))
if self.territory:
details.append(locale.territories.get(self.territory))
if self.variant:
details.append(locale.variants.get(self.variant))
details = filter(None, details)
if details:
retval += ' (%s)' % u', '.join(details)
return retval
display_name = property(get_display_name, doc="""\
The localized display name of the locale.
>>> Locale('en').display_name
u'English'
>>> Locale('en', 'US').display_name
u'English (United States)'
>>> Locale('sv').display_name
u'svenska'
:type: `unicode`
""")
def get_language_name(self, locale=None):
"""Return the language of this locale in the given locale.
>>> Locale('zh', 'CN', script='Hans').get_language_name('de')
u'Chinesisch'
.. versionadded:: 1.0
:param locale: the locale to use
"""
if locale is None:
locale = self
locale = Locale.parse(locale)
return locale.languages.get(self.language)
language_name = property(get_language_name, doc="""\
The localized language name of the locale.
>>> Locale('en', 'US').language_name
u'English'
""")
def get_territory_name(self, locale=None):
"""Return the territory name in the given locale."""
if locale is None:
locale = self
locale = Locale.parse(locale)
return locale.territories.get(self.territory)
territory_name = property(get_territory_name, doc="""\
The localized territory name of the locale if available.
>>> Locale('de', 'DE').territory_name
u'Deutschland'
""")
def get_script_name(self, locale=None):
"""Return the script name in the given locale."""
if locale is None:
locale = self
locale = Locale.parse(locale)
return locale.scripts.get(self.script)
script_name = property(get_script_name, doc="""\
The localized script name of the locale if available.
>>> Locale('ms', 'SG', script='Latn').script_name
u'Latin'
""")
@property
def english_name(self):
"""The english display name of the locale.
>>> Locale('de').english_name
u'German'
>>> Locale('de', 'DE').english_name
u'German (Germany)'
:type: `unicode`"""
return self.get_display_name(Locale('en'))
    #{ General Locale Display Names

    @property
    def languages(self):
        """Mapping of language codes to translated language names.

        >>> Locale('de', 'DE').languages['ja']
        u'Japanisch'

        See `ISO 639 <http://www.loc.gov/standards/iso639-2/>`_ for
        more information.
        """
        return self._data['languages']

    @property
    def scripts(self):
        """Mapping of script codes to translated script names.

        >>> Locale('en', 'US').scripts['Hira']
        u'Hiragana'

        See `ISO 15924 <http://www.evertype.com/standards/iso15924/>`_
        for more information.
        """
        return self._data['scripts']

    @property
    def territories(self):
        """Mapping of territory codes to translated territory names.

        >>> Locale('es', 'CO').territories['DE']
        u'Alemania'

        See `ISO 3166 <http://www.iso.org/iso/en/prods-services/iso3166ma/>`_
        for more information.
        """
        return self._data['territories']

    @property
    def variants(self):
        """Mapping of variant codes to translated variant names.

        >>> Locale('de', 'DE').variants['1901']
        u'Alte deutsche Rechtschreibung'
        """
        return self._data['variants']
    #{ Number Formatting

    @property
    def currencies(self):
        """Mapping of currency codes to translated currency names.  This
        only returns the generic form of the currency name, not the count
        specific one.  If an actual number is requested use the
        :func:`babel.numbers.get_currency_name` function.

        >>> Locale('en').currencies['COP']
        u'Colombian Peso'
        >>> Locale('de', 'DE').currencies['COP']
        u'Kolumbianischer Peso'
        """
        return self._data['currency_names']

    @property
    def currency_symbols(self):
        """Mapping of currency codes to symbols.

        >>> Locale('en', 'US').currency_symbols['USD']
        u'$'
        >>> Locale('es', 'CO').currency_symbols['USD']
        u'US$'
        """
        return self._data['currency_symbols']

    @property
    def number_symbols(self):
        """Symbols used in number formatting (e.g. decimal and grouping
        separators).

        >>> Locale('fr', 'FR').number_symbols['decimal']
        u','
        """
        return self._data['number_symbols']

    @property
    def decimal_formats(self):
        """Locale patterns for decimal number formatting.

        NOTE(review): patterns appear to be keyed by format name, with
        ``None`` selecting the locale's default pattern — confirm
        against the babel.numbers formatting functions.

        >>> Locale('en', 'US').decimal_formats[None]
        <NumberPattern u'#,##0.###'>
        """
        return self._data['decimal_formats']

    @property
    def currency_formats(self):
        """Locale patterns for currency number formatting.

        >>> print Locale('en', 'US').currency_formats[None]
        <NumberPattern u'\\xa4#,##0.00'>
        """
        return self._data['currency_formats']

    @property
    def percent_formats(self):
        """Locale patterns for percent number formatting.

        >>> Locale('en', 'US').percent_formats[None]
        <NumberPattern u'#,##0%'>
        """
        return self._data['percent_formats']

    @property
    def scientific_formats(self):
        """Locale patterns for scientific number formatting.

        >>> Locale('en', 'US').scientific_formats[None]
        <NumberPattern u'#E0'>
        """
        return self._data['scientific_formats']
    #{ Calendar Information and Date Formatting

    @property
    def periods(self):
        """Locale display names for day periods (AM/PM).

        >>> Locale('en', 'US').periods['am']
        u'AM'
        """
        return self._data['periods']

    @property
    def days(self):
        """Locale display names for weekdays, keyed by context and width,
        then by weekday number with 0 being Monday.

        >>> Locale('de', 'DE').days['format']['wide'][3]
        u'Donnerstag'
        """
        return self._data['days']

    @property
    def months(self):
        """Locale display names for months, keyed by context and width,
        then by 1-based month number.

        >>> Locale('de', 'DE').months['format']['wide'][10]
        u'Oktober'
        """
        return self._data['months']

    @property
    def quarters(self):
        """Locale display names for quarters, keyed by context and width,
        then by 1-based quarter number.

        >>> Locale('de', 'DE').quarters['format']['wide'][1]
        u'1. Quartal'
        """
        return self._data['quarters']

    @property
    def eras(self):
        """Locale display names for eras, keyed by width, then by era
        index (0 for the era before the epoch, 1 for the era after it).

        >>> Locale('en', 'US').eras['wide'][1]
        u'Anno Domini'
        >>> Locale('en', 'US').eras['abbreviated'][0]
        u'BC'
        """
        return self._data['eras']

    @property
    def time_zones(self):
        """Locale display names for time zones, keyed by Olson zone name.

        >>> Locale('en', 'US').time_zones['Europe/London']['long']['daylight']
        u'British Summer Time'
        >>> Locale('en', 'US').time_zones['America/St_Johns']['city']
        u'St. John\u2019s'
        """
        return self._data['time_zones']

    @property
    def meta_zones(self):
        """Locale display names for meta time zones.

        Meta time zones are basically groups of different Olson time zones that
        have the same GMT offset and daylight savings time.

        >>> Locale('en', 'US').meta_zones['Europe_Central']['long']['daylight']
        u'Central European Summer Time'

        .. versionadded:: 0.9
        """
        return self._data['meta_zones']

    @property
    def zone_formats(self):
        """Patterns related to the formatting of time zones.

        >>> Locale('en', 'US').zone_formats['fallback']
        u'%(1)s (%(0)s)'
        >>> Locale('pt', 'BR').zone_formats['region']
        u'Hor\\xe1rio %s'

        .. versionadded:: 0.9
        """
        return self._data['zone_formats']
    @property
    def first_week_day(self):
        """The first day of a week, with 0 being Monday.

        >>> Locale('de', 'DE').first_week_day
        0
        >>> Locale('en', 'US').first_week_day
        6
        """
        return self._data['week_data']['first_day']

    @property
    def weekend_start(self):
        """The day the weekend starts, with 0 being Monday.

        >>> Locale('de', 'DE').weekend_start
        5
        """
        return self._data['week_data']['weekend_start']

    @property
    def weekend_end(self):
        """The day the weekend ends, with 0 being Monday.

        >>> Locale('de', 'DE').weekend_end
        6
        """
        return self._data['week_data']['weekend_end']

    @property
    def min_week_days(self):
        """The minimum number of days in a week so that the week is counted as
        the first week of a year or month.

        >>> Locale('de', 'DE').min_week_days
        4
        """
        return self._data['week_data']['min_days']
    @property
    def date_formats(self):
        """Locale patterns for date formatting, keyed by format length
        (e.g. ``'short'``, ``'long'``).

        >>> Locale('en', 'US').date_formats['short']
        <DateTimePattern u'M/d/yy'>
        >>> Locale('fr', 'FR').date_formats['long']
        <DateTimePattern u'd MMMM y'>
        """
        return self._data['date_formats']

    @property
    def time_formats(self):
        """Locale patterns for time formatting, keyed by format length
        (e.g. ``'short'``, ``'long'``).

        >>> Locale('en', 'US').time_formats['short']
        <DateTimePattern u'h:mm a'>
        >>> Locale('fr', 'FR').time_formats['long']
        <DateTimePattern u'HH:mm:ss z'>
        """
        return self._data['time_formats']

    @property
    def datetime_formats(self):
        """Locale patterns for datetime formatting; ``{1}`` is the place of
        the date pattern and ``{0}`` the place of the time pattern.

        >>> Locale('en').datetime_formats['full']
        u"{1} 'at' {0}"
        >>> Locale('th').datetime_formats['medium']
        u'{1}, {0}'
        """
        return self._data['datetime_formats']

    @property
    def plural_form(self):
        """Plural rules for the locale: a callable mapping a number to a
        plural tag such as ``'one'``, ``'many'`` or ``'other'``.

        >>> Locale('en').plural_form(1)
        'one'
        >>> Locale('en').plural_form(0)
        'other'
        >>> Locale('fr').plural_form(0)
        'one'
        >>> Locale('ru').plural_form(100)
        'many'
        """
        return self._data['plural_form']
def default_locale(category=None, aliases=LOCALE_ALIASES):
    """Returns the system default locale for a given category, based on
    environment variables, or ``None`` if no usable variable is set.

    >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE']:
    ...     os.environ[name] = ''
    >>> os.environ['LANG'] = 'fr_FR.UTF-8'
    >>> default_locale('LC_MESSAGES')
    'fr_FR'

    The "C" or "POSIX" pseudo-locales are treated as aliases for the
    "en_US_POSIX" locale:

    >>> os.environ['LC_MESSAGES'] = 'POSIX'
    >>> default_locale('LC_MESSAGES')
    'en_US_POSIX'

    The following fallbacks to the variable are always considered:

    - ``LANGUAGE``
    - ``LC_ALL``
    - ``LC_CTYPE``
    - ``LANG``

    :param category: one of the ``LC_XXX`` environment variable names
    :param aliases: a dictionary of aliases for locale identifiers
    """
    # The requested category (if any) wins, then the standard fallbacks.
    for varname in (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG'):
        if not varname:
            continue
        value = os.getenv(varname)
        if not value:
            continue
        if varname == 'LANGUAGE' and ':' in value:
            # LANGUAGE may hold a colon-separated priority list of language
            # codes; only the first entry is considered here.
            value = value.split(':')[0]
        if value in ('C', 'POSIX'):
            value = 'en_US_POSIX'
        elif aliases and value in aliases:
            value = aliases[value]
        try:
            return get_locale_identifier(parse_locale(value))
        except ValueError:
            # Unparseable value: fall through to the next variable.
            pass
def negotiate_locale(preferred, available, sep='_', aliases=LOCALE_ALIASES):
    """Find the best match between available and requested locale strings.

    >>> negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT'])
    'de_DE'
    >>> negotiate_locale(['de_DE', 'en_US'], ['en', 'de'])
    'de'

    Case is ignored by the algorithm, the result uses the case of the preferred
    locale identifier:

    >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at'])
    'de_DE'

    By default, some web browsers unfortunately do not include the territory
    in the locale identifier for many locales, and some don't even allow the
    user to easily add the territory.  So while you may prefer using qualified
    locale identifiers in your web-application, they would not normally match
    the language-only locale sent by such browsers.  To workaround that, this
    function uses a default mapping of commonly used language-only locale
    identifiers to identifiers including the territory:

    >>> negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US'])
    'ja_JP'

    Some browsers even use an incorrect or outdated language code, such as "no"
    for Norwegian, where the correct locale identifier would actually be "nb_NO"
    (Bokmål) or "nn_NO" (Nynorsk).  The aliases are intended to take care of
    such cases, too:

    >>> negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE'])
    'nb_NO'

    You can override this default mapping by passing a different `aliases`
    dictionary to this function, or bypass the behavior altogether by setting
    the `aliases` parameter to `None`.

    :param preferred: the list of locale strings preferred by the user
    :param available: the list of locale strings available
    :param sep: character that separates the different parts of the locale
                strings
    :param aliases: a dictionary of aliases for locale identifiers
    """
    # Compare case-insensitively against the available identifiers.
    lowered = [candidate.lower() for candidate in available if candidate]
    for wanted in preferred:
        key = wanted.lower()
        # 1. Exact (case-insensitive) match, returned in the preferred case.
        if key in lowered:
            return wanted
        # 2. Known alias (e.g. 'ja' -> 'ja_JP'), rewritten with `sep`.
        if aliases:
            alias = aliases.get(key)
            if alias:
                alias = alias.replace('_', sep)
                if alias.lower() in lowered:
                    return alias
        # 3. Language-only fallback of a qualified identifier.
        pieces = wanted.split(sep)
        if len(pieces) > 1 and pieces[0].lower() in lowered:
            return pieces[0]
    return None
def parse_locale(identifier, sep='_'):
    """Parse a locale identifier into a tuple of the form ``(language,
    territory, script, variant)``.

    >>> parse_locale('zh_CN')
    ('zh', 'CN', None, None)
    >>> parse_locale('zh_Hans_CN')
    ('zh', 'CN', 'Hans', None)

    The default component separator is "_", but a different separator can be
    specified using the `sep` parameter:

    >>> parse_locale('zh-CN', sep='-')
    ('zh', 'CN', None, None)

    If the identifier cannot be parsed into a locale, a `ValueError` exception
    is raised:

    >>> parse_locale('not_a_LOCALE_String')
    Traceback (most recent call last):
      ...
    ValueError: 'not_a_LOCALE_String' is not a valid locale identifier

    Encoding information (after a ".") and locale modifiers (after an "@")
    are removed from the identifier:

    >>> parse_locale('it_IT@euro')
    ('it', 'IT', None, None)
    >>> parse_locale('en_US.UTF-8')
    ('en', 'US', None, None)
    >>> parse_locale('de_DE.iso885915@euro')
    ('de', 'DE', None, None)

    See :rfc:`4646` for more information.

    :param identifier: the locale identifier string
    :param sep: character that separates the different components of the locale
                identifier
    :raise `ValueError`: if the string does not appear to be a valid locale
                         identifier
    """
    # Discard charset/encoding (".UTF-8") and modifier ("@euro") suffixes;
    # partition() leaves the string untouched when the marker is absent.
    identifier = identifier.partition('.')[0].partition('@')[0]

    parts = identifier.split(sep)
    lang = parts.pop(0).lower()
    if not lang.isalpha():
        raise ValueError('expected only letters, got %r' % lang)

    script = territory = variant = None
    # Script: exactly four letters, title-cased (e.g. 'Hans').
    if parts and len(parts[0]) == 4 and parts[0].isalpha():
        script = parts.pop(0).title()
    # Territory: two letters (upper-cased) or a three-digit region code.
    if parts:
        head = parts[0]
        if len(head) == 2 and head.isalpha():
            territory = parts.pop(0).upper()
        elif len(head) == 3 and head.isdigit():
            territory = parts.pop(0)
    # Variant: four chars starting with a digit, or five-plus starting
    # with a letter (e.g. 'POSIX', '1999').
    if parts:
        head = parts[0]
        looks_numeric = len(head) == 4 and head[0].isdigit()
        looks_named = len(head) >= 5 and head[0].isalpha()
        if looks_numeric or looks_named:
            variant = parts.pop()
    if parts:
        raise ValueError('%r is not a valid locale identifier' % identifier)
    return lang, territory, script, variant
def get_locale_identifier(tup, sep='_'):
    """The reverse of :func:`parse_locale`: build a locale identifier out of
    a ``(language, territory, script, variant)`` tuple.

    Items can be set to ``None`` and trailing ``None`` items may be left off
    the tuple entirely.

    >>> get_locale_identifier(('de', 'DE', None, '1999'))
    'de_DE_1999'

    .. versionadded:: 1.0

    :param tup: the tuple as returned by :func:`parse_locale`.
    :param sep: the separator for the identifier.
    """
    # Normalize to exactly four components, padding missing ones with None.
    components = tuple(tup[:4])
    components += (None,) * (4 - len(components))
    lang, territory, script, variant = components
    # Identifier order is language-script-territory-variant; empty/None
    # components are dropped.
    return sep.join(part for part in (lang, script, territory, variant) if part)
|
kawamon/hue | refs/heads/master | desktop/libs/notebook/src/notebook/connectors/flink_sql.py | 2 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import logging
import json
import posixpath
import sys
from desktop.lib.i18n import force_unicode
from desktop.lib.rest.http_client import HttpClient, RestException
from desktop.lib.rest.resource import Resource
from notebook.connectors.base import Api, QueryError
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)

# Content type for the gateway's JSON REST API.
_JSON_CONTENT_TYPE = 'application/json'
_API_VERSION = 'v1'

# Process-wide cache of gateway sessions, keyed by SESSION_KEY.
SESSIONS = {}
SESSION_KEY = '%(username)s-%(connector_name)s'

# Module-global result-fetch token (next page index) used by
# FlinkSqlApi.execute/check_status/fetch_result.
# NOTE(review): being global, it is shared by every user and query in this
# process — concurrent streaming queries would interfere; confirm intended.
n = 0
def query_error_handler(func):
  """Decorator converting errors raised by a connector method into QueryError.

  RestException payloads are parsed for their JSON ``errors`` field when
  possible; any other exception is wrapped with its string representation.
  """
  # Local import keeps the module's import block untouched.
  from functools import wraps

  @wraps(func)  # preserve the wrapped method's name/docstring for callers
  def decorator(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except RestException as e:
      try:
        message = force_unicode(json.loads(e.message)['errors'])
      except Exception:
        # Body was not the expected JSON error document; fall back to raw.
        message = e.message
      message = force_unicode(message)
      raise QueryError(message)
    except Exception as e:
      message = force_unicode(str(e))
      raise QueryError(message)
  return decorator
class FlinkSqlApi(Api):
  """Hue notebook connector executing SQL through a Flink SQL gateway.

  Gateway sessions are cached process-wide in the module-level SESSIONS
  dict (keyed by user and connector name) and transparently re-created
  when the gateway reports them as expired.
  """

  def __init__(self, user, interpreter=None):
    Api.__init__(self, user, interpreter=interpreter)
    self.options = interpreter['options']
    api_url = self.options['url']
    self.db = FlinkSqlClient(user=user, api_url=api_url)

  @query_error_handler
  def create_session(self, lang=None, properties=None):
    # Reuses the cached gateway session rather than always opening a new one.
    session = self._get_session()
    response = {
      'type': lang,
      'id': session['session_id']
    }
    return response

  def _get_session(self):
    """Return the cached gateway session for this user/connector, creating
    or re-creating it when missing or expired on the gateway side."""
    session_key = SESSION_KEY % {
      'username': self.user.username,
      'connector_name': self.interpreter['name']
    }
    if session_key not in SESSIONS:
      SESSIONS[session_key] = self.db.create_session()
    try:
      # Heartbeat doubles as a liveness probe for the cached session.
      self.db.session_heartbeat(session_id=SESSIONS[session_key]['session_id'])
    except Exception as e:
      if 'Session: %(id)s does not exist' % SESSIONS[session_key] in str(e):
        LOG.warning('Session: %(id)s does not exist, opening a new one' % SESSIONS[session_key])
        SESSIONS[session_key] = self.db.create_session()
      else:
        raise e
    # Normalize: expose the gateway's session_id under the generic 'id' key.
    SESSIONS[session_key]['id'] = SESSIONS[session_key]['session_id']
    return SESSIONS[session_key]

  @query_error_handler
  def execute(self, notebook, snippet):
    # Reset the module-global fetch token for the new statement.
    global n
    n = 0
    session = self._get_session()
    session_id = session['id']
    job_id = None
    # The gateway rejects trailing semicolons.
    statement = snippet['statement'].strip().rstrip(';')
    resp = self.db.execute_statement(session_id=session_id, statement=statement)
    if resp['statement_types'][0] == 'SELECT':
      # SELECT runs asynchronously: the first cell of the first result row
      # carries the job handle used for later status/result fetches.
      job_id = resp['results'][0]['data'][0][0]
      data, description = [], []
      # TODO: change_flags
    else:
      # DDL/DML return their result inline.
      data, description = resp['results'][0]['data'], resp['results'][0]['columns']
    has_result_set = data is not None
    return {
      'sync': job_id is None,
      'has_result_set': has_result_set,
      'guid': job_id,
      'result': {
        'has_more': job_id is not None,
        'data': data if job_id is None else [],
        'meta': [{
            'name': col['name'],
            'type': col['type'],
            'comment': ''
          }
          for col in description
        ]
        if has_result_set else [],
        'type': 'table'
      }
    }

  @query_error_handler
  def check_status(self, notebook, snippet):
    """Map the gateway job status onto Hue snippet statuses
    (RUNNING -> streaming, FINISHED -> available, ...)."""
    global n
    response = {}
    session = self._get_session()
    status = 'expired'
    if snippet.get('result'):
      statement_id = snippet['result']['handle']['guid']
      if session:
        if not statement_id:  # Sync result: data was returned inline.
          status = 'available'
        else:
          try:
            resp = self.db.fetch_status(session['id'], statement_id)
            if resp.get('status') == 'RUNNING':
              status = 'streaming'
              # Piggyback the next result page while the job streams.
              response['result'] = self.fetch_result(notebook, snippet, n, False)
            elif resp.get('status') == 'FINISHED':
              status = 'available'
            elif resp.get('status') == 'FAILED':
              status = 'failed'
            elif resp.get('status') == 'CANCELED':
              status = 'expired'
          except Exception as e:
            if '%s does not exist in current session' % statement_id in str(e):
              LOG.warning('Job: %s does not exist' % statement_id)
            else:
              raise e
    response['status'] = status
    return response

  @query_error_handler
  def fetch_result(self, notebook, snippet, rows, start_over):
    """Fetch the next page of results for an async (streaming) statement.

    NOTE(review): paging uses the module-global token `n` instead of the
    `rows` argument, so all queries in the process share one cursor.
    """
    global n
    session = self._get_session()
    statement_id = snippet['result']['handle']['guid']
    token = n  # module-global page token, not the caller-supplied `rows`
    resp = self.db.fetch_results(session['id'], job_id=statement_id, token=token)
    next_result = resp.get('next_result_uri')
    if next_result:
      # The next token is the trailing path segment of next_result_uri.
      n = int(next_result.rsplit('/', 1)[-1])
    return {
      'has_more': bool(next_result),
      'data': resp and resp['results'][0]['data'] or [],  # No escaping...
      'meta': [{
          'name': column['name'],
          'type': column['type'],
          'comment': ''
        }
        for column in resp['results'][0]['columns'] if resp
      ],
      'type': 'table'
    }

  @query_error_handler
  def autocomplete(self, snippet, database=None, table=None, column=None, nested=None, operation=None):
    """Progressively narrow metadata: databases, then tables, then columns."""
    response = {}
    if database is None:
      response['databases'] = self._show_databases()
    elif table is None:
      response['tables_meta'] = self._show_tables(database)
    elif column is None:
      columns = self._get_columns(database, table)
      response['columns'] = [col['name'] for col in columns]
      response['extended_columns'] = [{
          'comment': col.get('comment'),
          'name': col.get('name'),
          'type': col['type']
        }
        for col in columns
      ]
    return response

  @query_error_handler
  def get_sample_data(self, snippet, database=None, table=None, column=None, is_async=False, operation=None):
    # The 'hello' operation is a connectivity smoke test.
    if operation == 'hello':
      snippet['statement'] = "SELECT 'Hello World!'"
    notebook = {}
    sample = self.execute(notebook, snippet)
    response = {
      'status': 0,
      'result': {}
    }
    response['rows'] = sample['result']['data']
    response['full_headers'] = sample['result']['meta']
    return response

  def cancel(self, notebook, snippet):
    """Cancel the running statement; tolerate already-gone jobs."""
    session = self._get_session()
    statement_id = snippet['result']['handle']['guid']
    try:
      if session and statement_id:
        self.db.close_statement(session_id=session['id'], job_id=statement_id)
      else:
        return {'status': -1}  # missing operation ids
    except Exception as e:
      if 'does not exist in current session:' in str(e):
        return {'status': -1}  # skipped: job already finished/removed
      else:
        raise e
    return {'status': 0}

  def close_session(self, session):
    # Avoid closing session on page refresh or editor close for now
    pass
    # session = self._get_session()
    # self.db.close_session(session['id'])

  def _show_databases(self):
    """Return the list of database names from SHOW DATABASES."""
    session = self._get_session()
    session_id = session['id']
    resp = self.db.execute_statement(session_id=session_id, statement='SHOW DATABASES')
    # Each row is a single-element list holding the database name.
    return [db[0] for db in resp['results'][0]['data']]

  def _show_tables(self, database):
    """Return the table names of `database` (switches the session database)."""
    session = self._get_session()
    session_id = session['id']
    resp = self.db.execute_statement(session_id=session_id, statement='USE %(database)s' % {'database': database})
    resp = self.db.execute_statement(session_id=session_id, statement='SHOW TABLES')
    return [table[0] for table in resp['results'][0]['data']]

  def _get_columns(self, database, table):
    """Return [{'name', 'type', 'comment'}] for `table` via DESCRIBE."""
    session = self._get_session()
    session_id = session['id']
    resp = self.db.execute_statement(session_id=session_id, statement='USE %(database)s' % {'database': database})
    resp = self.db.execute_statement(session_id=session_id, statement='DESCRIBE %(table)s' % {'table': table})
    columns = resp['results'][0]['data']
    return [{
        'name': col[0],
        'type': col[1],  # Types to unify
        'comment': '',
      }
      for col in columns
    ]
class FlinkSqlClient():
  '''
  Thin REST client for the Flink SQL gateway.

  Implements https://github.com/ververica/flink-sql-gateway
  Could be a pip module or sqlalchemy dialect in the future.
  '''

  def __init__(self, user, api_url):
    self.user = user
    # Base URL includes the API version, e.g. http://host:port/v1/
    self._url = posixpath.join(api_url + '/' + _API_VERSION + '/')
    self._client = HttpClient(self._url, logger=LOG)
    self._root = Resource(self._client)

  def __str__(self):
    return "FlinkClient at %s" % (self._url,)

  def info(self):
    # GET /info: gateway version and product info.
    return self._root.get('info')

  def create_session(self, **properties):
    """POST /sessions with defaults; keyword arguments override them."""
    data = {
      "session_name": "test",  # optional
      "planner": "blink",  # required, "old"/"blink"
      "execution_type": "streaming",  # required, "batch"/"streaming"
      "properties": {  # optional
        "key": "value"
      }
    }
    data.update(properties)
    return self._root.post('sessions', data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)

  def session_heartbeat(self, session_id):
    # Keeps the session alive; raises if the gateway no longer knows it.
    return self._root.post('sessions/%(session_id)s/heartbeat' % {'session_id': session_id})

  def execute_statement(self, session_id, statement):
    """POST a SQL statement to the session and return the gateway response."""
    data = {
      "statement": statement,  # required
      "execution_timeout": ""  # execution time limit in milliseconds, optional, but required for stream SELECT ?
    }
    return self._root.post(
      'sessions/%(session_id)s/statements' % {
        'session_id': session_id
      },
      data=json.dumps(data),
      contenttype=_JSON_CONTENT_TYPE
    )

  def fetch_status(self, session_id, job_id):
    # GET the job status (RUNNING/FINISHED/FAILED/CANCELED).
    return self._root.get(
      'sessions/%(session_id)s/jobs/%(job_id)s/status' % {
        'session_id': session_id,
        'job_id': job_id
      }
    )

  def fetch_results(self, session_id, job_id, token=0):
    # GET one page of results; `token` is the page index from next_result_uri.
    return self._root.get(
      'sessions/%(session_id)s/jobs/%(job_id)s/result/%(token)s' % {
        'session_id': session_id,
        'job_id': job_id,
        'token': token
      }
    )

  def close_statement(self, session_id, job_id):
    # DELETE the job (cancels a running statement).
    return self._root.delete(
      'sessions/%(session_id)s/jobs/%(job_id)s' % {
        'session_id': session_id,
        'job_id': job_id,
      }
    )

  def close_session(self, session_id):
    # DELETE the session itself.
    return self._root.delete(
      'sessions/%(session_id)s' % {
        'session_id': session_id,
      }
    )
|
NeuralEnsemble/neuroConstruct | refs/heads/master | lib/jython/Lib/test/test_univnewlines2k.py | 137 | # Tests universal newline support for both reading and parsing files.
import unittest
import os
import sys
from test import test_support
# Universal newlines are an optional interpreter feature; skip the whole
# module when this Python build lacks them (Py2 raise syntax — Jython lib).
if not hasattr(sys.stdin, 'newlines'):
    raise unittest.SkipTest, \
        "This Python does not have universal newline support"

# A line long enough to defeat the fast path inside fileobject.c.
FATX = 'x' * (2**14)

DATA_TEMPLATE = [
    "line1=1",
    "line2='this is a very long line designed to go past the magic " +
    "hundred character limit that is inside fileobject.c and which " +
    "is meant to speed up the common case, but we also want to test " +
    "the uncommon case, naturally.'",
    "def line3():pass",
    "line4 = '%s'" % FATX,
    ]

# The same content with each newline convention under test.
DATA_LF = "\n".join(DATA_TEMPLATE) + "\n"
DATA_CR = "\r".join(DATA_TEMPLATE) + "\r"
DATA_CRLF = "\r\n".join(DATA_TEMPLATE) + "\r\n"

# Note that DATA_MIXED also tests the ability to recognize a lone \r
# before end-of-file.
DATA_MIXED = "\n".join(DATA_TEMPLATE) + "\r"

# Expected result of reading any variant in universal-newline mode.
DATA_SPLIT = [x + "\n" for x in DATA_TEMPLATE]
# Python 2 list comprehensions leak the loop variable; remove it.
del x
class TestGenericUnivNewlines(unittest.TestCase):
    # Base class: subclasses set DATA (the raw bytes written to the test
    # file) and NEWLINE (the expected value of the file's .newlines attr).
    READMODE = 'U'
    WRITEMODE = 'wb'

    def setUp(self):
        # Write the raw data in binary mode so no translation happens.
        with open(test_support.TESTFN, self.WRITEMODE) as fp:
            fp.write(self.DATA)

    def tearDown(self):
        try:
            os.unlink(test_support.TESTFN)
        except:
            pass

    def test_read(self):
        # A bulk read must normalize every convention to plain '\n'.
        with open(test_support.TESTFN, self.READMODE) as fp:
            data = fp.read()
        self.assertEqual(data, DATA_LF)
        self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))

    def test_readlines(self):
        with open(test_support.TESTFN, self.READMODE) as fp:
            data = fp.readlines()
        self.assertEqual(data, DATA_SPLIT)
        self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))

    def test_readline(self):
        # Line-at-a-time reading must see the same normalized lines.
        with open(test_support.TESTFN, self.READMODE) as fp:
            data = []
            d = fp.readline()
            while d:
                data.append(d)
                d = fp.readline()
        self.assertEqual(data, DATA_SPLIT)
        self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))

    def test_seek(self):
        # Seeking back to a tell() position must reproduce the same lines.
        with open(test_support.TESTFN, self.READMODE) as fp:
            fp.readline()
            pos = fp.tell()
            data = fp.readlines()
            self.assertEqual(data, DATA_SPLIT[1:])
            fp.seek(pos)
            data = fp.readlines()
        self.assertEqual(data, DATA_SPLIT[1:])

    def test_execfile(self):
        # execfile() must also read source with universal newlines.
        namespace = {}
        with test_support.check_py3k_warnings():
            execfile(test_support.TESTFN, namespace)
        func = namespace['line3']
        self.assertEqual(func.func_code.co_firstlineno, 3)
        self.assertEqual(namespace['line4'], FATX)
class TestNativeNewlines(TestGenericUnivNewlines):
    # Control case: plain text mode, LF data, .newlines stays None.
    NEWLINE = None
    DATA = DATA_LF
    READMODE = 'r'
    WRITEMODE = 'w'
class TestCRNewlines(TestGenericUnivNewlines):
    # Old-Mac-style CR-only line endings.
    NEWLINE = '\r'
    DATA = DATA_CR
class TestLFNewlines(TestGenericUnivNewlines):
    # Unix-style LF line endings, read in universal mode.
    NEWLINE = '\n'
    DATA = DATA_LF
class TestCRLFNewlines(TestGenericUnivNewlines):
    # DOS/Windows-style CRLF line endings.
    NEWLINE = '\r\n'
    DATA = DATA_CRLF

    def test_tell(self):
        # .newlines is None before any line is read, and reflects the
        # detected convention afterwards; tell() must not disturb it.
        with open(test_support.TESTFN, self.READMODE) as fp:
            self.assertEqual(repr(fp.newlines), repr(None))
            data = fp.readline()
            pos = fp.tell()
        self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))
class TestMixedNewlines(TestGenericUnivNewlines):
    # Mixed conventions in one file: .newlines becomes a tuple of all seen.
    NEWLINE = ('\r', '\n')
    DATA = DATA_MIXED
def test_main():
    # Run every newline-convention variant through the shared base tests.
    test_support.run_unittest(
        TestNativeNewlines,
        TestCRNewlines,
        TestLFNewlines,
        TestCRLFNewlines,
        TestMixedNewlines
    )

if __name__ == '__main__':
    test_main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.